code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
from imp.better import BetterInteraction
from discord import app_commands
from imp.better import errors
def IsHost(message: str = None):
    """Build an app-command check that only passes for game hosts.

    On failure the given *message* (or a default) is sent as an ephemeral
    reply and a ``HostCheckError`` is raised.
    """
    text = message or "You must be a game host for this."

    async def predicate(interaction: BetterInteraction):
        if interaction.client.game_manager.is_host(interaction.user):
            return True
        await interaction.response.send_message(content=text, ephemeral=True)
        raise errors.HostCheckError(text)

    return app_commands.check(predicate)
def IsNotHost(message: str = None):
    """Build an app-command check that only passes for users who are NOT
    game hosts.

    On failure the given *message* (or a default) is sent as an ephemeral
    reply and a ``HostCheckError`` is raised.
    """
    text = message or "You mustn't be a game host for this."

    async def predicate(interaction: BetterInteraction):
        if not interaction.client.game_manager.is_host(interaction.user):
            return True
        await interaction.response.send_message(content=text, ephemeral=True)
        raise errors.HostCheckError(text)

    return app_commands.check(predicate)
def IsParticipant(message: str = None):
    """Build an app-command check that only passes for users currently
    participating in a game.

    On failure the given *message* (or a default) is sent as an ephemeral
    reply and a ``ParticipantCheckError`` is raised.
    """
    text = message or "You must participate in a game for this."

    async def predicate(interaction: BetterInteraction):
        if interaction.client.game_manager.is_player(interaction.user):
            return True
        await interaction.response.send_message(content=text, ephemeral=True)
        raise errors.ParticipantCheckError(text)

    return app_commands.check(predicate)
def IsNotParticipant(message: str = None):
    """Build an app-command check that only passes for users who are NOT
    participating in any game.

    On failure the given *message* (or a default) is sent as an ephemeral
    reply and a ``ParticipantCheckError`` is raised.
    """
    text = message or "You mustn't participate in a game for this."

    async def predicate(interaction: BetterInteraction):
        if not interaction.client.game_manager.is_player(interaction.user):
            return True
        await interaction.response.send_message(content=text, ephemeral=True)
        raise errors.ParticipantCheckError(text)

    return app_commands.check(predicate)
def GameStarted(message: str = None):
    """Build an app-command check that only passes once the invoking
    user's game has started.

    On failure the given *message* (or a default) is sent as an ephemeral
    reply and a ``GameCheckError`` is raised.
    """
    text = message or "The game must be started for this."

    async def predicate(interaction: BetterInteraction):
        if interaction.client.game_manager.get_player_game(interaction.user).started:
            return True
        await interaction.response.send_message(content=text, ephemeral=True)
        raise errors.GameCheckError(text)

    return app_commands.check(predicate)
def GameNotStarted(message: str = None):
    """Build an app-command check that only passes while the invoking
    user's game has NOT yet started.

    On failure the given *message* (or a default) is sent as an ephemeral
    reply and a ``GameCheckError`` is raised.
    """
    text = message or "The game mustn't be started for this."

    async def predicate(interaction: BetterInteraction):
        if not interaction.client.game_manager.get_player_game(interaction.user).started:
            return True
        await interaction.response.send_message(content=text, ephemeral=True)
        raise errors.GameCheckError(text)

    return app_commands.check(predicate)
def PlayerTurn(message: str = None):
    """Build an app-command check that only passes when it is the
    invoking user's turn in their game.

    On failure the given *message* (or a default) is sent as an ephemeral
    reply and a ``PlayerTurnCheckError`` is raised.
    """
    text = message or "It's not your turn!"

    async def predicate(interaction: BetterInteraction):
        game = interaction.client.game_manager.get_player_game(interaction.user)
        if game.is_turn(game.get_participant(interaction.user)):
            return True
        await interaction.response.send_message(content=text, ephemeral=True)
        raise errors.PlayerTurnCheckError(text)

    return app_commands.check(predicate)
| [
"imp.better.errors.GameCheckError",
"discord.app_commands.check",
"imp.better.errors.HostCheckError",
"imp.better.errors.ParticipantCheckError",
"imp.better.errors.PlayerTurnCheckError"
] | [((576, 605), 'discord.app_commands.check', 'app_commands.check', (['predicate'], {}), '(predicate)\n', (594, 605), False, 'from discord import app_commands\n'), ((1087, 1116), 'discord.app_commands.check', 'app_commands.check', (['predicate'], {}), '(predicate)\n', (1105, 1116), False, 'from discord import app_commands\n'), ((1612, 1641), 'discord.app_commands.check', 'app_commands.check', (['predicate'], {}), '(predicate)\n', (1630, 1641), False, 'from discord import app_commands\n'), ((2146, 2175), 'discord.app_commands.check', 'app_commands.check', (['predicate'], {}), '(predicate)\n', (2164, 2175), False, 'from discord import app_commands\n'), ((2670, 2699), 'discord.app_commands.check', 'app_commands.check', (['predicate'], {}), '(predicate)\n', (2688, 2699), False, 'from discord import app_commands\n'), ((3203, 3232), 'discord.app_commands.check', 'app_commands.check', (['predicate'], {}), '(predicate)\n', (3221, 3232), False, 'from discord import app_commands\n'), ((3777, 3806), 'discord.app_commands.check', 'app_commands.check', (['predicate'], {}), '(predicate)\n', (3795, 3806), False, 'from discord import app_commands\n'), ((512, 542), 'imp.better.errors.HostCheckError', 'errors.HostCheckError', (['message'], {}), '(message)\n', (533, 542), False, 'from imp.better import errors\n'), ((1023, 1053), 'imp.better.errors.HostCheckError', 'errors.HostCheckError', (['message'], {}), '(message)\n', (1044, 1053), False, 'from imp.better import errors\n'), ((1541, 1578), 'imp.better.errors.ParticipantCheckError', 'errors.ParticipantCheckError', (['message'], {}), '(message)\n', (1569, 1578), False, 'from imp.better import errors\n'), ((2075, 2112), 'imp.better.errors.ParticipantCheckError', 'errors.ParticipantCheckError', (['message'], {}), '(message)\n', (2103, 2112), False, 'from imp.better import errors\n'), ((2606, 2636), 'imp.better.errors.GameCheckError', 'errors.GameCheckError', (['message'], {}), '(message)\n', (2627, 2636), False, 'from imp.better 
import errors\n'), ((3139, 3169), 'imp.better.errors.GameCheckError', 'errors.GameCheckError', (['message'], {}), '(message)\n', (3160, 3169), False, 'from imp.better import errors\n'), ((3707, 3743), 'imp.better.errors.PlayerTurnCheckError', 'errors.PlayerTurnCheckError', (['message'], {}), '(message)\n', (3734, 3743), False, 'from imp.better import errors\n')] |
import sys
from unittest import TestLoader, TextTestRunner

if __name__ == "__main__":
    # Discover every test module under the current directory and run it.
    loader = TestLoader()
    suite = loader.discover('.')
    runner = TextTestRunner()
    result = runner.run(suite)
    # Exit non-zero when anything went wrong.  The previous version only
    # inspected ``result.errors``, so test *failures* (failed assertions)
    # silently produced exit code 0; ``wasSuccessful()`` covers both.
    if not result.wasSuccessful():
        sys.exit(1)
| [
"unittest.TextTestRunner",
"unittest.TestLoader"
] | [((89, 101), 'unittest.TestLoader', 'TestLoader', ([], {}), '()\n', (99, 101), False, 'from unittest import TestLoader, TextTestRunner\n'), ((152, 168), 'unittest.TextTestRunner', 'TextTestRunner', ([], {}), '()\n', (166, 168), False, 'from unittest import TestLoader, TextTestRunner\n')] |
import time, subprocess, os.path, re, multiprocessing, threading
from selenium import webdriver
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
class Kink:
    """Selenium-driven ripper for kink.com shoots/channels/performers.

    NOTE(review): several methods below reference names that are never
    defined anywhere in this class (``girl_xpath``,
    ``full_image_button_xpath``, ``full_image_url_xpath``,
    ``self.time_period``, ``self.sets_completed``, ``self.girls_completed``,
    ``self.__build_url``, ``self.__rip_set``) and build "Suicide Girls"
    directory names -- they appear to be copy-pasted from a different
    ripper and will raise NameError/AttributeError when executed.
    TODO: confirm and port or delete them.
    """
    # Class-level (shared) state: one driver / dispatcher for the process.
    driver = None
    dispatcher_thread = None
    argument_lists = []
    stop_dispatching = False
    def __init__(self, exec_dir, username, password, dir, process_limit, urls, type, quality, clip_quality, trailer, photos, clips, join):
        # NOTE(review): quality, clip_quality, trailer, photos, clips and
        # join are accepted but never stored or used anywhere in the class.
        Kink.dispatcher_thread = threading.Thread(target=self.__dispatch)
        self.username = username
        self.password = password
        self.root_url = "https://www.kink.com/"
        self.dir = dir
        self.exec_dir = exec_dir
        self.process_limit = process_limit
        self.urls = []
        self.type = type
        self.performers_completed = 0
        self.shoots_completed = 0
        self.channels_completed = 0
        # Normalise caller-supplied identifiers into full URLs.
        if type == "performer":
            for url in urls:
                self.urls.append(self.__build_performer_url(url))
        # NOTE(review): this was probably meant to be ``elif`` -- for
        # type == "performer" the ``else`` branch below runs and overwrites
        # self.urls with the raw input list, discarding the performer URLs
        # that were just built above.
        if type == "channel":
            for url in urls:
                self.urls.append(self.__build_channel_url(url))
        else:
            self.urls = urls
        #Kink.dispatcher_thread.start()
    def __dispatch(self):
        # Worker loop: drain Kink.argument_lists, downloading each batch of
        # images with a fresh process pool, until stop_dispatching is set
        # AND the queue is empty.  (Currently dormant -- the thread is never
        # started; see the commented-out ``start()`` in __init__.)
        print("Beginning dispatcher thread...")
        while not Kink.stop_dispatching or len(Kink.argument_lists) != 0:
            if len(Kink.argument_lists) != 0:
                print("Argument list found! Dispatching...")
                argument_list = Kink.argument_lists.pop(0)
                # NOTE(review): pools are created per batch and never
                # closed/joined -- worker processes may leak.
                pool = multiprocessing.Pool(self.process_limit)
                pool.map(self.download_image, argument_list)
        print("Exiting dispatcher thread...")
    def startup(self):
        # Launch Chrome (bundled chromedriver), open the site, dismiss the
        # content splash screen and log in with the stored credentials.
        Kink.driver = webdriver.Chrome(executable_path=os.path.join(os.path.abspath(self.exec_dir), "dependencies", "chromedriver.exe"))
        Kink.driver.maximize_window()
        Kink.driver.implicitly_wait(5)
        Kink.driver.get(self.root_url)
        self.__bypass_splash_screen()
        self.__login()
    def shutdown(self):
        # Quit the browser and release the webdriver session.
        Kink.driver.quit()
    def __bypass_splash_screen(self):
        # Click the "show everything" button on the content-type modal if
        # it is present; silently continue if the modal never appears.
        splash_screen_xpath = "//form[@id='contentTypeModal']"
        show_everything_xpath = "//button[@type='submit' and contains(@value,'gay') and contains(@value,'straight')]"
        try:
            Kink.driver.find_element_by_xpath(splash_screen_xpath)
            Kink.driver.find_element_by_xpath(show_everything_xpath).click()
        except:
            # NOTE(review): bare except also hides unrelated driver errors.
            pass
    def __login(self):
        # Fill in and submit the login form using the stored credentials.
        login_button_xpath = "//a[@id='kBarLogin']"
        login_form_submit_xpath = "//button[@type='submit' and @name='login']"
        username_box_xpath = "//input[@name='username']"
        password_box_xpath = "//input[@name='password']"
        Kink.driver.find_element_by_xpath(login_button_xpath).click()
        time.sleep(1)
        Kink.driver.find_element_by_xpath(username_box_xpath).send_keys(self.username)
        Kink.driver.find_element_by_xpath(password_box_xpath).send_keys(self.password)
        Kink.driver.find_element_by_xpath(login_form_submit_xpath).click()
        time.sleep(5)
        # NOTE(review): ``flag`` is never used and the loop below always
        # breaks on its first iteration -- captcha detection is unfinished.
        flag = False;
        while True:
            try:
                # Figure out what's needed for detecting captchas
                break
            except:
                break
        print("No captcha found!")
    def rip(self):
        # Dispatch each stored URL to the matching per-type ripper module
        # and internal handler, then print summary counters.
        for url in self.urls:
            Kink.driver.get(url)
            if self.type == "channel":
                print("Channel")
                from . import kink_channel
                kink_channel.rip(url)
                self.__rip_channel()
            elif self.type == "performer":
                print("Performer")
                from . import kink_performer
                kink_performer.rip(url)
                self.__rip_performer()
            elif self.type == "shoot":
                print("Single shoot")
                from . import kink_shoot
                kink_shoot.rip(url)
                self.__rip_shoot()
        Kink.stop_dispatching = True
        #Kink.dispatcher_thread.join()
        print("Rip completed.")
        print("Total shoots ripped: " + str(self.shoots_completed))
        print("Total channels ripped: " + str(self.channels_completed))
        print("Total performers ripped: " + str(self.performers_completed))
    def __rip_channel(self):
        # Not implemented yet.
        print("Sorry bro, not done yet.")
    def __rip_performer(self):
        # Not implemented yet.
        print("Sorry bro, not done yet.")
    def __rip_shoot(self):
        # NOTE(review): this method is broken -- ``girl_xpath``,
        # ``full_image_button_xpath`` and ``full_image_url_xpath`` are never
        # defined (NameError) and ``self.sets_completed`` is never
        # initialised (AttributeError).  The "Suicide Girls" naming suggests
        # the body was pasted from another project.  TODO: rewrite for kink.
        movie_button_xpath = "//div[@class='full']"
        movie_quality_xpath = "//ul[contains(@class,'full-movie') and contains(@class,'open')]"
        movie_xpath = "//ul[contains(@class,'full-movie') and contains(@class,'open')]/li/a"
        clips_button_xpath = "//div[@class='clips']"
        clips_categories_xpath = "//ul[contains(@class,'clips') and contains(@class,'open')]/li"
        clips_xpath = "//ul[contains(@class,'clips') and contains(@class,'open')]/li/ul/li/a"
        trailer_button_xpath = "//div[@class='trailer']"
        show_zips_xpath = "//a[@class='ziplink']"
        image_zip_link_xpath = "//a[@class='zip-links']"
        title_xpath = "//h1[@class='shoot-title']"
        performers_xpath = "//span[@class='names']/a"
        tags_xpath = "//p[@class='starring']/a"
        girl = Kink.driver.find_element_by_xpath(girl_xpath).text
        title = Kink.driver.find_element_by_xpath(title_xpath).text
        # Build a filesystem-safe target directory name.
        dir_name = os.path.join("Suicide Girls", girl.title(), title.title())
        dir_name = re.subn("[<>:\"/\|?*]", "", dir_name)[0]
        dir_name = re.subn("\\.{3,}", "…", dir_name)[0]
        dir_name = os.path.join(self.dir, dir_name)
        check = False
        if os.path.exists(dir_name):
            check = True
        Kink.driver.find_element_by_xpath(full_image_button_xpath).click()
        time.sleep(5)
        images = Kink.driver.find_elements_by_xpath(full_image_url_xpath)
        image_urls = []
        # Collect the URLs of images that are not already on disk.
        for i in range(0, len(images)):
            url = images[i].get_attribute("data-image_url")
            ext = url[url.rindex("."):]
            file_name = "Suicide Girls - " + girl.title() + " - " + title.title() + " - Img" + str(i + 1).zfill(3) + ext
            file_name = re.subn("[<>:\"/\|?*]", "", file_name)[0]
            file_name = re.subn("\\.{3,}", "…", file_name)[0]
            if not os.path.exists(os.path.join(dir_name, file_name)):
                image_urls.append(url)
            else:
                print(girl.title() + "/" + title.title() + " Img" + str(i).zfill(3) + " already exists, skipping...")
        self.__download_and_save_set(image_urls, girl, title)
        self.sets_completed += 1
    def __rip_all(self, type_xpath):
        # NOTE(review): broken -- ``self.time_period`` is never set and
        # ``self.__build_url`` does not exist on this class.  Appears to be
        # pasted from another ripper.  TODO confirm before use.
        time_period_xpath = "//li[@class='dropdown'][3]//ul/li/a[text() = '" + self.time_period + "']"
        girl_name_xpath = "//article/header//h2/a"
        load_more_xpath = "//a[@id='load-more']"
        choice = Kink.driver.find_element_by_xpath(type_xpath)
        Kink.driver.get(choice.get_attribute("href"))
        choice = Kink.driver.find_element_by_xpath(time_period_xpath)
        Kink.driver.get(choice.get_attribute("href"))
        girls = []
        iteration = 0
        # Keep clicking "load more" and harvesting names until the button
        # disappears; DOM nodes already read are removed to limit memory.
        while True:
            iteration += 1
            names = Kink.driver.find_elements_by_xpath(girl_name_xpath)
            for name in names:
                girls.append(name.text)
            if iteration > 1:
                Kink.driver.execute_script("for(i=0;i<24;i++) {e = document.evaluate(\"//article[1]\", document.documentElement); e = e.iterateNext(); if (e == null) {break;}e.parentNode.removeChild(e);}")
                time.sleep(2)
            lmb = Kink.driver.find_elements_by_xpath(load_more_xpath)
            if len(lmb) > 0 and lmb[0].is_displayed():
                lmb[0].click()
                time.sleep(10)
            else:
                break
        girls = list(set(girls))
        for girl in sorted(girls):
            url = self.__build_url(girl)
            Kink.driver.get(url)
            self.__rip_girl()
    def __rip_girl(self):
        # NOTE(review): broken -- ``self.__rip_set`` does not exist and
        # ``self.girls_completed`` is never initialised.  Pasted from
        # another ripper; TODO confirm before use.
        load_more_xpath = "//a[@id='load-more']"
        photos_xpath = "//div[@id='content-container']//a[text()='Photos']"
        photosets_xpath = "//div[@id='content-container']//a[text()='Photosets']"
        set_title_xpath = "//article/header//h2/a"
        url = Kink.driver.find_element_by_xpath(photos_xpath).get_attribute("href")
        Kink.driver.get(url)
        url = Kink.driver.find_element_by_xpath(photosets_xpath).get_attribute("href")
        Kink.driver.get(url)
        set_links = []
        iteration = 0
        # Same "load more" pagination pattern as __rip_all, for photosets.
        while True:
            iteration += 1
            titles = Kink.driver.find_elements_by_xpath(set_title_xpath)
            for title in titles:
                set_links.append(title.get_attribute("href"))
            if iteration > 1:
                Kink.driver.execute_script("for(i=0;i<9;i++) {e = document.evaluate(\"//article[1]\", document.documentElement); e = e.iterateNext(); if (e == null) {break;}e.parentNode.removeChild(e);}")
                time.sleep(2)
            lmb = Kink.driver.find_elements_by_xpath(load_more_xpath)
            if len(lmb) > 0 and lmb[0].is_displayed():
                lmb[0].click()
                time.sleep(10)
            else:
                break
        set_links = list(set(set_links))
        for link in set_links:
            Kink.driver.get(link)
            self.__rip_set()
        self.girls_completed += 1
    def __download_and_save_set(self, urls, girl, title):
        # Build one aria2c command line per image and queue the batch on
        # Kink.argument_lists for the (dormant) dispatcher thread.
        aria_path = os.path.join(self.exec_dir, "dependencies", "aria2", "aria2c.exe")
        error_strings = []
        dir_name = os.path.join("Suicide Girls", girl.title(), title.title())
        dir_name = re.subn("[<>:\"/\|?*]", "", dir_name)[0]
        dir_name = re.subn("\\.{3,}", "…", dir_name)[0]
        dir_name = os.path.join(self.dir, dir_name)
        # NOTE(review): ``pool`` is created but never used.
        with multiprocessing.Pool(8) as pool:
            args = []
            for i in range (0, len(urls)):
                command = [aria_path, "-d", dir_name, "-o"]
                ext = urls[i][urls[i].rindex("."):]
                file_name = "Suicide Girls - " + girl.title() + " - " + title.title() + " - Img" + str(i + 1).zfill(3) + ext
                file_name = re.subn("[<>:\"/\|?*]", "", file_name)[0]
                file_name = re.subn("\\.{3,}", "…", file_name)[0]
                # NOTE(review): ``dir_name + file_name`` lacks a path
                # separator -- probably meant os.path.join(dir_name, file_name).
                if os.path.exists(dir_name + file_name):
                    continue
                command.append(file_name)
                command.append(urls[i])
                args.append((error_strings, command, str(i + 1), urls[i], girl, title))
            Kink.argument_lists.append(args)
        if len(error_strings) > 0:
            # NOTE(review): "w" is joined into the *path* here and no file
            # mode is passed to open() -- probably meant
            # open(os.path.join(dir_name, "errors.txt"), "w").
            f = open(os.path.join(dir_name, "errors.txt", "w"))
            f.write("\n".join(sorted(error_strings)))
            f.close()
    def __build_channel_url(self, name):
        # Map a channel slug to its full kink.com URL.
        return "https://www.kink.com/channel/" + name
    def __build_performer_url(self, id):
        # Map a performer/model id to its full kink.com URL.
        return "https://www.kink.com/model/" + id
    def download_image(self, args):
        # args: (error_list, command, index_str, url, girl, title).
        # Runs the prepared aria2c command; records a failure line in the
        # shared error list when the download exits non-zero.
        process = subprocess.run(args[1])
        if process.returncode != 0:
            args[0].append("\tImage " + args[2] + " failed; URL: " + args[3])
        print(args[4].title() + "/" + args[5].title() + " #" + args[2] + " complete")
def start_processes(async_result):
    """Block until *async_result* completes, discarding its value.

    Presumably *async_result* is a ``multiprocessing`` ``AsyncResult``;
    this is used purely for synchronisation -- TODO confirm with callers.
    """
    async_result.get()
def print_warning():
    """Tell the user this module is import-only."""
    notice = "This file is meant to be imported by other Python files, not run directly. Exiting now."
    print(notice)


if __name__ == "__main__":
    print_warning()
| [
"subprocess.run",
"time.sleep",
"multiprocessing.Pool",
"threading.Thread",
"re.subn"
] | [((539, 579), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.__dispatch'}), '(target=self.__dispatch)\n', (555, 579), False, 'import time, subprocess, os.path, re, multiprocessing, threading\n'), ((2584, 2597), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (2594, 2597), False, 'import time, subprocess, os.path, re, multiprocessing, threading\n'), ((2832, 2845), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (2842, 2845), False, 'import time, subprocess, os.path, re, multiprocessing, threading\n'), ((5143, 5156), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (5153, 5156), False, 'import time, subprocess, os.path, re, multiprocessing, threading\n'), ((9648, 9671), 'subprocess.run', 'subprocess.run', (['args[1]'], {}), '(args[1])\n', (9662, 9671), False, 'import time, subprocess, os.path, re, multiprocessing, threading\n'), ((4866, 4903), 're.subn', 're.subn', (['"""[<>:"/\\\\|?*]"""', '""""""', 'dir_name'], {}), '(\'[<>:"/\\\\|?*]\', \'\', dir_name)\n', (4873, 4903), False, 'import time, subprocess, os.path, re, multiprocessing, threading\n'), ((4920, 4953), 're.subn', 're.subn', (['"""\\\\.{3,}"""', '"""…"""', 'dir_name'], {}), "('\\\\.{3,}', '…', dir_name)\n", (4927, 4953), False, 'import time, subprocess, os.path, re, multiprocessing, threading\n'), ((8505, 8542), 're.subn', 're.subn', (['"""[<>:"/\\\\|?*]"""', '""""""', 'dir_name'], {}), '(\'[<>:"/\\\\|?*]\', \'\', dir_name)\n', (8512, 8542), False, 'import time, subprocess, os.path, re, multiprocessing, threading\n'), ((8559, 8592), 're.subn', 're.subn', (['"""\\\\.{3,}"""', '"""…"""', 'dir_name'], {}), "('\\\\.{3,}', '…', dir_name)\n", (8566, 8592), False, 'import time, subprocess, os.path, re, multiprocessing, threading\n'), ((8650, 8673), 'multiprocessing.Pool', 'multiprocessing.Pool', (['(8)'], {}), '(8)\n', (8670, 8673), False, 'import time, subprocess, os.path, re, multiprocessing, threading\n'), ((1430, 1470), 'multiprocessing.Pool', 'multiprocessing.Pool', 
(['self.process_limit'], {}), '(self.process_limit)\n', (1450, 1470), False, 'import time, subprocess, os.path, re, multiprocessing, threading\n'), ((5492, 5530), 're.subn', 're.subn', (['"""[<>:"/\\\\|?*]"""', '""""""', 'file_name'], {}), '(\'[<>:"/\\\\|?*]\', \'\', file_name)\n', (5499, 5530), False, 'import time, subprocess, os.path, re, multiprocessing, threading\n'), ((5549, 5583), 're.subn', 're.subn', (['"""\\\\.{3,}"""', '"""…"""', 'file_name'], {}), "('\\\\.{3,}', '…', file_name)\n", (5556, 5583), False, 'import time, subprocess, os.path, re, multiprocessing, threading\n'), ((6726, 6739), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (6736, 6739), False, 'import time, subprocess, os.path, re, multiprocessing, threading\n'), ((6870, 6884), 'time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', (6880, 6884), False, 'import time, subprocess, os.path, re, multiprocessing, threading\n'), ((7935, 7948), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (7945, 7948), False, 'import time, subprocess, os.path, re, multiprocessing, threading\n'), ((8079, 8093), 'time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', (8089, 8093), False, 'import time, subprocess, os.path, re, multiprocessing, threading\n'), ((8948, 8986), 're.subn', 're.subn', (['"""[<>:"/\\\\|?*]"""', '""""""', 'file_name'], {}), '(\'[<>:"/\\\\|?*]\', \'\', file_name)\n', (8955, 8986), False, 'import time, subprocess, os.path, re, multiprocessing, threading\n'), ((9006, 9040), 're.subn', 're.subn', (['"""\\\\.{3,}"""', '"""…"""', 'file_name'], {}), "('\\\\.{3,}', '…', file_name)\n", (9013, 9040), False, 'import time, subprocess, os.path, re, multiprocessing, threading\n')] |
# This file is part of Indico.
# Copyright (C) 2002 - 2020 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
import uuid
from indico.util.locators import locator_property
from indico.util.string import format_repr, return_ascii
class EventLabel(object):
    """Plain in-memory representation of an event label."""

    # TODO: convert this to a proper model in 2.3
    id = None
    title = None
    color = None

    def __init__(self, **kwargs):
        """Initialise attributes from keyword arguments.

        A random UUID string is generated for ``id`` when the caller does
        not supply one.
        """
        if 'id' not in kwargs:
            self.id = str(uuid.uuid4())
        # ``iteritems`` exists only on Python 2 dicts and raised
        # AttributeError on Python 3; ``items`` works on both.
        for name, value in kwargs.items():
            setattr(self, name, value)

    def __eq__(self, other):
        return isinstance(other, EventLabel) and other.id == self.id

    def __ne__(self, other):
        # Explicit inverse is required for a consistent ``!=`` on Python 2.
        return not (self == other)

    @locator_property
    def locator(self):
        return {'event_label_id': self.id}

    @return_ascii
    def __repr__(self):
        return format_repr(self, 'id', _text=self.title)
| [
"indico.util.string.format_repr",
"uuid.uuid4"
] | [((939, 980), 'indico.util.string.format_repr', 'format_repr', (['self', '"""id"""'], {'_text': 'self.title'}), "(self, 'id', _text=self.title)\n", (950, 980), False, 'from indico.util.string import format_repr, return_ascii\n'), ((593, 605), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (603, 605), False, 'import uuid\n')] |
import traceback
from alog import debug, info, error
from mykonos.core.core import Core
from mykonos.keywords.management_device import ManagementDevice
from mykonos.keywords.decorators import Parallel
class LocatorElement(Core):
    """Robot Framework keywords for resolving UIAutomator locators,
    optionally against a specific (parallel) device."""
    def __init__(self):
        # Default device handle plus a helper for resolving named devices.
        self.device_mobile = self.device()
        self.management_device = ManagementDevice()
    # NOTE(review): @Parallel.device_check injects the ``device`` argument
    # when ``devices_parallel`` is used -- semantics defined in
    # mykonos.keywords.decorators; confirm there before changing signatures.
    @Parallel.device_check
    def get_locator(self, device=None, *argument, **settings):
        """Access locator from device.
        **selector support:**
        * text, textContains, textMatches, textStartsWith
        * className, classNameMatches
        * description,descriptionContains,descriptionMatches,descriptionStartsWith
        * checkable, checked, clickable, longClickable
        * scrollable, enabled,focusable, focused, selected
        * packageName, packageNameMatches
        * resourceId, resourceIdMatches
        * index, instance
        **Example:**
        || ${locator}= Get Locator | text=sample text
        With Device:
        || @{emulator} = | 192.168.1.1 | 192.168.1.2
        || ${locator}= Get Locator | text=sample text | devices_parallel=@{emulator}
        """
        if device is not None:
            # A specific device was requested: resolve its handle first.
            get_device = self.management_device.scan_current_device(device)
            return get_device(*argument, **settings)
        else:
            return self.device_mobile(*argument, **settings)
    def get_child(self, parent, *argument, **settings):
        """Access child locator from device.
        **Example:**
        || ${locator}= Get Locator | text=sample text
        || ${child}= Get Child | parent=${locator} text=sample text
        """
        return parent.child(*argument, **settings)
    def get_sibling(self, parent, *argument, **settings):
        """Access sibling locator from device.
        **Example:**
        || ${locator}= Get Locator | text=sample text
        || ${sibling}= Get Sibiling | parent=${locator} text=sample text
        """
        return parent.sibling(*argument, **settings)
    def left_position(self, parent, *argument, **settings):
        """Access the element to the left of *parent* on the device.
        **Example:**
        || ${locator}= Get Locator | text=sample text
        || ${left}= Left Position | parent=${locator} text=sample text
        """
        return parent.left(*argument, **settings)
    def right_position(self, parent, *argument, **settings):
        """Access the element to the right of *parent* on the device.
        **Example:**
        || ${locator}= Get Locator | text=sample text
        || ${right}= Right Position | parent=${locator} text=sample text
        """
        return parent.right(*argument, **settings)
    def up_position(self, parent, *argument, **settings):
        """Access the element above *parent* on the device.
        **Example:**
        || ${locator}= Get Locator | text=sample text
        || ${up}= Up Position | parent=${locator} text=sample text
        """
        return parent.up(*argument, **settings)
    def down_position(self, parent, *argument, **settings):
        """Access the element below *parent* on the device.
        **Example:**
        || ${locator}= Get Locator | text=sample text
        || ${down}= Down Position | parent=${locator} text=sample text
        """
        return parent.down(*argument, **settings)
    @Parallel.device_check
    def get_locator_by_index(self, device=None, *argument, **settings):
        """Get Element locator by index on device.
        **Example:**
        || Get Locator By Index | text=sample_text | index=1
        || ${locator}= Get Locator | text=sample text
        || Get Locator By Index | locator=${locator} | index=1
        """
        # ``index`` is consumed here so it is not forwarded as a selector.
        index = int(settings['index'])
        del settings['index']
        if 'locator' in settings:
            # A pre-resolved locator was handed in: just index into it.
            locator = settings['locator']
            del settings['locator']
            return locator[index]
        else:
            if device is not None:
                get_device = self.management_device.scan_current_device(device)
                return get_device(*argument, **settings)[index]
            else:
                return self.device_mobile(*argument, **settings)[index]
    def handlers(self, action, function):
        """Call customized function on device.
        **Example:**
        || Handlers | action=on | function=sample_function
        """
        # NOTE(review): any ``action`` not containing 'on' or 'off' falls
        # through and returns None silently.
        if 'on' in action:
            return self.device_mobile.handlers.on(function)
        elif 'off' in action:
            return self.device_mobile.handlers.off(function)
class WatcherElement(Core):
    """Keywords for registering and controlling UIAutomator watchers.

    A watcher performs fallback actions whenever a selector cannot be
    found on the device.
    """

    def __init__(self):
        self.device_mobile = self.device()

    def __watcher_register(self, **settings):
        # Pull the watcher name out of the kwargs and open a registration
        # with that name on the device.
        watcher_name = settings.pop('name')
        return self.device_mobile.watcher(watcher_name)

    def watcher(self, **settings):
        """Register a watcher that fires when a selector cannot be found.

        name=name of watcher
        WHEN, className=sample_class
        WHEN, packageName=sample_package
        **Example:**
        || ${sample_watcher}=name=sample_watcher | className=sample_class
        || Click Element | watcher=${sample_watcher} | text=sample_text
        """
        registered = self.__watcher_register(name=settings.pop('name'))
        self.__watcher = registered
        return registered.when(**settings)

    def watcher_action(self, action, **settings):
        """Run a management *action* against the registered watchers.

        run=Force to run all watchers
        remove=Remvoe watchers
        reset=Reset all triggered watchers
        list=List all watchers
        triggered=Check if there is any watcher triggered
        **Example:**
        || Watcher Action | action=run
        || Watcher Action | action=remove
        || Watcher Action | action=remove | name=sample_watcher
        || Watcher Action | action=reset
        || Watcher Action | action=list
        || Watcher Action | action=triggered
        """
        if 'run' in action:
            return self.device_mobile.watchers.run()
        if 'remove' in action:
            # Remove a single named watcher when a name is given,
            # otherwise remove them all.
            if 'name' in settings:
                return self.device_mobile.watchers.remove(settings['name'])
            return self.device_mobile.watchers.remove()
        if 'list' in action:
            return self.device_mobile.watchers
        if 'reset' in action:
            return self.device_mobile.watchers.reset()
        if 'triggered' in action:
            return self.device_mobile.watchers.triggered
| [
"mykonos.keywords.management_device.ManagementDevice"
] | [((332, 350), 'mykonos.keywords.management_device.ManagementDevice', 'ManagementDevice', ([], {}), '()\n', (348, 350), False, 'from mykonos.keywords.management_device import ManagementDevice\n')] |
import abc
from typing import NewType
from copy import copy
def _(): # make the registered classes variable invisible
__registered_classes = []
def _register(cls):
if hasattr(cls, 'do_turn'):
print(f'registering {cls.__name__}...')
__registered_classes.append(cls)
else:
print(f'class {cls.__name__} does not have a method do_turn')
def _get_registered_classes():
return copy(__registered_classes)
return _register, _get_registered_classes
register, get_registered_classes = _()
class PrisonerAction:
def __init__(self, name):
self.__name = name
def __str__(self):
return self.__name
def __repr__(self):
return str(self)
LOYAL = PrisonerAction('LOYAL')
BETRAY = PrisonerAction('BETRAY')
class Prisoner(abc.ABC):
"""
prisoner base class
all prisoner bots must inherit from the prisoner class
Note! constructor for this class cannot accept any arguments
Note! printing will not work when using do_turn
"""
def __init_subclass__(cls, **kwargs):
register(cls)
@abc.abstractmethod
def do_turn(self, history: list) -> PrisonerAction:
"""
runs a single turn and return's the prisoners choice
:param history: a list of all the history so fur, each event is consisted of two elements, the first is your
action that turn, and the second is your opponent's action.
:type history: list[tuple[int, int]]
:return: the action for this turn, either LOYAL or BETRAY
"""
| [
"copy.copy"
] | [((448, 474), 'copy.copy', 'copy', (['__registered_classes'], {}), '(__registered_classes)\n', (452, 474), False, 'from copy import copy\n')] |
"""Serializer de los miembros de un grupo familiar"""
# Django REST Framework
from rest_framework import serializers
# Serializers
from apis.users.serializers import UserModelSerializer
from apis.pacient.serializers import PacientModelSerializer
# Models
from apis.family_group.models import Memberships
class MembershipModelSerializer(serializers.ModelSerializer):
"""Modelo de serializer para los mimebros de una familia"""
user = UserModelSerializer(read_only=True)
affiliated_by = serializers.StringRelatedField()
pacient = PacientModelSerializer(read_only=True)
class Meta:
"""Clase Meta"""
model = Memberships
fields = (
'user',
'is_admin',
'affiliated', 'remaining_affiliates',
'affiliated_by',
)
read_only_fields = (
'user',
'affiliated', 'remainig_affiliates',
'affiliated_by'
)
class AddMemberSerializer(serializers.Serializer):
"""Serializer para agregar un familiar
al grupo creado por el familiar titular
"""
user = serializers.HiddenField(default=serializers.CurrentUserDefault())
def validate_user(self, data):
"""Verifica que el familiar ya fue registrado en grupo familiar"""
family_group = self.context['family_group']
user = data
q = Memberships.objects.filter(family_group=family_group, user=user)
if q.exists():
raise serializers.ValidationError('Este usuario ya fue registrado en el grupo familiar')
return data
def validate(self, data):
"""Verifica que el familiar titular pueda agregar nuevos familiares al grupo"""
family_group = self.context['family_group']
if family_group.members.count() >= family_group.family_limit:
raise serializers.ValidationError('El grupo familiar alcanzo el limite de afiliados')
return data
def create(self, validated_data):
"""Crea un nuevo afiliado al grupo familiar"""
family_group = self.context['family_group']
user = validated_data['user']
# Creacion del afiliado
member = Memberships.objects.create(
user=user,
pasient=user.pacient,
family_group=family_group,
affiliated_by=user.pacient
)
issuer_afiliated = Memberships.objects.get(user=user, family_group=family_group)
issuer_afiliated.remaining_affiliates -= 1
issuer_afiliated.save()
return member
| [
"apis.pacient.serializers.PacientModelSerializer",
"apis.family_group.models.Memberships.objects.filter",
"rest_framework.serializers.ValidationError",
"rest_framework.serializers.StringRelatedField",
"apis.users.serializers.UserModelSerializer",
"apis.family_group.models.Memberships.objects.create",
"r... | [((447, 482), 'apis.users.serializers.UserModelSerializer', 'UserModelSerializer', ([], {'read_only': '(True)'}), '(read_only=True)\n', (466, 482), False, 'from apis.users.serializers import UserModelSerializer\n'), ((503, 535), 'rest_framework.serializers.StringRelatedField', 'serializers.StringRelatedField', ([], {}), '()\n', (533, 535), False, 'from rest_framework import serializers\n'), ((550, 588), 'apis.pacient.serializers.PacientModelSerializer', 'PacientModelSerializer', ([], {'read_only': '(True)'}), '(read_only=True)\n', (572, 588), False, 'from apis.pacient.serializers import PacientModelSerializer\n'), ((1369, 1433), 'apis.family_group.models.Memberships.objects.filter', 'Memberships.objects.filter', ([], {'family_group': 'family_group', 'user': 'user'}), '(family_group=family_group, user=user)\n', (1395, 1433), False, 'from apis.family_group.models import Memberships\n'), ((2171, 2290), 'apis.family_group.models.Memberships.objects.create', 'Memberships.objects.create', ([], {'user': 'user', 'pasient': 'user.pacient', 'family_group': 'family_group', 'affiliated_by': 'user.pacient'}), '(user=user, pasient=user.pacient, family_group=\n family_group, affiliated_by=user.pacient)\n', (2197, 2290), False, 'from apis.family_group.models import Memberships\n'), ((2372, 2433), 'apis.family_group.models.Memberships.objects.get', 'Memberships.objects.get', ([], {'user': 'user', 'family_group': 'family_group'}), '(user=user, family_group=family_group)\n', (2395, 2433), False, 'from apis.family_group.models import Memberships\n'), ((1140, 1172), 'rest_framework.serializers.CurrentUserDefault', 'serializers.CurrentUserDefault', ([], {}), '()\n', (1170, 1172), False, 'from rest_framework import serializers\n'), ((1475, 1562), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""Este usuario ya fue registrado en el grupo familiar"""'], {}), "(\n 'Este usuario ya fue registrado en el grupo familiar')\n", (1502, 1562), False, 'from 
rest_framework import serializers\n'), ((1837, 1916), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""El grupo familiar alcanzo el limite de afiliados"""'], {}), "('El grupo familiar alcanzo el limite de afiliados')\n", (1864, 1916), False, 'from rest_framework import serializers\n')] |
import requests
import argparse
import sys
import os
import urllib3
import random
def user_agent():
    """Pick and return one browser User-Agent string at random.

    Used to vary the UA header on each outgoing proxy-check request.
    """
    agents = (
        "Mozilla/5.0 CK={ } (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko",
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36",
        "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36",
        "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36",
        "Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:24.0) Gecko/20100101 Firefox/24.0",
        "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9a1) Gecko/20070308 Minefield/3.0a1",
        "Mozilla/5.0 (Linux; U; Android 2.2) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1",
        "Mozilla/5.0 (Linux; Android 4.2.1; en-us; Nexus 5 Build/JOP40D) AppleWebKit/535.19 (KHTML, like Gecko; googleweblight) Chrome/38.0.1025.166 Mobile Safari/535.19",
        "Mozilla/5.0 (Linux; Android 6.0.1; RedMi Note 5 Build/RB3N5C; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/68.0.3440.91 Mobile Safari/537.36",
    )
    return random.choice(agents)
def console_clear():
    """Clear the terminal screen.

    Returns 0 on success, 1 if the attempt raised (after printing a notice).
    Uses `cls` on Windows and `clear` everywhere else.
    """
    command = "cls" if sys.platform == "win32" else "clear"
    try:
        os.system(command)
    except Exception:
        print("could not clear console")
        return 1
    return 0
# Clear the console once at script start-up.
console_clear()
def file_writer(filename, data):
    """Append *data* to *filename*, creating the file if it does not exist.

    Fix: the original opened the file manually and only closed it on the
    success path; a `with` block guarantees the handle is closed even if
    the write raises.
    """
    with open(filename, "a+") as handle:
        handle.write(data)
# Command-line interface: takes a proxies file and a target URL, then probes
# each proxy with a GET request and records the working ones.
parse = argparse.ArgumentParser(description="Send request to a website using proxies to check if it's online How to: python proxyon.py proxies.txt https://www.google.com/")
parse.add_argument("proxy", type=str, help="Enter the list of proxies Example: proxies.txt")
parse.add_argument("web", type=str, help="Enter a website to send http requests using proxies.txt Example: https://www.google.com/")
args = parse.parse_args()
with open(args.proxy, "r") as opnr:
    for x in opnr:
        # Only lines starting with a digit are treated as proxy entries
        # (skips comments/blank lines in the proxies file).
        if x[0].strip().isdigit():
            try:
                # Route the request through the candidate proxy with a random
                # User-Agent; 10 s timeout so dead proxies don't hang the scan.
                req = requests.get(args.web, headers={"User-Agent": user_agent()}, timeout=10 ,proxies={"https": x.strip(), "http": x.strip()})
                if req.status_code == 200:
                    # Working proxy: persist it and report in green.
                    file_writer("onlineProxies.txt", x+"\n")
                    print("\033[1;32;40m[+]Status code: {0} OK Proxy: {1}\033[1;0m".format(req.status_code, x))
                else:
                    print("\033[1;36;40m[-]Status code: {0} Proxy: {1}\033[1;0m".format(req.status_code, x))
            except Exception:
                # Connection/timeout failures are expected for dead proxies;
                # report and continue with the next one.
                print("\033[1;36m[-]Error: {0}\033[1;0m".format(x))
                pass
| [
"os.system",
"random.choice",
"argparse.ArgumentParser"
] | [((1677, 1851), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Send request to a website using proxies to check if it\'s online How to: python proxyon.py proxies.txt https://www.google.com/"""'}), '(description=\n "Send request to a website using proxies to check if it\'s online How to: python proxyon.py proxies.txt https://www.google.com/"\n )\n', (1700, 1851), False, 'import argparse\n'), ((1252, 1276), 'random.choice', 'random.choice', (['useragent'], {}), '(useragent)\n', (1265, 1276), False, 'import random\n'), ((1363, 1379), 'os.system', 'os.system', (['"""cls"""'], {}), "('cls')\n", (1372, 1379), False, 'import os\n'), ((1408, 1426), 'os.system', 'os.system', (['"""clear"""'], {}), "('clear')\n", (1417, 1426), False, 'import os\n')] |
import pyFT.FTError
import datetime as dt
# RFC 3339 / ISO-8601 timestamp layout with fixed zero milliseconds and a
# literal 'Z' (UTC) suffix.
strfRFC3339 = '%Y-%m-%dT%H:%M:%S.000Z'
def FTdateTime(val):
    """Format datetime *val* as an RFC 3339 UTC timestamp string."""
    return val.strftime(strfRFC3339)
| [
"datetime.datetime.strftime"
] | [((116, 154), 'datetime.datetime.strftime', 'dt.datetime.strftime', (['val', 'strfRFC3339'], {}), '(val, strfRFC3339)\n', (136, 154), True, 'import datetime as dt\n')] |
#for data cleaning and analysis
import pandas as pd
import numpy as np
from random import randint
#for visualization
import matplotlib.pyplot as plt
import seaborn as sns
#for directory-related functions
import os
import glob
import getpass
#for web-scraping baseball data
import pybaseball as pyb
#for drafting
import math
import random
#for clustering
from sklearn.cluster import MeanShift,estimate_bandwidth
from sklearn.model_selection import train_test_split, cross_validate
#import time to see how long the script runs for
import time
import datetime
from datetime import date, timedelta
#import tkinter to build GUIs
import tkinter as tk
from tkinter import filedialog
#for warnings
import warnings
warnings.filterwarnings("ignore")
#for progress bar
from tqdm import tqdm
#enter forecasting and drafting parameters
def entry():
    """Open a Tk dialog collecting the forecasting/drafting parameters.

    Blocks in ``root.mainloop()`` until Submit is pressed (which destroys
    the window). Sets the module-level globals ``simulations``,
    ``num_competitors``, ``num_rounds`` and ``num_iterations`` from the
    entry widgets, then returns them as a 4-tuple.
    NOTE(review): the globals only exist after Submit is clicked; closing
    the window any other way raises NameError on return.
    """
    root = tk.Tk()
    root.geometry("400x300")
    root.title('Select Forecasting and Drafting Parameters')
    label_simulations = tk.Label(root, text='Choose the number of simulations for forecasting')
    entry_simulations = tk.Entry(root)
    label_num_competitors = tk.Label(root, text='Choose Number of Competitors')
    entry_num_competitors = tk.Entry(root)
    label_num_rounds = tk.Label(root, text='Choose the number of rounds in the draft')
    entry_num_rounds = tk.Entry(root)
    label_num_iterations = tk.Label(root, text="Choose the number of iterations for the Draft Agent's Exploration")
    entry_num_iterations = tk.Entry(root)
    label_simulations.pack()
    entry_simulations.pack()
    label_num_competitors.pack()
    entry_num_competitors.pack()
    label_num_rounds.pack()
    entry_num_rounds.pack()
    label_num_iterations.pack()
    entry_num_iterations.pack()
    # Read the entry widgets into module-level globals and close the window.
    def enter_params():
        global simulations
        global num_competitors
        global num_rounds
        global num_iterations
        simulations = int(entry_simulations.get())
        num_competitors = int(entry_num_competitors.get())
        num_rounds = int(entry_num_rounds.get())
        num_iterations = int(entry_num_iterations.get())
        root.destroy()
    # Button callback; ``dateStore`` is set but not read here — presumably a
    # leftover flag shared with get_dates(). TODO confirm it can be removed.
    def get_params():
        global dateStore
        dateStore = True
        enter_params()
    get_params_button = tk.Button(root, text='Submit', command= get_params)
    get_params_button.pack()
    root.mainloop()
    return simulations, num_competitors, num_rounds, num_iterations
#allow the user to select a date range
def get_dates() :
    """Open a Tk dialog asking for a start and end year (YYYY).

    Sets the module-level globals ``start_time`` and ``end_time`` (naive
    datetimes parsed from the entries), then returns a dict mapping each
    year string in the range to its integer year.
    """
    root = tk.Tk()
    root.geometry("400x300")
    root.title('Select Start and End time')
    label_start = tk.Label(root, text='Start Year: YYYY')
    entry_start = tk.Entry(root)
    label_end = tk.Label(root, text='End Year: YYYY')
    entry_end = tk.Entry(root)
    label_start.pack()
    entry_start.pack()
    label_end.pack()
    entry_end.pack()
    # Parse the two entries as years and close the window.
    def enter_year():
        global start_time
        global end_time
        start_time = datetime.datetime.strptime(entry_start.get(),'%Y')
        end_time =datetime.datetime.strptime(entry_end.get(),'%Y')
        root.destroy()
    def get_year():
        global dateStore
        dateStore = True
        enter_year()
    get_year_button = tk.Button(root, text='Submit', command= get_year)
    get_year_button.pack()
    root.mainloop()
    #get range of years
    date_range = pd.date_range(start=start_time, end = end_time,freq='D')
    #create dictionary to store years
    # NOTE(review): the loop variable ``date`` shadows the imported
    # ``datetime.date`` class inside this comprehension (harmless here).
    years = {str(date.year) : date.year for date in date_range}
    return years
#make a dictionary with a dataframe for each season for hitters, pitchers, and teams
def make_period_dicts(dictionary):
    """Download season-level stat tables for every year key in *dictionary*.

    Returns two dicts keyed by year string: FanGraphs batting stats and
    pitching stats (``qual=False`` keeps non-qualified players).
    """
    batter_df = {}
    for season in dictionary.keys():
        batter_df[season] = pyb.batting_stats(int(season), qual=False)
    pitcher_df = {}
    for season in dictionary.keys():
        pitcher_df[season] = pyb.pitching_stats(int(season), qual=False)
    return batter_df, pitcher_df
#forecaster class
class Forecaster:
    """Monte-Carlo fantasy-baseball forecaster.

    On construction it downloads per-season batting/pitching tables (via
    ``make_period_dicts``/pybaseball). ``monte_carlo_forecast`` then builds a
    per-player database, clusters players with MeanShift, samples each stat,
    converts the samples to fantasy points, and produces risk-adjusted
    projections in ``self.all_players``.
    """
    def __init__(self, simulations, num_competitors, num_rounds, num_iterations,years):
        """Store simulation/draft parameters and download the season data.

        simulations     -- Monte-Carlo samples drawn per player stat
        num_competitors -- number of teams in the mock draft
        num_rounds      -- number of draft rounds
        num_iterations  -- UCT iterations for the draft agent
        years           -- dict of {year string: year int} seasons to use
        """
        self.user = getpass.getuser()
        self.today = date.today().strftime("%m_%d_%y")
        self.simulations = simulations
        self.num_competitors = num_competitors
        self.num_rounds = num_rounds
        self.num_iterations = num_iterations
        self.years = years
        print('Downloading Data')
        print('')
        self.seasons_dict_batter, self.seasons_dict_pitcher = make_period_dicts(self.years)
    #perform monte carlo full season forecast
    def monte_carlo_forecast(self):
        """Run the full forecasting pipeline.

        Stages (each implemented as a local helper below): merge season
        frames per player, filter to current-season players, cluster with
        MeanShift, sample stat distributions, compute fantasy points and
        risk-adjusted scores, attach positions, and combine everything into
        ``self.all_players``. Reads the module-level globals ``start_time``
        and ``end_time`` set by ``get_dates()``.
        """
        print('Constructing the Database')
        print('')
        #merge the frames together
        # Outer-joins the per-key frames on ``onCols``; value columns get a
        # ``_<key>`` suffix so seasons stay distinguishable after the merge.
        def merge_dict(dfDict, onCols, how='outer', naFill=None):
            keys = list(dfDict.keys())
            for i in range(len(keys)):
                key = keys[i]
                df0 = dfDict[key]
                cols = list(df0.columns)
                valueCols = list(filter(lambda x: x not in (onCols), cols))
                df0 = df0[onCols + valueCols]
                df0.columns = onCols + [(s + '_' + key) for s in valueCols]
                if (i == 0):
                    outDf = df0
                else:
                    outDf = pd.merge(outDf, df0, how=how, on=onCols)
            if (naFill != None):
                outDf = outDf.fillna(naFill)
            return(outDf)
        #get the column names
        def get_column_names(dictionary):
            key_list = list(dictionary.keys())
            columns_list = list(dictionary[key_list[0]].columns)
            return columns_list
        self.pitcher_columns_list, self.batter_columns_list = get_column_names(self.seasons_dict_pitcher), get_column_names(self.seasons_dict_batter)
        #merge the seasons together
        def merge_season_dicts():
            self.merged_batter_seasons_dict = merge_dict(self.seasons_dict_batter, self.batter_columns_list, how = 'outer', naFill = None)
            self.merged_pitcher_seasons_dict = merge_dict(self.seasons_dict_pitcher, self.pitcher_columns_list, how = 'outer', naFill = None)
            return self.merged_batter_seasons_dict, self.merged_pitcher_seasons_dict
        merge_season_dicts()
        #make a dataframe for each hitter
        def make_player_dicts(dataframe):
            df = {name : dataframe[dataframe['Name']==name] for name in dataframe['Name']}
            return df
        self.batter_dict, self.pitcher_dict = make_player_dicts(self.merged_batter_seasons_dict), make_player_dicts(self.merged_pitcher_seasons_dict)
        #get the current year
        def get_year_names(dictionary):
            keys_list = list(dictionary.keys())
            return keys_list
        self.years_list = get_year_names(self.years)
        self.current_year = self.years_list[-1]
        #get only the players who played in the current year
        def filter_for_current_players(dictionary, year):
            current_dict = {name : dictionary[name] for name in dictionary.keys() if dictionary[name]['Season'].values[-1]==int(year)}
            return current_dict
        self.current_pitcher_dict, self.current_batter_dict = filter_for_current_players(self.pitcher_dict, self.current_year), filter_for_current_players(self.batter_dict, self.current_year)
        #raw stats for batters and pitchers
        def stats():
            batter_stats = ['1B', '2B','3B', 'HR','R','RBI','BB','SO','SB', 'IDfg']
            pitcher_stats = ['W', 'IP', 'ER', 'SO', 'BB', 'SV', 'HLD', 'IDfg']
            return batter_stats, pitcher_stats
        self.batter_stats, self.pitcher_stats = stats()
        #filter by these stats
        def filter_for_current_stats(dictionary, stats):
            current_dict = {name:dictionary[name][stats] for name in dictionary.keys()}
            return current_dict
        self.current_stat_batter, self.current_stat_pitcher = filter_for_current_stats(self.current_batter_dict, self.batter_stats), filter_for_current_stats(self.current_pitcher_dict, self.pitcher_stats)
        #team names and their abbreviations
        def teams_abbreviatons():
            team_list = ['Diamondbacks-ARI', 'Braves-ATL', 'Orioles-BAL', 'Red Sox-BOS', 'Cubs-CHC',
                         'White Sox-CHW', 'Reds-CIN' , 'Indians-CLE' , 'Rockies-COL', 'Tigers-DET' ,
                         'Marlins-MIA' ,'Astros-HOU' ,'Royals-KCR' ,'Angels-LAA','Dodgers-LAD',
                         'Brewers-MIL' ,'Twins-MIN','Mets-NYM','Yankees-NYY','Athletics-OAK','Phillies-PHI',
                         'Pirates-PIT' ,'Padres-SDP' ,'Giants-SFG','Mariners-SEA', 'Cardinals-STL',
                         'Rays-TB' ,'Rangers-TEX' ,'Blue Jays-TOR' ,'Nationals-WSN']
            return team_list
        self.team_list = teams_abbreviatons()
        #split the team names
        def split_names(team_list) :
            split_list = [team.split('-') for team in team_list]
            return split_list
        self.split_teams = split_names(self.team_list)
        #create dict for team names
        def create_dict(team_list):
            teams_dict = {team[1]: team[0] for team in team_list}
            return teams_dict
        self.teams_dict = create_dict(self.split_teams)
        #get a list of the teams
        def get_team_name_lists(team_list):
            team_list_full = [team.split('-')[0] for team in team_list]
            team_list_abv = [team.split('-')[1] for team in team_list]
            return team_list_full, team_list_abv
        self.team_list_full, self.team_list_abv = get_team_name_lists(self.team_list)
        #get all the schedules
        # NOTE(review): downloads every team's schedule for every season; the
        # result is stored but not used later in this method — confirm needed.
        def get_schedules(team_list_abv, years_list, team_list_full):
            season_list = []
            season_list = [{team_list_ful: {year_list:pyb.schedule_and_record(int(year_list), team_list_ab)}} for year_list in years_list for team_list_ab, team_list_ful in zip(team_list_abv, team_list_full)]
            return season_list
        self.season_list = get_schedules(self.team_list_abv, self.years_list, self.team_list_full)
        #drop pitchers from the hitters list
        def drop_pitchers(current_stat_batter, current_stat_pitcher):
            for key in current_stat_pitcher.keys():
                if key in current_stat_batter.keys() and key in current_stat_pitcher.keys():
                    del current_stat_batter[key]
            return current_stat_batter
        self.current_stat_batter = drop_pitchers(self.current_stat_batter, self.current_stat_pitcher)
        #create averages for each player for each stat
        def player_averager(dictionary):
            average_players ={}
            for key in dictionary.keys():
                average_players.update({key : dictionary[key].mean().round().to_frame().transpose()})
                average_players[key] = average_players[key].reset_index()
                average_players[key].rename(columns = {'index': 'Name'}, inplace = True)
                average_players[key]['Name']= key
            return average_players
        self.average_batters, self.average_pitchers = player_averager(self.current_stat_batter), player_averager(self.current_stat_pitcher)
        #columns to merge on
        def merge_columns(average_batters, average_pitchers):
            #return list(average_batters['<NAME>'].columns), list(average_pitchers['<NAME>'].columns)
            return list(average_batters[list(average_batters.keys())[0]].columns), list(average_pitchers[list(average_pitchers.keys())[0]].columns)
        self.batter_columns, self.pitcher_columns = merge_columns(self.average_batters, self.average_pitchers)
        #merge the average players to create the clusters
        def average_merger(average_batters, batter_columns,average_pitchers, pitcher_columns):
            return merge_dict(average_batters, batter_columns, how = 'outer', naFill = None), merge_dict(average_pitchers, pitcher_columns, how = 'outer', naFill = None)
        self.merged_batter_df, self.merged_pitcher_df = average_merger(self.average_batters, self.batter_columns, self.average_pitchers, self.pitcher_columns)
        #separate starting and relief pitchers and account for overlap
        # NOTE(review): the '<NAME>' literals below look like anonymized or
        # corrupted player names (special-cased two-way players?) — confirm
        # the intended names before relying on these filters.
        def separate_pitchers(merged_pitcher_df):
            starting_pitchers = merged_pitcher_df[(merged_pitcher_df['SV'] ==0) &(merged_pitcher_df['HLD'] ==0) | (merged_pitcher_df['Name']=='<NAME>') | (merged_pitcher_df['Name']=='<NAME>')]
            relief_pitchers = merged_pitcher_df[(merged_pitcher_df['SV'] >=1) & (merged_pitcher_df['SV'] <8) | (merged_pitcher_df['HLD']> 0) & (merged_pitcher_df['Name'] !='<NAME>') & (merged_pitcher_df['Name']!='<NAME>')]
            closers = merged_pitcher_df[(merged_pitcher_df['SV'] >10) & (merged_pitcher_df['HLD'] >= 0) & (merged_pitcher_df['Name'] !='<NAME>') & (merged_pitcher_df['Name']!='<NAME>')]
            return starting_pitchers, relief_pitchers, closers
        self.starting_pitchers, self.relief_pitchers, self.closers = separate_pitchers(self.merged_pitcher_df)
        #cluster players to obtain a prior distrbution for each stat
        print('Clustering Players')
        print('')
        # MeanShift clustering over the numeric columns; ``quantile`` controls
        # the estimated bandwidth (smaller -> more, tighter clusters).
        def mean_shift(data,quantile) :
            #split the data
            train = data.drop('Name', axis =1)
            if 'Cluster Label' in train.columns:
                train = data.drop(['Name', 'Cluster Label', 'IDfg'], axis =1)
            else:
                pass
            #estimate the bandwith
            bandwidth = estimate_bandwidth(train, quantile=quantile, n_samples=100000)
            #instantiate the mean shift clustering object
            ms = MeanShift(bandwidth = bandwidth, bin_seeding = True, cluster_all =True, n_jobs = None )
            #fit the model to the training data
            ms.fit(train)
            #grab the cluster labels and centers
            labels = ms.labels_
            cluster_centers = ms.cluster_centers_
            #find the number of unique labels
            labels_unique = np.unique(labels)
            n_clusters_ = len(labels_unique)
            #find the clusters
            cluster_finder = data
            cluster_finder['Cluster Label'] = labels
            #create the clusters
            clusters = [cluster_finder[cluster_finder['Cluster Label']==label] for label in labels_unique]
            #extract the summary statistics
            cluster_describers = [cluster.describe() for cluster in clusters]
            return cluster_finder, clusters, cluster_describers
        self.cluster_finder_batter, self.clusters_batter, self.cluster_describers_batter = mean_shift(self.merged_batter_df,0.16)
        self.cluster_finder_starting_pitcher, self.clusters_starting_pitcher, self.cluster_describers_starting_pitcher = mean_shift(self.starting_pitchers, 0.18)
        self.cluster_finder_relief_pitcher, self.clusters_relief_pitcher, self.cluster_describers_relief_pitcher = mean_shift(self.relief_pitchers, 0.2)
        self.cluster_finder_closer, self.clusters_closer, self.cluster_describer_closer = mean_shift(self.closers, 0.19)
        #match the pitcher subsets properly
        # Keep only the per-player frames whose names appear in the given
        # clustered dataframe (starter/reliever/closer subsets).
        def subset_pitchers(dictionary, dataframe):
            for key in dictionary.keys():
                dictionary = {key: dictionary[key] for key in dataframe['Name']}
            return dictionary
        self.current_stat_starting = subset_pitchers(self.current_stat_pitcher, self.cluster_finder_starting_pitcher)
        self.current_stat_relief = subset_pitchers(self.current_stat_pitcher, self.cluster_finder_relief_pitcher)
        self.current_stat_closer = subset_pitchers(self.current_stat_pitcher, self.cluster_finder_closer)
        #use the clusters to make distributions for rookies
        #also use clusters for a similarity comparison for non-rookies
        # Appends each player's cluster-mean row to their own frame so the
        # sampler below has a prior even for players with little history.
        def player_matcher(dictionary,dataframe,columns):
            for key in dictionary.keys() :
                label = int(dataframe[dataframe['Name'] == key]['Cluster Label'])
                dictionary[key].loc[key] = dataframe[dataframe['Cluster Label']==label][columns[1:]].mean().round()
            return dictionary
        self.full_batters = player_matcher(self.current_stat_batter, self.cluster_finder_batter,self.batter_columns)
        self.full_starters = player_matcher(self.current_stat_starting, self.cluster_finder_starting_pitcher,self.pitcher_columns)
        self.full_relievers = player_matcher(self.current_stat_relief, self.cluster_finder_relief_pitcher,self.pitcher_columns)
        self.full_closers = player_matcher(self.current_stat_closer, self.cluster_finder_closer,self.pitcher_columns)
        #sample over the player distributions
        # For each player, draw ``simulations`` random values per stat between
        # a weighted min/mean and mean/max bound, and record the sample mean
        # ('<name> Mean') and standard deviation ('<name> Risk') rows.
        def sample_averager(dictionary,simulations):
            sample_players = {}
            sample_players_risk = {}
            for key in tqdm(dictionary.keys()):
                if len(dictionary[key]) > 1 :
                    for column in dictionary[key]:
                        if column == 'IDfg':
                            dictionary[key]= dictionary[key].replace([np.inf, -np.inf], np.nan).fillna(0) #if not needed, remove
                            randomizer = 0
                            means = 0
                            stdevs = 0
                            dictionary[key].loc[key + ' Mean',column] = means
                            dictionary[key].loc[key + ' Risk',column] = stdevs
                            sample_players.update({key : dictionary[key].loc[key + ' Mean'].to_frame().transpose()})
                            sample_players_risk.update({key: dictionary[key].loc[key + ' Risk'].to_frame().transpose()})
                            sample_players_risk[key]= sample_players_risk[key].replace([np.inf, -np.inf], np.nan).fillna(0) #if not needed, remove
                            sample_players[key] = sample_players[key].reset_index()
                            sample_players[key].rename(columns = {'index': 'Name'}, inplace = True)
                            sample_players_risk[key] = sample_players_risk[key].reset_index()
                            sample_players_risk[key].rename(columns = {'index': 'Name'}, inplace = True)
                # NOTE(review): this branch reuses ``column`` from the previous
                # iteration of the outer loop; if the very first key has a
                # single-row frame this raises NameError — confirm intended.
                else:
                    dictionary[key]= dictionary[key].replace([np.inf, -np.inf], np.nan).fillna(0) #if not needed, remove
                    randomizer = [randint(int(0.9*np.mean(dictionary[key][column])) + int(0.1*min(dictionary[key][column])), int(0.1*np.mean(dictionary[key][column]))+ int(0.9*max(dictionary[key][column]))) for i in range(simulations)]
                    means = np.mean(randomizer)
                    stdevs = np.std(randomizer)
                    dictionary[key].loc[key + ' Mean',column] = means
                    dictionary[key].loc[key + ' Risk',column] = stdevs
                    sample_players.update({key : dictionary[key].loc[key + ' Mean'].to_frame().transpose()})
                    sample_players_risk.update({key: dictionary[key].loc[key + ' Risk'].to_frame().transpose()})
                    sample_players_risk[key]= sample_players_risk[key].replace([np.inf, -np.inf], np.nan).fillna(0) #if not needed, remove
                    sample_players[key] = sample_players[key].reset_index()
                    sample_players[key].rename(columns = {'index': 'Name'}, inplace = True)
                    sample_players_risk[key] = sample_players_risk[key].reset_index()
                    sample_players_risk[key].rename(columns = {'index': 'Name'}, inplace = True)
            return sample_players, sample_players_risk
        self.sample_batters, self.sample_batters_risk = sample_averager(self.full_batters, self.simulations)
        self.sample_starting_pitchers, self.sample_starting_pitchers_risk = sample_averager(self.full_starters, self.simulations)
        self.sample_relief_pitchers, self.sample_relief_pitchers_risk = sample_averager(self.full_relievers, self.simulations)
        self.sample_closers, self.sample_closers_risk = sample_averager(self.full_closers, self.simulations)
        #get the dataframes for fantasy points
        #for point totals
        self.sample_batters = merge_dict(self.sample_batters, self.batter_columns)
        self.sample_starting_pitchers = merge_dict(self.sample_starting_pitchers, self.pitcher_columns)
        self.sample_relief_pitchers = merge_dict(self.sample_relief_pitchers, self.pitcher_columns)
        self.sample_closers = merge_dict(self.sample_closers, self.pitcher_columns)
        #for risk
        self.sample_batters_risk = merge_dict(self.sample_batters_risk, self.batter_columns)
        self.sample_starting_pitchers_risk = merge_dict(self.sample_starting_pitchers_risk, self.pitcher_columns)
        self.sample_relief_pitchers_risk = merge_dict(self.sample_relief_pitchers_risk, self.pitcher_columns)
        self.sample_closers_risk = merge_dict(self.sample_closers_risk, self.pitcher_columns)
        #calculate fantasy points for batters
        # League scoring weights are hard-coded here.
        def fantasy_batter_points(dataframe):
            dataframe['Fantasy Total'] = 2*dataframe['1B'] + 4*dataframe['2B'] + 6*dataframe['3B'] + 8*dataframe['HR']+ 1*dataframe['BB'] + 1*dataframe['R']+ 1.5*dataframe['RBI'] -0.5*dataframe['SO'] + 3*dataframe['SB']
            return dataframe
        #for points
        self.sample_batters = fantasy_batter_points(self.sample_batters)
        #for risk
        self.sample_batters_risk = fantasy_batter_points(self.sample_batters_risk)
        #calculate fantasy points for pitchers
        def fantasy_pitcher_points(dataframe):
            dataframe['Fantasy Total'] = 0.99*dataframe['IP'] -3*dataframe['ER'] + 4*dataframe['W'] + 3*dataframe['SV']+ 3*dataframe['SO'] + 2*dataframe['HLD']
            return dataframe
        #for points
        self.sample_starting_pitchers = fantasy_pitcher_points(self.sample_starting_pitchers)
        self.sample_relief_pitchers = fantasy_pitcher_points(self.sample_relief_pitchers)
        self.sample_closers = fantasy_pitcher_points(self.sample_closers)
        #for risk
        self.sample_starting_pitchers_risk = fantasy_pitcher_points(self.sample_starting_pitchers_risk)
        self.sample_relief_pitchers_risk = fantasy_pitcher_points(self.sample_relief_pitchers_risk)
        self.sample_closers_risk = fantasy_pitcher_points(self.sample_closers_risk)
        print('')
        print('Simulation Complete')
        print('')
        #naive risk adjusted scores
        # Score = 0.75 * projected points - 0.25 * risk (sample stdev).
        # NOTE(review): the loop variable ``risk`` shadows the ``risk``
        # dataframe parameter — works, but rename for clarity when touched.
        def risk_adjusted_scores(points, risk):
            #get risk adjusted scores
            risk_adjusted_score = []
            for score, risk in zip(points['Fantasy Total'], risk['Fantasy Total']):
                risk_adjusted_score.append(0.75*score - 0.25*risk)
            #make new dataframe
            risk_adjusted = pd.DataFrame({'IDfg': points['IDfg'],'Name':points['Name'].apply(lambda x : x.replace(' Mean', '')), 'Risk Adjusted Score': risk_adjusted_score})
            return risk_adjusted
        #hitters
        self.risk_adjusted_batters = risk_adjusted_scores(self.sample_batters, self.sample_batters_risk)
        self.risk_adjusted_batters['IDfg'] = self.merged_batter_df['IDfg']
        #pitchers
        self.risk_adjusted_starters = risk_adjusted_scores(self.sample_starting_pitchers, self.sample_starting_pitchers_risk)
        self.risk_adjusted_relief = risk_adjusted_scores(self.sample_relief_pitchers, self.sample_relief_pitchers_risk)
        self.risk_adjusted_closers = risk_adjusted_scores(self.sample_closers, self.sample_closers_risk)
        #add positions
        # Cross-references FanGraphs IDs against the Chadwick register and the
        # Lahman fielding table to attach a primary position to each batter.
        # NOTE(review): the ``start_time``/``end_time`` parameters are unused
        # inside; the call site passes the module-level globals.
        def add_fielding_positions(start_time, end_time, players):
            #chadwick register for players who played in most recent season of analysis
            #this will be used for cross-referencing player IDs
            chadwick_register = pyb.chadwick_register()
            #lahman database to grab positions
            lahman = pyb.lahman.fielding()
            lahman['key_bbref'] = lahman['playerID']
            lahman = lahman.drop(columns = ['playerID'])
            lahman = lahman.drop_duplicates('key_bbref')
            #merge frames
            merged = pd.merge(lahman,chadwick_register, on = 'key_bbref', how = 'outer')
            merged = merged[['yearID','key_bbref', 'teamID','POS', 'key_fangraphs', 'name_first', 'name_last']]
            merged['IDfg'] = merged['key_fangraphs']
            merged.drop(columns = ['key_fangraphs'], inplace = True) #drop missing players for now, which is very few
            #merge with player positions
            players = pd.merge(players, merged, on = 'IDfg', how = 'left')
            players = players[['Name', 'POS', 'Risk Adjusted Score']]
            players.dropna(inplace = True)
            #fix Ohtani
            #we will find a way to add his pitching stats
            def ohtani(x):
                if x == 'P':
                    return 'SP'
                else:
                    return x
            players['POS'] = players['POS'].apply(lambda x : ohtani(x))
            return players
        self.risk_adjusted_batters = add_fielding_positions(start_time, end_time, self.risk_adjusted_batters)
        #add pitcher positions
        def add_pitching_positions(starters, relievers, closers):
            #naive criteria to separate into RP and SP
            starters['POS'] = ['SP' for i in range(0,len(starters))]
            relievers['POS'] = ['RP' for i in range(0,len(relievers))]
            closers['POS'] = ['RP' for i in range(0,len(closers))]
            return starters, relievers, closers
        self.risk_adjusted_starters, self.risk_adjusted_relief, self.risk_adjusted_closers = add_pitching_positions(self.risk_adjusted_starters, self.risk_adjusted_relief, self.risk_adjusted_closers)
        #change IDs
        # Replaces the FanGraphs IDs with simple positional indices so the
        # combined frame below has unique sequential IDs per subset.
        def id_changer(players):
            players['IDfg'] = [i for i in range(len(players))]
            players = players[['IDfg', 'Name', 'POS', 'Risk Adjusted Score']]
            return players
        self.risk_adjusted_starters = id_changer(self.risk_adjusted_starters)
        self.risk_adjusted_relief = id_changer(self.risk_adjusted_relief)
        self.risk_adjusted_closers = id_changer(self.risk_adjusted_closers)
        self.risk_adjusted_batters = id_changer(self.risk_adjusted_batters)
        #combine all players
        def combine_all_players(batters, starters, relievers, closers):
            players = batters.append(starters)
            players = players.append(closers)
            players = players.append(relievers)
            return players
        self.all_players = combine_all_players(self.risk_adjusted_batters, self.risk_adjusted_starters, self.risk_adjusted_relief, self.risk_adjusted_closers)
#Adapt Drafting Technique from : https://github.com/ykeuter/ffl/blob/master/notebooks/mcts.ipynb
#create the draft state so we know who has been taken and who is available and who's turn it is
class DraftState:
    """Game state for the Monte-Carlo-Tree-Search mock draft.

    Tracks each competitor's roster, the remaining pick order (``turns``),
    the pool of undrafted players (``freeagents``, assumed sorted best-first
    by the caller), and which competitor just picked.
    """
    def __init__(self, rosters, turns, freeagents, playerjm=None):
        self.rosters = rosters
        self.turns = turns
        self.freeagents = freeagents
        self.playerJustMoved = playerjm
    #create weights the so algorithm can choose relative to which positions they have chosen from
    def GetResult(self, playerjm):
        """ Get the game result from the viewpoint of playerjm.
        """
        if playerjm is None: return 0
        # Lineup-slot weights. Single-position keys are plain strings (the
        # parentheses are redundant); multi-position keys are tuples, and the
        # ``p.position in lineup_pos`` test below works for both (substring
        # match on strings, membership on tuples).
        pos_wgts = {
            ("SP"): [.9, .9, .9 ,.6, .6 ,.6],
            ("RP"): [.8, .7, .6, .5,.5],
            ("C"): [.6,.4],
            ("1B"): [.7,],
            ("2B"): [.7, .6],
            ("SS"): [.7, .6],
            ("3B"): [.7, .6],
            ("1B", "2B", "3B", "SS", "OF", "C"): [.6],
            ("1B", "2B", "3B", "SS"): [.6],
            ("OF"): [.7,.7,.7,.5,.4],
        }
        result = 0
        # map the drafted players to the weights
        # Each drafted player consumes the best remaining weight among the
        # slots their position fits (ties broken toward the most specific,
        # i.e. shortest, slot key).
        for p in self.rosters[playerjm]:
            max_wgt, _, max_pos, old_wgts = max(
                ((wgts[0], -len(lineup_pos), lineup_pos, wgts) for lineup_pos, wgts in pos_wgts.items()
                if p.position in lineup_pos),
                default=(0, 0, (), []))
            if max_wgt > 0:
                result += max_wgt * p.points
                old_wgts.pop(0)
                if not old_wgts:
                    pos_wgts.pop(max_pos)
        # map the remaining weights to the top three free agents
        # Unfilled slots are valued at the mean of the best three available
        # free agents for that position.
        for pos, wgts in pos_wgts.items():
            result += np.mean([p.points for p in self.freeagents if p.position in pos][:3]) * sum(wgts)
        return result
    #possible moves for each state, given the position
    def GetMoves(self):
        """ Get all possible moves from this state.
        """
        # Per-position roster caps; a position is a legal move while the
        # competitor on the clock is under its cap.
        pos_max = {"SP": 6, "RP": 5, "1B": 1, "C":2, "2B":2 , "3B":2, "SS": 2, "OF":5}
        if len(self.turns) == 0: return []
        roster_positions = np.array([p.position for p in self.rosters[self.turns[0]]], dtype=str)
        moves = [pos for pos, max_ in pos_max.items() if np.sum(roster_positions == pos) < max_]
        return moves
    #update states after each move
    def DoMove(self, move):
        """ Update a state by carrying out the given move.
            Must update playerJustMoved.
        """
        # Drafts the first (i.e. best remaining) free agent at the chosen
        # position for the competitor on the clock.
        player = next(p for p in self.freeagents if p.position == move)
        self.freeagents.remove(player)
        rosterId = self.turns.pop(0)
        self.rosters[rosterId].append(player)
        self.playerJustMoved = rosterId
    def Clone(self):
        """ Create a deep clone of this game state.
        """
        # Shallow-copies the lists; MLBPlayer objects themselves are shared,
        # which is safe because they are never mutated.
        rosters = list(map(lambda r: r[:], self.rosters))
        st = DraftState(rosters, self.turns[:], self.freeagents[:],
                        self.playerJustMoved)
        return st
#create a player object with relevant attributes
class MLBPlayer:
    """A draftable player: display name, roster position, projected points."""

    def __init__(self, name, position, points):
        self.name = name
        self.position = position
        self.points = points

    def __repr__(self):
        # Pipe-delimited summary, e.g. "Ace|SP|99.5".
        fields = (self.name, self.position, str(self.points))
        return "|".join(fields)
# This is a very simple implementation of the uct Monte Carlo Tree Search algorithm in Python 2.7.
# The function uct(rootstate, itermax, verbose = False) is towards the bottom of the code.
# It aims to have the clearest and simplest possible code, and for the sake of clarity, the code
# is orders of magnitude less efficient than it could be made, particularly by using a
# state.GetRandomMove() or state.DoRandomRollout() function.
#
# Written by <NAME>, <NAME>, <NAME> (University of York, UK) September 2012.
#
# Licence is granted to freely use and distribute for any sensible/legal purpose so long as this comment
# remains in any distributed code.
#
# For more information about Monte Carlo Tree Search check out our web site at www.mcts.ai
class Node:
    """ A node in the game tree. Note wins is always from the viewpoint of playerJustMoved.
        Crashes if state not specified.
    """
    def __init__(self, move = None, parent = None, state = None):
        self.move = move # the move that got us to this node - "None" for the root node
        self.parentNode = parent # "None" for the root node
        self.childNodes = []
        self.wins = 0
        self.visits = 0
        self.untriedMoves = state.GetMoves() # future child nodes
        self.playerJustMoved = state.playerJustMoved # the only part of the state that the Node needs later
    def uctSelectChild(self):
        """ Use the UCB1 formula to select a child node. Often a constant uctK is applied so we have
            lambda c: c.wins/c.visits + uctK * sqrt(2*log(self.visits)/c.visits to vary the amount of
            exploration versus exploitation.
        """
        # uctK tuned by hand (earlier values kept as a record). Larger values
        # favor exploration over exploitation.
        uctK = 1000 #200 #2000 #100 #20000
        # sorted(...)[-1] picks the highest-UCB child; on ties this takes the
        # LAST maximal child (stable sort), unlike max() which takes the first.
        s = sorted(self.childNodes, key = lambda c: c.wins/c.visits + uctK * math.sqrt(2*math.log(self.visits)/c.visits))[-1]
        return s
    def AddChild(self, m, s):
        """ Remove m from untriedMoves and add a new child node for this move.
            Return the added child node
        """
        n = Node(move = m, parent = self, state = s)
        self.untriedMoves.remove(m)
        self.childNodes.append(n)
        return n
    def Update(self, result):
        """ Update this node - one additional visit and result additional wins. result must be from the viewpoint of playerJustmoved.
        """
        self.visits += 1
        self.wins += result
def uct(rootstate, itermax, verbose = False):
    """ Conduct a uct search for itermax iterations starting from rootstate.
        Return the best move from the rootstate.
    """
    # NOTE(review): the ``verbose`` parameter is currently unused.
    rootnode = Node(state = rootstate)
    for i in range(itermax):
        node = rootnode
        state = rootstate.Clone()
        # Select
        while node.untriedMoves == [] and node.childNodes != []: # node is fully expanded and non-terminal
            node = node.uctSelectChild()
            state.DoMove(node.move)
        # Expand
        if node.untriedMoves != []: # if we can expand (i.e. state/node is non-terminal)
            m = random.choice(node.untriedMoves)
            state.DoMove(m)
            node = node.AddChild(m,state) # add child and descend tree
        # Rollout - this can often be made orders of magnitude quicker using a state.GetRandomMove() function
        while state.GetMoves() != []: # while state is non-terminal
            state.DoMove(random.choice(state.GetMoves()))
        # Backpropagate
        while node != None: # backpropagate from the expanded node and work back to the root node
            node.Update(state.GetResult(node.playerJustMoved)) # state is terminal. Update node with result from POV of node.playerJustMoved
            node = node.parentNode
    return sorted(rootnode.childNodes, key = lambda c: c.visits)[-1].move # return the move that was most visited
class Drafter(DraftState):
    def __init__(self, forecaster, draftstate = DraftState, mlbplayer = MLBPlayer, uct = uct):
        """Bind a completed Forecaster plus the draft components.

        forecaster -- Forecaster whose ``all_players`` frame feeds the draft
        draftstate -- DraftState class used to model the draft (not an instance)
        mlbplayer  -- player record class constructed from projection rows
        uct        -- search function used to choose each pick
        """
        self.forecaster = forecaster
        self.draftstate = draftstate
        self.mlbplayer = mlbplayer
        self.uct = uct
    #prepare the draft
    def prepare_draft(self) :
        """Assign DraftState method results onto ``self.draftstsate``.

        NOTE(review): this method looks broken and appears unused —
        ``draft()`` builds its own state instead. Two issues to confirm:
        (1) the attribute name ``draftstsate`` is a misspelling of
        ``draftstate``; (2) ``self.draftstate`` is the DraftState *class*,
        so calls like ``self.draftstate.GetResult()`` invoke unbound methods
        without ``self`` and raise TypeError.
        """
        #create position weights for drafting importance
        self.draftstsate.GetResult = self.draftstate.GetResult()
        #assign possible moves for each player at each state
        self.draftstsate.GetMoves = self.draftstate.GetMoves()
        #update states of the draft after each move
        self.draftstsate.DoMove = self.draftstate.DoMove()
        #create a deep clone of this game state
        self.draftstsate.Clone = self.draftstate.Clone()
        return self.draftstsate
#simulate a fantasy faseball draft
def draft(self) :
print('')
print('Drafting')
print('')
#import projections
self.forecaster.all_players.set_index('IDfg', inplace = True)
self.forecaster.mlb_players = self.forecaster.all_players
freeagents = [self.mlbplayer(*p) for p in self.forecaster.mlb_players.itertuples(index=False, name=None)]
#create draft competitors
num_competitors = self.forecaster.num_competitors
rosters = [[] for _ in range(num_competitors)] # empty rosters to start with
#create number of rounds and turns
num_rounds = self.forecaster.num_rounds
turns = []
# generate turns by snake order
for i in range(num_rounds):
turns += reversed(range(num_competitors)) if i % 2 else range(num_competitors)
#create draft states
state = self.draftstate(rosters, turns, freeagents)
iterations = self.forecaster.num_iterations
while state.GetMoves() != []:
move = self.uct(state, iterations)
print(move, end=".")
state.DoMove(move)
print('')
print('Draft Complete')
#draft results
self.draft_results = pd.DataFrame({"Team " + str(i + 1): r for i, r in enumerate(state.rosters)})
return self.draft_results
#convert the dataframes to excel sheets
def excel_converter(self):
#excel file
writer = pd.ExcelWriter(f'C:\\Users\\{self.forecaster.user}\\Downloads\\{end_time.year +1}_Projections_{self.forecaster.today}.xlsx')
#Drafting
self.draft_results.to_excel(writer, sheet_name = 'Mock Draft',index = False)
#full list
self.forecaster.all_players.to_excel(writer, sheet_name = 'All Players',index = False)
#risk-adjusted
self.forecaster.risk_adjusted_batters.to_excel(writer, sheet_name = 'Risk Adjusted Batters',index = False)
self.forecaster.risk_adjusted_starters.to_excel(writer, sheet_name = 'Risk Adjusted Starters',index = False)
self.forecaster.risk_adjusted_relief.to_excel(writer, sheet_name = 'Risk Adjusted Relief',index = False)
self.forecaster.risk_adjusted_closers.to_excel(writer, sheet_name = 'Risk Adjusted Closers',index = False)
#points
self.forecaster.sample_batters.drop(columns = ['IDfg'], inplace = True)
self.forecaster.sample_batters.to_excel(writer, sheet_name='Batters Projection',index = False)
self.forecaster.sample_starting_pitchers.drop(columns = ['IDfg'], inplace = True)
self.forecaster.sample_starting_pitchers.to_excel(writer, sheet_name='Starters Projection',index = False)
self.forecaster.sample_relief_pitchers.drop(columns = ['IDfg'], inplace = True)
self.forecaster.sample_relief_pitchers.to_excel(writer, sheet_name='Relievers Projection',index = False)
self.forecaster.sample_closers.drop(columns = ['IDfg'], inplace = True)
self.forecaster.sample_closers.to_excel(writer, sheet_name='Closers Projection',index = False)
#risk
self.forecaster.sample_batters_risk.drop(columns = ['IDfg'], inplace = True)
self.forecaster.sample_batters_risk.to_excel(writer, sheet_name='Batters Risk',index = False)
self.forecaster.sample_starting_pitchers_risk.drop(columns = ['IDfg'], inplace = True)
self.forecaster.sample_starting_pitchers_risk.to_excel(writer, sheet_name='Starters Risk',index = False)
self.forecaster.sample_relief_pitchers_risk.drop(columns = ['IDfg'], inplace = True)
self.forecaster.sample_relief_pitchers_risk.to_excel(writer, sheet_name='Relievers Risk',index = False)
self.forecaster.sample_closers_risk.drop(columns = ['IDfg'], inplace = True)
self.forecaster.sample_closers_risk.to_excel(writer, sheet_name='Closers Risk',index = False)
#clusters
self.forecaster.cluster_finder_batter.to_excel(writer, sheet_name = 'Batter Clusters',index = False)
self.forecaster.cluster_finder_starting_pitcher.to_excel(writer, sheet_name = 'Starting Clusters',index = False)
self.forecaster.cluster_finder_relief_pitcher.to_excel(writer, sheet_name = 'Relief Clusters',index = False)
self.forecaster.cluster_finder_closer.to_excel(writer, sheet_name = 'Closer Clusters',index = False)
#save file
writer.save()
return self.forecaster
#call the excel converter
def call_converter(self):
return self.excel_converter()
| [
"tkinter.Button",
"math.log",
"numpy.array",
"tkinter.Label",
"getpass.getuser",
"sklearn.cluster.MeanShift",
"pandas.ExcelWriter",
"pandas.date_range",
"numpy.mean",
"tkinter.Entry",
"random.choice",
"pybaseball.chadwick_register",
"pandas.merge",
"numpy.std",
"datetime.date.today",
"... | [((718, 751), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (741, 751), False, 'import warnings\n'), ((861, 868), 'tkinter.Tk', 'tk.Tk', ([], {}), '()\n', (866, 868), True, 'import tkinter as tk\n'), ((983, 1054), 'tkinter.Label', 'tk.Label', (['root'], {'text': '"""Choose the number of simulations for forecasting"""'}), "(root, text='Choose the number of simulations for forecasting')\n", (991, 1054), True, 'import tkinter as tk\n'), ((1079, 1093), 'tkinter.Entry', 'tk.Entry', (['root'], {}), '(root)\n', (1087, 1093), True, 'import tkinter as tk\n'), ((1122, 1173), 'tkinter.Label', 'tk.Label', (['root'], {'text': '"""Choose Number of Competitors"""'}), "(root, text='Choose Number of Competitors')\n", (1130, 1173), True, 'import tkinter as tk\n'), ((1202, 1216), 'tkinter.Entry', 'tk.Entry', (['root'], {}), '(root)\n', (1210, 1216), True, 'import tkinter as tk\n'), ((1240, 1303), 'tkinter.Label', 'tk.Label', (['root'], {'text': '"""Choose the number of rounds in the draft"""'}), "(root, text='Choose the number of rounds in the draft')\n", (1248, 1303), True, 'import tkinter as tk\n'), ((1327, 1341), 'tkinter.Entry', 'tk.Entry', (['root'], {}), '(root)\n', (1335, 1341), True, 'import tkinter as tk\n'), ((1369, 1462), 'tkinter.Label', 'tk.Label', (['root'], {'text': '"""Choose the number of iterations for the Draft Agent\'s Exploration"""'}), '(root, text=\n "Choose the number of iterations for the Draft Agent\'s Exploration")\n', (1377, 1462), True, 'import tkinter as tk\n'), ((1485, 1499), 'tkinter.Entry', 'tk.Entry', (['root'], {}), '(root)\n', (1493, 1499), True, 'import tkinter as tk\n'), ((2243, 2293), 'tkinter.Button', 'tk.Button', (['root'], {'text': '"""Submit"""', 'command': 'get_params'}), "(root, text='Submit', command=get_params)\n", (2252, 2293), True, 'import tkinter as tk\n'), ((2498, 2505), 'tkinter.Tk', 'tk.Tk', ([], {}), '()\n', (2503, 2505), True, 'import tkinter as tk\n'), ((2597, 2636), 
'tkinter.Label', 'tk.Label', (['root'], {'text': '"""Start Year: YYYY"""'}), "(root, text='Start Year: YYYY')\n", (2605, 2636), True, 'import tkinter as tk\n'), ((2655, 2669), 'tkinter.Entry', 'tk.Entry', (['root'], {}), '(root)\n', (2663, 2669), True, 'import tkinter as tk\n'), ((2686, 2723), 'tkinter.Label', 'tk.Label', (['root'], {'text': '"""End Year: YYYY"""'}), "(root, text='End Year: YYYY')\n", (2694, 2723), True, 'import tkinter as tk\n'), ((2740, 2754), 'tkinter.Entry', 'tk.Entry', (['root'], {}), '(root)\n', (2748, 2754), True, 'import tkinter as tk\n'), ((3193, 3241), 'tkinter.Button', 'tk.Button', (['root'], {'text': '"""Submit"""', 'command': 'get_year'}), "(root, text='Submit', command=get_year)\n", (3202, 3241), True, 'import tkinter as tk\n'), ((3344, 3399), 'pandas.date_range', 'pd.date_range', ([], {'start': 'start_time', 'end': 'end_time', 'freq': '"""D"""'}), "(start=start_time, end=end_time, freq='D')\n", (3357, 3399), True, 'import pandas as pd\n'), ((4023, 4040), 'getpass.getuser', 'getpass.getuser', ([], {}), '()\n', (4038, 4040), False, 'import getpass\n'), ((29350, 29420), 'numpy.array', 'np.array', (['[p.position for p in self.rosters[self.turns[0]]]'], {'dtype': 'str'}), '([p.position for p in self.rosters[self.turns[0]]], dtype=str)\n', (29358, 29420), True, 'import numpy as np\n'), ((36721, 36856), 'pandas.ExcelWriter', 'pd.ExcelWriter', (['f"""C:\\\\Users\\\\{self.forecaster.user}\\\\Downloads\\\\{end_time.year + 1}_Projections_{self.forecaster.today}.xlsx"""'], {}), "(\n f'C:\\\\Users\\\\{self.forecaster.user}\\\\Downloads\\\\{end_time.year + 1}_Projections_{self.forecaster.today}.xlsx'\n )\n", (36735, 36856), True, 'import pandas as pd\n'), ((13438, 13500), 'sklearn.cluster.estimate_bandwidth', 'estimate_bandwidth', (['train'], {'quantile': 'quantile', 'n_samples': '(100000)'}), '(train, quantile=quantile, n_samples=100000)\n', (13456, 13500), False, 'from sklearn.cluster import MeanShift, estimate_bandwidth\n'), ((13577, 13656), 
'sklearn.cluster.MeanShift', 'MeanShift', ([], {'bandwidth': 'bandwidth', 'bin_seeding': '(True)', 'cluster_all': '(True)', 'n_jobs': 'None'}), '(bandwidth=bandwidth, bin_seeding=True, cluster_all=True, n_jobs=None)\n', (13586, 13656), False, 'from sklearn.cluster import MeanShift, estimate_bandwidth\n'), ((13984, 14001), 'numpy.unique', 'np.unique', (['labels'], {}), '(labels)\n', (13993, 14001), True, 'import numpy as np\n'), ((24062, 24085), 'pybaseball.chadwick_register', 'pyb.chadwick_register', ([], {}), '()\n', (24083, 24085), True, 'import pybaseball as pyb\n'), ((24155, 24176), 'pybaseball.lahman.fielding', 'pyb.lahman.fielding', ([], {}), '()\n', (24174, 24176), True, 'import pybaseball as pyb\n'), ((24404, 24468), 'pandas.merge', 'pd.merge', (['lahman', 'chadwick_register'], {'on': '"""key_bbref"""', 'how': '"""outer"""'}), "(lahman, chadwick_register, on='key_bbref', how='outer')\n", (24412, 24468), True, 'import pandas as pd\n'), ((24831, 24879), 'pandas.merge', 'pd.merge', (['players', 'merged'], {'on': '"""IDfg"""', 'how': '"""left"""'}), "(players, merged, on='IDfg', how='left')\n", (24839, 24879), True, 'import pandas as pd\n'), ((33577, 33609), 'random.choice', 'random.choice', (['node.untriedMoves'], {}), '(node.untriedMoves)\n', (33590, 33609), False, 'import random\n'), ((4062, 4074), 'datetime.date.today', 'date.today', ([], {}), '()\n', (4072, 4074), False, 'from datetime import date, timedelta\n'), ((28910, 28979), 'numpy.mean', 'np.mean', (['[p.points for p in self.freeagents if p.position in pos][:3]'], {}), '([p.points for p in self.freeagents if p.position in pos][:3])\n', (28917, 28979), True, 'import numpy as np\n'), ((5141, 5181), 'pandas.merge', 'pd.merge', (['outDf', 'df0'], {'how': 'how', 'on': 'onCols'}), '(outDf, df0, how=how, on=onCols)\n', (5149, 5181), True, 'import pandas as pd\n'), ((29478, 29509), 'numpy.sum', 'np.sum', (['(roster_positions == pos)'], {}), '(roster_positions == pos)\n', (29484, 29509), True, 'import numpy 
as np\n'), ((18658, 18677), 'numpy.mean', 'np.mean', (['randomizer'], {}), '(randomizer)\n', (18665, 18677), True, 'import numpy as np\n'), ((18715, 18733), 'numpy.std', 'np.std', (['randomizer'], {}), '(randomizer)\n', (18721, 18733), True, 'import numpy as np\n'), ((32348, 32369), 'math.log', 'math.log', (['self.visits'], {}), '(self.visits)\n', (32356, 32369), False, 'import math\n'), ((18436, 18468), 'numpy.mean', 'np.mean', (['dictionary[key][column]'], {}), '(dictionary[key][column])\n', (18443, 18468), True, 'import numpy as np\n'), ((18519, 18551), 'numpy.mean', 'np.mean', (['dictionary[key][column]'], {}), '(dictionary[key][column])\n', (18526, 18551), True, 'import numpy as np\n')] |
from Oddschecker.Event import *
from Oddschecker import Tools as tl
from Oddschecker.Line import *
from Oddschecker.LineStorage import *
from typing import List
import pprint
pp = pprint.PrettyPrinter(indent=4)
class LineAggregator:
def __init__(self, event: Event, market_to_line_class: dict, sportsbook_codes: List[str]):
main_market = tl.get_json_dict(event.base_url + event.main_market_url + '?ajax=1')
page_id = main_market['data']['page']['id']
# print(page_id)
all_markets = tl.get_json_dict(event.base_url + "view-all-markets/" + str(page_id))
# pp.pprint(all_markets)
market_to_id = {}
for submarket in all_markets['bettingMarkets']:
for market in submarket['markets']:
market_to_id[market['name']] = market['id']
# pp.pprint(market_to_id)
line_storage = {}
for market_name in market_to_line_class.keys():
if market_name in market_to_id.keys():
market_page = tl.get_json_dict(f'{event.base_url}subevent-market/{market_to_id[market]}?'
f'currentPath={event.main_market_url}')
line_class = globals()[market_to_line_class[market_name]]
line_storage_class = globals()[market_to_line_class[market_name]]
line_storage[market_to_line_class[market_name] + 'Storage'] = line_storage_class()
for line in market_page['marketHolder']['market']['bets']:
new_line = line_class(line)
line_storage[market_to_line_class[market_name] + 'Storage'].add_line(new_line)
| [
"Oddschecker.Tools.get_json_dict",
"pprint.PrettyPrinter"
] | [((181, 211), 'pprint.PrettyPrinter', 'pprint.PrettyPrinter', ([], {'indent': '(4)'}), '(indent=4)\n', (201, 211), False, 'import pprint\n'), ((354, 422), 'Oddschecker.Tools.get_json_dict', 'tl.get_json_dict', (["(event.base_url + event.main_market_url + '?ajax=1')"], {}), "(event.base_url + event.main_market_url + '?ajax=1')\n", (370, 422), True, 'from Oddschecker import Tools as tl\n'), ((1017, 1138), 'Oddschecker.Tools.get_json_dict', 'tl.get_json_dict', (['f"""{event.base_url}subevent-market/{market_to_id[market]}?currentPath={event.main_market_url}"""'], {}), "(\n f'{event.base_url}subevent-market/{market_to_id[market]}?currentPath={event.main_market_url}'\n )\n", (1033, 1138), True, 'from Oddschecker import Tools as tl\n')] |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# This file was created using the DirectGUI Designer
from direct.gui import DirectGuiGlobals as DGG
from direct.gui.DirectFrame import DirectFrame
from DirectGuiExtension.DirectAutoSizer import DirectAutoSizer
from DirectGuiExtension.DirectBoxSizer import DirectBoxSizer
from direct.gui.DirectLabel import DirectLabel
from panda3d.core import (
LPoint3f,
LVecBase3f,
LVecBase4f,
TextNode
)
# We need showbase to make this script directly runnable
from direct.showbase.ShowBase import ShowBase
class GUI:
def __init__(self, rootParent=None):
self.frmMain = DirectFrame(
borderWidth=(2, 2),
frameColor=(1, 1, 1, 1),
frameSize=(0.0, 800.0, -600.0, 0.0),
hpr=LVecBase3f(0, 0, 0),
pos=LPoint3f(0, 0, 0),
parent=rootParent,
)
self.frmMain.setTransparency(0)
self.boxMain = DirectBoxSizer(
autoUpdateFrameSize=0,
frameSize=[0.0, 800.0, -600.0, 0.0],
hpr=LVecBase3f(0, 0, 0),
itemAlign=17,
orientation='vertical',
pos=LPoint3f(0, 0, 0),
)
self.boxMain.setTransparency(0)
self.autosizerMain = DirectAutoSizer(
frameSize=[0.0, 800.0, -600.0, 0.0],
hpr=LVecBase3f(0, 0, 0),
pos=LPoint3f(0, 0, 0),
parent=self.frmMain,
childUpdateSizeFunc=self.boxMain.refresh,
)
self.autosizerMain.setTransparency(0)
self.pg15691 = DirectLabel(
borderWidth=(2, 2),
frameSize=LVecBase4f(-27.6, 30, -2.7, 17.4),
hpr=LVecBase3f(0, 0, 0),
pos=LPoint3f(0, 0, -17.4),
scale=LVecBase3f(1, 1, 1),
text='Label',
text0_align=TextNode.A_center,
text0_scale=(24, 24),
text0_pos=(0, 0),
text0_fg=LVecBase4f(0, 0, 0, 1),
text0_bg=LVecBase4f(0, 0, 0, 0),
text0_wordwrap=None,
)
self.pg15691.setTransparency(0)
self.autosizerMain.setChild(self.boxMain)
self.boxMain.addItem(self.pg15691)
def show(self):
self.frmMain.show()
def hide(self):
self.frmMain.hide()
def destroy(self):
self.frmMain.destroy()
# Create a ShowBase instance to make this gui directly runnable
app = ShowBase()
GUI(app.pixel2d)
app.run()
| [
"panda3d.core.LVecBase3f",
"direct.showbase.ShowBase.ShowBase",
"panda3d.core.LVecBase4f",
"panda3d.core.LPoint3f"
] | [((2399, 2409), 'direct.showbase.ShowBase.ShowBase', 'ShowBase', ([], {}), '()\n', (2407, 2409), False, 'from direct.showbase.ShowBase import ShowBase\n'), ((778, 797), 'panda3d.core.LVecBase3f', 'LVecBase3f', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (788, 797), False, 'from panda3d.core import LPoint3f, LVecBase3f, LVecBase4f, TextNode\n'), ((815, 832), 'panda3d.core.LPoint3f', 'LPoint3f', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (823, 832), False, 'from panda3d.core import LPoint3f, LVecBase3f, LVecBase4f, TextNode\n'), ((1055, 1074), 'panda3d.core.LVecBase3f', 'LVecBase3f', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (1065, 1074), False, 'from panda3d.core import LPoint3f, LVecBase3f, LVecBase4f, TextNode\n'), ((1154, 1171), 'panda3d.core.LPoint3f', 'LPoint3f', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (1162, 1171), False, 'from panda3d.core import LPoint3f, LVecBase3f, LVecBase4f, TextNode\n'), ((1335, 1354), 'panda3d.core.LVecBase3f', 'LVecBase3f', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (1345, 1354), False, 'from panda3d.core import LPoint3f, LVecBase3f, LVecBase4f, TextNode\n'), ((1372, 1389), 'panda3d.core.LPoint3f', 'LPoint3f', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (1380, 1389), False, 'from panda3d.core import LPoint3f, LVecBase3f, LVecBase4f, TextNode\n'), ((1625, 1658), 'panda3d.core.LVecBase4f', 'LVecBase4f', (['(-27.6)', '(30)', '(-2.7)', '(17.4)'], {}), '(-27.6, 30, -2.7, 17.4)\n', (1635, 1658), False, 'from panda3d.core import LPoint3f, LVecBase3f, LVecBase4f, TextNode\n'), ((1676, 1695), 'panda3d.core.LVecBase3f', 'LVecBase3f', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (1686, 1695), False, 'from panda3d.core import LPoint3f, LVecBase3f, LVecBase4f, TextNode\n'), ((1713, 1734), 'panda3d.core.LPoint3f', 'LPoint3f', (['(0)', '(0)', '(-17.4)'], {}), '(0, 0, -17.4)\n', (1721, 1734), False, 'from panda3d.core import LPoint3f, LVecBase3f, LVecBase4f, TextNode\n'), ((1754, 1773), 'panda3d.core.LVecBase3f', 'LVecBase3f', (['(1)', 
'(1)', '(1)'], {}), '(1, 1, 1)\n', (1764, 1773), False, 'from panda3d.core import LPoint3f, LVecBase3f, LVecBase4f, TextNode\n'), ((1929, 1951), 'panda3d.core.LVecBase4f', 'LVecBase4f', (['(0)', '(0)', '(0)', '(1)'], {}), '(0, 0, 0, 1)\n', (1939, 1951), False, 'from panda3d.core import LPoint3f, LVecBase3f, LVecBase4f, TextNode\n'), ((1974, 1996), 'panda3d.core.LVecBase4f', 'LVecBase4f', (['(0)', '(0)', '(0)', '(0)'], {}), '(0, 0, 0, 0)\n', (1984, 1996), False, 'from panda3d.core import LPoint3f, LVecBase3f, LVecBase4f, TextNode\n')] |
import torch
import torch.nn as nn
import torch.nn.functional as F
from torchvision.models import resnet18, vgg16_bn
import omni_torch.networks.blocks as block
import researches.img2img.probaV_SR.pvsr_module as module
class CARN(nn.Module):
def __init__(self, inchannel, filters, scale, BN=nn.BatchNorm2d, s_MSE=False, trellis=False, img_bit=8):
super(CARN, self).__init__()
self.scale = scale
if s_MSE:
self.evaluator = Vgg16BN()
else:
self.evaluator = None
self.trellis = trellis
rgb_mean = 0.5
rgb_std = 1.0
self.sub_mean = module.MeanShift(2 ** img_bit, rgb_mean, rgb_std, channel=inchannel)
self.add_mean = module.MeanShift(2 ** img_bit, rgb_mean, rgb_std, sign=1, channel=1)
self.entry = nn.Conv2d(inchannel, filters, 3, 1, 1)
self.b1 = module.CarnBlock(filters, filters)
self.b2 = module.CarnBlock(filters, filters)
self.b3 = module.CarnBlock(filters, filters)
self.c1 = module.BasicBlock(filters * 2, filters, 1, 1, 0)
self.c2 = module.BasicBlock(filters * 3, filters, 1, 1, 0)
self.c3 = module.BasicBlock(filters * 4, filters, 1, 1, 0)
if trellis:
self.trellis = module.Trellis_Structure(filters=filters, depth=4, out_depth=filters)
self.upsample = module.UpsampleBlock(filters, scale=scale)
self.exit = nn.Conv2d(filters, 1, 3, 1, 1)
self.mae = nn.L1Loss()
self.s_mse_loss = nn.MSELoss()
def forward(self, x, y):
#x = self.sub_mean(x)
x = self.entry(x)
c0 = o0 = x
b1 = self.b1(o0)
c1 = torch.cat([c0, b1], dim=1)
o1 = self.c1(c1)
b2 = self.b2(o1)
c2 = torch.cat([c1, b2], dim=1)
o2 = self.c2(c2)
b3 = self.b3(o2)
c3 = torch.cat([c2, b3], dim=1)
o3 = self.c3(c3)
if self.trellis:
o3 = self.trellis(o3)[-1]
out = self.upsample(o3)
out = self.exit(out)
#out = self.add_mean(out)
mae = self.mae(out, y).unsqueeze_(0)
if self.evaluator:
s_mse_pred = self.evaluator(out)
s_mse_label = self.evaluator(y)
s_mse_loss = sum([self.s_mse_loss(s_pred, s_mse_label[i]) for i, s_pred in enumerate(s_mse_pred)])
return out, mae, s_mse_loss.unsqueeze_(0)
else:
return out, mae, torch.tensor([0])
class RDN(nn.Module):
def __init__(self, channel, rdb_number, upscale_factor, BN=nn.BatchNorm2d, s_MSE=False,
filters=64, group=1, trellis=False, img_bit=8):
super(RDN, self).__init__()
if s_MSE:
self.evaluator = Vgg16BN()
else:
self.evaluator = None
self.trellis = trellis
#self.group_conv1 = block.conv_block(channel, [filters * group, filters * group], kernel_sizes=[3, 3],
#stride=[1, 1], padding=[1, 1], groups=[group] * 2, name="block1", batch_norm=BN)
#self.conv1 = nn.Conv2d(filters * group, out_channels=filters, kernel_size=1, padding=0, stride=1)
rgb_mean = 0.5
rgb_std = 1.0
self.sub_mean = module.MeanShift(2**img_bit, rgb_mean, rgb_std, channel=channel)
self.add_mean = module.MeanShift(2**img_bit, rgb_mean,rgb_std, sign=1, channel=1)
self.SFF1 = nn.Conv2d(in_channels=channel, out_channels=filters, kernel_size=3, padding=1, stride=1)
self.SFF2 = nn.Conv2d(in_channels=filters, out_channels=filters, kernel_size=3, padding=1, stride=1)
self.RDB1 = RDB(nb_layers=rdb_number, input_dim=filters, growth_rate=filters)
self.RDB2 = RDB(nb_layers=rdb_number, input_dim=filters, growth_rate=filters)
self.RDB3 = RDB(nb_layers=rdb_number, input_dim=filters, growth_rate=filters)
self.GFF1 = nn.Conv2d(in_channels=filters * 3, out_channels=filters, kernel_size=1, padding=0)
self.GFF2 = nn.Conv2d(in_channels=filters, out_channels=filters, kernel_size=3, padding=1)
self.upconv = nn.Conv2d(in_channels=filters, out_channels=(filters * upscale_factor * upscale_factor),
kernel_size=3, padding=1)
self.pixelshuffle = nn.PixelShuffle(upscale_factor)
#self.conv2 = nn.Conv2d(in_channels=filters, out_channels=1, kernel_size=3, padding=1)
if trellis:
self.trellis = module.Trellis_Structure(filters=filters, depth=4, out_depth=1)
self.norm_conv1 = block.conv_block(filters, [filters, filters, 1], kernel_sizes=[3, 3, 1], stride=[1, 1, 1],
padding=[1, 1, 0], groups=[1] * 3, name="norm_conv1", batch_norm=None,
activation=None)
"""
self.norm_conv2 = block.conv_block(64, [64, 64, 32], kernel_sizes=[3, 1, 3], stride=[1, 1, 1],
padding=[1, 0, 1], groups=[1] * 3, name="norm_conv2", batch_norm=BN,
activation=None)
self.norm_conv3 = block.conv_block(32, [32, 16, 16, 1], kernel_sizes=[1, 3, 3, 3], stride=[1, 1, 1, 1],
padding=[0, 1, 1, 1], groups=[1] * 4, name="norm_conv3", batch_norm=BN,
activation=None)
"""
self.mae = nn.L1Loss()
self.s_mse_loss = nn.MSELoss()
def forward(self, x, y, train=True):
x = self.sub_mean(x)
#x = self.group_conv1(x)
#x = self.conv1(x)
f_ = self.SFF1(x)
f_0 = self.SFF2(f_)
f_1 = self.RDB1(f_0)
f_2 = self.RDB2(f_1)
f_3 = self.RDB3(f_2)
f_D = torch.cat((f_1, f_2, f_3), 1)
f_1x1 = self.GFF1(f_D)
f_GF = self.GFF2(f_1x1)
f_DF = f_GF + f_
f_upconv = self.upconv(f_DF)
f_upscale = self.pixelshuffle(f_upconv)
if self.trellis:
results = self.trellis(f_upscale)
out = results[-1]
else:
out = self.norm_conv1(f_upscale)
# f_conv2 = self.conv2(f_upscale)
#results = self.trellis(f_upscale)
#out = results[-1]
out = self.add_mean(out)
if train:
#mae = sum([self.mae(result, y) for result in results]).unsqueeze_(0)
mae = self.mae(out, y)
"""
out = self.norm_conv1(f_upscale)
out = self.norm_conv2(out)
out = self.norm_conv3(out)
mae = self.mae(out, y).unsqueeze_(0)
"""
if self.evaluator:
s_mse_pred = self.evaluator(out)
s_mse_label = self.evaluator(y)
s_mse_loss = sum([self.s_mse_loss(s_pred, s_mse_label[i]) for i, s_pred in enumerate(s_mse_pred)])
return out, mae, s_mse_loss.unsqueeze_(0)
else:
return out, mae, torch.tensor([0])
else:
return out, 0, 0
class BasicBlock(nn.Module):
def __init__(self, input_dim, output_dim):
super(BasicBlock, self).__init__()
self.ID = input_dim
self.conv = nn.Conv2d(in_channels=input_dim, out_channels=output_dim, kernel_size=3, padding=1, stride=1)
self.relu = nn.ReLU()
def forward(self, x):
out = self.conv(x)
out = self.relu(out)
return torch.cat((x, out), 1)
class RDB(nn.Module):
def __init__(self, nb_layers, input_dim, growth_rate):
super(RDB, self).__init__()
self.ID = input_dim
self.GR = growth_rate
self.layer = self._make_layer(nb_layers, input_dim, growth_rate)
self.conv1x1 = nn.Conv2d(in_channels=input_dim + nb_layers * growth_rate, \
out_channels=growth_rate, \
kernel_size=1, \
stride=1, \
padding=0)
def _make_layer(self, nb_layers, input_dim, growth_rate):
layers = []
for i in range(nb_layers):
layers.append(BasicBlock(input_dim + i * growth_rate, growth_rate))
return nn.Sequential(*layers)
def forward(self, x):
out = self.layer(x)
out = self.conv1x1(out)
return out + x
class Vgg16BN(nn.Module):
def __init__(self):
super(Vgg16BN, self).__init__()
vgg16 = vgg16_bn(pretrained=True)
net = list(vgg16.children())[0]
self.conv_block1 = nn.Sequential(*net[:7])
self.conv_block1.required_grad = False
self.conv_block2 = nn.Sequential(*net[7:14])
self.conv_block2.required_grad = False
"""
self.conv_block3 = nn.Sequential(*net[14:24])
self.conv_block3.required_grad = False
self.conv_block4 = nn.Sequential(*net[24:34])
self.conv_block4.required_grad = False
self.conv_block5 = nn.Sequential(*net[34:])
self.conv_block5.required_grad = False
"""
def forward(self, x):
def gram_matrix(x):
nelement = x.size(0) * x.size(2) * x.size(3)
return torch.mm(x.view(x.size(1), -1), torch.transpose(x.view(x.size(1), -1), 1, 0)) / nelement
# assert len(layers) == len(keys)
# In this scenario, input x is a grayscale image
x = x.repeat(1, 3, 1, 1)
out1 = self.conv_block1(x)
out2 = self.conv_block2(out1)
#out3 = self.conv_block3(out2)
# out4 = self.conv_block4(out3)
# out5 = self.conv_block5(out4)
return [out1, out2]#, out3] # , out4, out5
# return out1, gram_matrix(out2), gram_matrix(out3)
class ProbaV_basic(nn.Module):
def __init__(self, inchannel=3, BN=nn.BatchNorm2d, group=1, s_MSE=False, SA=True):
super(ProbaV_basic, self).__init__()
if s_MSE:
self.evaluator = Vgg16BN()
else:
self.evaluator = None
self.SA = SA
self.down_conv1 = block.conv_block(inchannel, [48 * group, 128 * group, 128 * group], kernel_sizes=[3, 3, 1],
stride=[2, 1, 1], padding=[1, 1, 0], groups=[group] * 3,
name="down_block1", batch_norm=BN)
self.down_conv2 = block.conv_block(128 * group, [256 * group, 256 * group, 256 * group], kernel_sizes=[3, 3, 1],
stride=[2, 1, 1], padding=[1, 1, 0], groups=[group] * 3, name="down_block2",
batch_norm=BN)
self.norm_conv1 = block.conv_block(256 * group, [256, 256, 256],
kernel_sizes=[3, 3, 3], stride=[1] * 3, padding=[2, 1, 1],
groups=[1] * 3, dilation=[2, 1, 1], name="norm_conv1", batch_norm=BN)
if SA:
self.self_attn = Self_Attn(256)
self.up_conv1 = block.conv_block(256, [256, 256, 256], kernel_sizes=[5, 3, 3], stride=[3, 1, 1],
padding=[1, 1, 1], groups=[1] * 3, name="up_block1", batch_norm=BN,
transpose=[True, False, False])
self.norm_conv2 = block.conv_block(256, [256, 256, 256], kernel_sizes=[1, 3, 3], stride=[1, 1, 1],
padding=[0, 1, 1], groups=[1] * 3, name="norm_conv2", batch_norm=BN)
self.up_conv2 = block.conv_block(256, [256, 256, 256], kernel_sizes=[4, 3, 3], stride=[2, 1, 1],
padding=[1, 1, 1], groups=[1] * 3, name="up_block2", batch_norm=BN,
transpose=[True, False, False])
self.norm_conv3 = block.conv_block(256, [256, 128, 128], kernel_sizes=[1, 3, 3], stride=[1, 1, 1],
padding=[0, 1, 1], groups=[1] * 3, name="norm_conv3", batch_norm=BN)
self.up_conv3 = block.conv_block(128, [128, 128, 128], kernel_sizes=[4, 3, 3], stride=[2, 1, 1],
padding=[1, 1, 1], groups=[1] * 3, name="up_block3", batch_norm=BN,
transpose=[True, False, False])
self.norm_conv4 = block.conv_block(128, [128, 64, 64], kernel_sizes=[1, 3, 3], stride=[1, 1, 1],
padding=[0, 1, 1], groups=[1] * 3, name="norm_conv4", batch_norm=BN,
activation=None)
self.norm_conv5 = block.conv_block(64, [64, 32, 32], kernel_sizes=[1, 3, 3], stride=[1, 1, 1],
padding=[0, 1, 1], groups=[1] * 3, name="norm_conv5", batch_norm=BN,
activation=None)
self.norm_conv6 = block.conv_block(32, [32, 16, 16, 1], kernel_sizes=[1, 3, 3, 3], stride=[1, 1, 1, 1],
padding=[0, 1, 1, 1], groups=[1] * 4, name="norm_conv6", batch_norm=BN,
activation=None)
def forward(self, x, y=None):
out = self.down_conv1(x)
out = self.down_conv2(out)
#out = self.down_conv3(out)
# out = self.down_conv4(out)
# out = self.down_conv5(out)
out = self.norm_conv1(out)
if self.SA:
out, attn_map = self.self_attn(out)
out = self.up_conv1(out)
out = self.norm_conv2(out)
out = self.up_conv2(out)
out = self.norm_conv3(out)
out = self.up_conv3(out)
out = self.norm_conv4(out)
out = self.norm_conv5(out)
out = self.norm_conv6(out)
#out = out / torch.max(out)
# out = self.sigmoid(out)
# out = self.up_conv5(out)
if self.evaluator:
s_mse_pred = self.evaluator(out)
s_mse_label = self.evaluator(y)
return [out] + s_mse_pred, [y] + s_mse_label
else:
return out, y
class Self_Attn(nn.Module):
""" Self attention Layer"""
def __init__(self, in_dim):
super().__init__()
self.query_conv = nn.Conv2d(in_channels=in_dim, out_channels=in_dim // 8, kernel_size=1)
self.key_conv = nn.Conv2d(in_channels=in_dim, out_channels=in_dim // 8, kernel_size=1)
self.value_conv = nn.Conv2d(in_channels=in_dim, out_channels=in_dim, kernel_size=1)
self.gamma = nn.Parameter(torch.zeros(1))
self.softmax = nn.Softmax(dim=-1) #
def forward(self, x):
"""
inputs :
x : input feature maps( B X C X W X H)
returns :
out : self attention value + input feature
attention: B X N X N (N is Width*Height)
"""
m_batchsize, C, width, height = x.size()
proj_query = self.query_conv(x).view(m_batchsize, -1, width * height).permute(0, 2, 1) # B X CX(N)
proj_key = self.key_conv(x).view(m_batchsize, -1, width * height) # B X C x (*W*H)
energy = torch.bmm(proj_query, proj_key) # transpose check
attention = self.softmax(energy) # BX (N) X (N)
proj_value = self.value_conv(x).view(m_batchsize, -1, width * height) # B X C X N
out = torch.bmm(proj_value, attention.permute(0, 2, 1))
out = out.view(m_batchsize, C, width, height)
out = self.gamma * out + x
return out, attention
if __name__ == "__main__":
x = torch.randn(3, 10, 128, 128)
gt = torch.randn(3, 64, 384, 384)
#net = ProbaV_basic(inchannel=10)
net = module.Trellis_Structure()
#net = CARN(10, 64, 3)
y = net(gt)
print(y.shape) | [
"researches.img2img.probaV_SR.pvsr_module.CarnBlock",
"torch.nn.ReLU",
"torch.nn.Sequential",
"torch.nn.L1Loss",
"torch.nn.MSELoss",
"torch.bmm",
"researches.img2img.probaV_SR.pvsr_module.BasicBlock",
"omni_torch.networks.blocks.conv_block",
"torchvision.models.vgg16_bn",
"torch.randn",
"researc... | [((15513, 15541), 'torch.randn', 'torch.randn', (['(3)', '(10)', '(128)', '(128)'], {}), '(3, 10, 128, 128)\n', (15524, 15541), False, 'import torch\n'), ((15551, 15579), 'torch.randn', 'torch.randn', (['(3)', '(64)', '(384)', '(384)'], {}), '(3, 64, 384, 384)\n', (15562, 15579), False, 'import torch\n'), ((15633, 15659), 'researches.img2img.probaV_SR.pvsr_module.Trellis_Structure', 'module.Trellis_Structure', ([], {}), '()\n', (15657, 15659), True, 'import researches.img2img.probaV_SR.pvsr_module as module\n'), ((621, 689), 'researches.img2img.probaV_SR.pvsr_module.MeanShift', 'module.MeanShift', (['(2 ** img_bit)', 'rgb_mean', 'rgb_std'], {'channel': 'inchannel'}), '(2 ** img_bit, rgb_mean, rgb_std, channel=inchannel)\n', (637, 689), True, 'import researches.img2img.probaV_SR.pvsr_module as module\n'), ((714, 782), 'researches.img2img.probaV_SR.pvsr_module.MeanShift', 'module.MeanShift', (['(2 ** img_bit)', 'rgb_mean', 'rgb_std'], {'sign': '(1)', 'channel': '(1)'}), '(2 ** img_bit, rgb_mean, rgb_std, sign=1, channel=1)\n', (730, 782), True, 'import researches.img2img.probaV_SR.pvsr_module as module\n'), ((813, 851), 'torch.nn.Conv2d', 'nn.Conv2d', (['inchannel', 'filters', '(3)', '(1)', '(1)'], {}), '(inchannel, filters, 3, 1, 1)\n', (822, 851), True, 'import torch.nn as nn\n'), ((879, 913), 'researches.img2img.probaV_SR.pvsr_module.CarnBlock', 'module.CarnBlock', (['filters', 'filters'], {}), '(filters, filters)\n', (895, 913), True, 'import researches.img2img.probaV_SR.pvsr_module as module\n'), ((932, 966), 'researches.img2img.probaV_SR.pvsr_module.CarnBlock', 'module.CarnBlock', (['filters', 'filters'], {}), '(filters, filters)\n', (948, 966), True, 'import researches.img2img.probaV_SR.pvsr_module as module\n'), ((985, 1019), 'researches.img2img.probaV_SR.pvsr_module.CarnBlock', 'module.CarnBlock', (['filters', 'filters'], {}), '(filters, filters)\n', (1001, 1019), True, 'import researches.img2img.probaV_SR.pvsr_module as module\n'), ((1038, 
1086), 'researches.img2img.probaV_SR.pvsr_module.BasicBlock', 'module.BasicBlock', (['(filters * 2)', 'filters', '(1)', '(1)', '(0)'], {}), '(filters * 2, filters, 1, 1, 0)\n', (1055, 1086), True, 'import researches.img2img.probaV_SR.pvsr_module as module\n'), ((1105, 1153), 'researches.img2img.probaV_SR.pvsr_module.BasicBlock', 'module.BasicBlock', (['(filters * 3)', 'filters', '(1)', '(1)', '(0)'], {}), '(filters * 3, filters, 1, 1, 0)\n', (1122, 1153), True, 'import researches.img2img.probaV_SR.pvsr_module as module\n'), ((1172, 1220), 'researches.img2img.probaV_SR.pvsr_module.BasicBlock', 'module.BasicBlock', (['(filters * 4)', 'filters', '(1)', '(1)', '(0)'], {}), '(filters * 4, filters, 1, 1, 0)\n', (1189, 1220), True, 'import researches.img2img.probaV_SR.pvsr_module as module\n'), ((1371, 1413), 'researches.img2img.probaV_SR.pvsr_module.UpsampleBlock', 'module.UpsampleBlock', (['filters'], {'scale': 'scale'}), '(filters, scale=scale)\n', (1391, 1413), True, 'import researches.img2img.probaV_SR.pvsr_module as module\n'), ((1434, 1464), 'torch.nn.Conv2d', 'nn.Conv2d', (['filters', '(1)', '(3)', '(1)', '(1)'], {}), '(filters, 1, 3, 1, 1)\n', (1443, 1464), True, 'import torch.nn as nn\n'), ((1484, 1495), 'torch.nn.L1Loss', 'nn.L1Loss', ([], {}), '()\n', (1493, 1495), True, 'import torch.nn as nn\n'), ((1522, 1534), 'torch.nn.MSELoss', 'nn.MSELoss', ([], {}), '()\n', (1532, 1534), True, 'import torch.nn as nn\n'), ((1692, 1718), 'torch.cat', 'torch.cat', (['[c0, b1]'], {'dim': '(1)'}), '([c0, b1], dim=1)\n', (1701, 1718), False, 'import torch\n'), ((1791, 1817), 'torch.cat', 'torch.cat', (['[c1, b2]'], {'dim': '(1)'}), '([c1, b2], dim=1)\n', (1800, 1817), False, 'import torch\n'), ((1890, 1916), 'torch.cat', 'torch.cat', (['[c2, b3]'], {'dim': '(1)'}), '([c2, b3], dim=1)\n', (1899, 1916), False, 'import torch\n'), ((3278, 3344), 'researches.img2img.probaV_SR.pvsr_module.MeanShift', 'module.MeanShift', (['(2 ** img_bit)', 'rgb_mean', 'rgb_std'], {'channel': 
'channel'}), '(2 ** img_bit, rgb_mean, rgb_std, channel=channel)\n', (3294, 3344), True, 'import researches.img2img.probaV_SR.pvsr_module as module\n'), ((3367, 3435), 'researches.img2img.probaV_SR.pvsr_module.MeanShift', 'module.MeanShift', (['(2 ** img_bit)', 'rgb_mean', 'rgb_std'], {'sign': '(1)', 'channel': '(1)'}), '(2 ** img_bit, rgb_mean, rgb_std, sign=1, channel=1)\n', (3383, 3435), True, 'import researches.img2img.probaV_SR.pvsr_module as module\n'), ((3454, 3547), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': 'channel', 'out_channels': 'filters', 'kernel_size': '(3)', 'padding': '(1)', 'stride': '(1)'}), '(in_channels=channel, out_channels=filters, kernel_size=3, padding\n =1, stride=1)\n', (3463, 3547), True, 'import torch.nn as nn\n'), ((3563, 3656), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': 'filters', 'out_channels': 'filters', 'kernel_size': '(3)', 'padding': '(1)', 'stride': '(1)'}), '(in_channels=filters, out_channels=filters, kernel_size=3, padding\n =1, stride=1)\n', (3572, 3656), True, 'import torch.nn as nn\n'), ((3930, 4016), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': '(filters * 3)', 'out_channels': 'filters', 'kernel_size': '(1)', 'padding': '(0)'}), '(in_channels=filters * 3, out_channels=filters, kernel_size=1,\n padding=0)\n', (3939, 4016), True, 'import torch.nn as nn\n'), ((4033, 4111), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': 'filters', 'out_channels': 'filters', 'kernel_size': '(3)', 'padding': '(1)'}), '(in_channels=filters, out_channels=filters, kernel_size=3, padding=1)\n', (4042, 4111), True, 'import torch.nn as nn\n'), ((4134, 4250), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': 'filters', 'out_channels': '(filters * upscale_factor * upscale_factor)', 'kernel_size': '(3)', 'padding': '(1)'}), '(in_channels=filters, out_channels=filters * upscale_factor *\n upscale_factor, kernel_size=3, padding=1)\n', (4143, 4250), True, 'import torch.nn as nn\n'), ((4309, 4340), 
'torch.nn.PixelShuffle', 'nn.PixelShuffle', (['upscale_factor'], {}), '(upscale_factor)\n', (4324, 4340), True, 'import torch.nn as nn\n'), ((4582, 4768), 'omni_torch.networks.blocks.conv_block', 'block.conv_block', (['filters', '[filters, filters, 1]'], {'kernel_sizes': '[3, 3, 1]', 'stride': '[1, 1, 1]', 'padding': '[1, 1, 0]', 'groups': '([1] * 3)', 'name': '"""norm_conv1"""', 'batch_norm': 'None', 'activation': 'None'}), "(filters, [filters, filters, 1], kernel_sizes=[3, 3, 1],\n stride=[1, 1, 1], padding=[1, 1, 0], groups=[1] * 3, name='norm_conv1',\n batch_norm=None, activation=None)\n", (4598, 4768), True, 'import omni_torch.networks.blocks as block\n'), ((5487, 5498), 'torch.nn.L1Loss', 'nn.L1Loss', ([], {}), '()\n', (5496, 5498), True, 'import torch.nn as nn\n'), ((5525, 5537), 'torch.nn.MSELoss', 'nn.MSELoss', ([], {}), '()\n', (5535, 5537), True, 'import torch.nn as nn\n'), ((5829, 5858), 'torch.cat', 'torch.cat', (['(f_1, f_2, f_3)', '(1)'], {}), '((f_1, f_2, f_3), 1)\n', (5838, 5858), False, 'import torch\n'), ((7266, 7363), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': 'input_dim', 'out_channels': 'output_dim', 'kernel_size': '(3)', 'padding': '(1)', 'stride': '(1)'}), '(in_channels=input_dim, out_channels=output_dim, kernel_size=3,\n padding=1, stride=1)\n', (7275, 7363), True, 'import torch.nn as nn\n'), ((7380, 7389), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (7387, 7389), True, 'import torch.nn as nn\n'), ((7492, 7514), 'torch.cat', 'torch.cat', (['(x, out)', '(1)'], {}), '((x, out), 1)\n', (7501, 7514), False, 'import torch\n'), ((7788, 7913), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': '(input_dim + nb_layers * growth_rate)', 'out_channels': 'growth_rate', 'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)'}), '(in_channels=input_dim + nb_layers * growth_rate, out_channels=\n growth_rate, kernel_size=1, stride=1, padding=0)\n', (7797, 7913), True, 'import torch.nn as nn\n'), ((8266, 8288), 'torch.nn.Sequential', 
'nn.Sequential', (['*layers'], {}), '(*layers)\n', (8279, 8288), True, 'import torch.nn as nn\n'), ((8511, 8536), 'torchvision.models.vgg16_bn', 'vgg16_bn', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (8519, 8536), False, 'from torchvision.models import resnet18, vgg16_bn\n'), ((8604, 8627), 'torch.nn.Sequential', 'nn.Sequential', (['*net[:7]'], {}), '(*net[:7])\n', (8617, 8627), True, 'import torch.nn as nn\n'), ((8702, 8727), 'torch.nn.Sequential', 'nn.Sequential', (['*net[7:14]'], {}), '(*net[7:14])\n', (8715, 8727), True, 'import torch.nn as nn\n'), ((10086, 10278), 'omni_torch.networks.blocks.conv_block', 'block.conv_block', (['inchannel', '[48 * group, 128 * group, 128 * group]'], {'kernel_sizes': '[3, 3, 1]', 'stride': '[2, 1, 1]', 'padding': '[1, 1, 0]', 'groups': '([group] * 3)', 'name': '"""down_block1"""', 'batch_norm': 'BN'}), "(inchannel, [48 * group, 128 * group, 128 * group],\n kernel_sizes=[3, 3, 1], stride=[2, 1, 1], padding=[1, 1, 0], groups=[\n group] * 3, name='down_block1', batch_norm=BN)\n", (10102, 10278), True, 'import omni_torch.networks.blocks as block\n'), ((10382, 10577), 'omni_torch.networks.blocks.conv_block', 'block.conv_block', (['(128 * group)', '[256 * group, 256 * group, 256 * group]'], {'kernel_sizes': '[3, 3, 1]', 'stride': '[2, 1, 1]', 'padding': '[1, 1, 0]', 'groups': '([group] * 3)', 'name': '"""down_block2"""', 'batch_norm': 'BN'}), "(128 * group, [256 * group, 256 * group, 256 * group],\n kernel_sizes=[3, 3, 1], stride=[2, 1, 1], padding=[1, 1, 0], groups=[\n group] * 3, name='down_block2', batch_norm=BN)\n", (10398, 10577), True, 'import omni_torch.networks.blocks as block\n'), ((10681, 10864), 'omni_torch.networks.blocks.conv_block', 'block.conv_block', (['(256 * group)', '[256, 256, 256]'], {'kernel_sizes': '[3, 3, 3]', 'stride': '([1] * 3)', 'padding': '[2, 1, 1]', 'groups': '([1] * 3)', 'dilation': '[2, 1, 1]', 'name': '"""norm_conv1"""', 'batch_norm': 'BN'}), "(256 * group, [256, 256, 256], kernel_sizes=[3, 
3, 3],\n stride=[1] * 3, padding=[2, 1, 1], groups=[1] * 3, dilation=[2, 1, 1],\n name='norm_conv1', batch_norm=BN)\n", (10697, 10864), True, 'import omni_torch.networks.blocks as block\n'), ((11033, 11221), 'omni_torch.networks.blocks.conv_block', 'block.conv_block', (['(256)', '[256, 256, 256]'], {'kernel_sizes': '[5, 3, 3]', 'stride': '[3, 1, 1]', 'padding': '[1, 1, 1]', 'groups': '([1] * 3)', 'name': '"""up_block1"""', 'batch_norm': 'BN', 'transpose': '[True, False, False]'}), "(256, [256, 256, 256], kernel_sizes=[5, 3, 3], stride=[3, 1,\n 1], padding=[1, 1, 1], groups=[1] * 3, name='up_block1', batch_norm=BN,\n transpose=[True, False, False])\n", (11049, 11221), True, 'import omni_torch.networks.blocks as block\n'), ((11322, 11475), 'omni_torch.networks.blocks.conv_block', 'block.conv_block', (['(256)', '[256, 256, 256]'], {'kernel_sizes': '[1, 3, 3]', 'stride': '[1, 1, 1]', 'padding': '[0, 1, 1]', 'groups': '([1] * 3)', 'name': '"""norm_conv2"""', 'batch_norm': 'BN'}), "(256, [256, 256, 256], kernel_sizes=[1, 3, 3], stride=[1, 1,\n 1], padding=[0, 1, 1], groups=[1] * 3, name='norm_conv2', batch_norm=BN)\n", (11338, 11475), True, 'import omni_torch.networks.blocks as block\n'), ((11537, 11725), 'omni_torch.networks.blocks.conv_block', 'block.conv_block', (['(256)', '[256, 256, 256]'], {'kernel_sizes': '[4, 3, 3]', 'stride': '[2, 1, 1]', 'padding': '[1, 1, 1]', 'groups': '([1] * 3)', 'name': '"""up_block2"""', 'batch_norm': 'BN', 'transpose': '[True, False, False]'}), "(256, [256, 256, 256], kernel_sizes=[4, 3, 3], stride=[2, 1,\n 1], padding=[1, 1, 1], groups=[1] * 3, name='up_block2', batch_norm=BN,\n transpose=[True, False, False])\n", (11553, 11725), True, 'import omni_torch.networks.blocks as block\n'), ((11826, 11979), 'omni_torch.networks.blocks.conv_block', 'block.conv_block', (['(256)', '[256, 128, 128]'], {'kernel_sizes': '[1, 3, 3]', 'stride': '[1, 1, 1]', 'padding': '[0, 1, 1]', 'groups': '([1] * 3)', 'name': '"""norm_conv3"""', 'batch_norm': 
'BN'}), "(256, [256, 128, 128], kernel_sizes=[1, 3, 3], stride=[1, 1,\n 1], padding=[0, 1, 1], groups=[1] * 3, name='norm_conv3', batch_norm=BN)\n", (11842, 11979), True, 'import omni_torch.networks.blocks as block\n'), ((12043, 12231), 'omni_torch.networks.blocks.conv_block', 'block.conv_block', (['(128)', '[128, 128, 128]'], {'kernel_sizes': '[4, 3, 3]', 'stride': '[2, 1, 1]', 'padding': '[1, 1, 1]', 'groups': '([1] * 3)', 'name': '"""up_block3"""', 'batch_norm': 'BN', 'transpose': '[True, False, False]'}), "(128, [128, 128, 128], kernel_sizes=[4, 3, 3], stride=[2, 1,\n 1], padding=[1, 1, 1], groups=[1] * 3, name='up_block3', batch_norm=BN,\n transpose=[True, False, False])\n", (12059, 12231), True, 'import omni_torch.networks.blocks as block\n'), ((12332, 12505), 'omni_torch.networks.blocks.conv_block', 'block.conv_block', (['(128)', '[128, 64, 64]'], {'kernel_sizes': '[1, 3, 3]', 'stride': '[1, 1, 1]', 'padding': '[0, 1, 1]', 'groups': '([1] * 3)', 'name': '"""norm_conv4"""', 'batch_norm': 'BN', 'activation': 'None'}), "(128, [128, 64, 64], kernel_sizes=[1, 3, 3], stride=[1, 1, \n 1], padding=[0, 1, 1], groups=[1] * 3, name='norm_conv4', batch_norm=BN,\n activation=None)\n", (12348, 12505), True, 'import omni_torch.networks.blocks as block\n'), ((12609, 12779), 'omni_torch.networks.blocks.conv_block', 'block.conv_block', (['(64)', '[64, 32, 32]'], {'kernel_sizes': '[1, 3, 3]', 'stride': '[1, 1, 1]', 'padding': '[0, 1, 1]', 'groups': '([1] * 3)', 'name': '"""norm_conv5"""', 'batch_norm': 'BN', 'activation': 'None'}), "(64, [64, 32, 32], kernel_sizes=[1, 3, 3], stride=[1, 1, 1],\n padding=[0, 1, 1], groups=[1] * 3, name='norm_conv5', batch_norm=BN,\n activation=None)\n", (12625, 12779), True, 'import omni_torch.networks.blocks as block\n'), ((12884, 13066), 'omni_torch.networks.blocks.conv_block', 'block.conv_block', (['(32)', '[32, 16, 16, 1]'], {'kernel_sizes': '[1, 3, 3, 3]', 'stride': '[1, 1, 1, 1]', 'padding': '[0, 1, 1, 1]', 'groups': '([1] * 4)', 'name': 
'"""norm_conv6"""', 'batch_norm': 'BN', 'activation': 'None'}), "(32, [32, 16, 16, 1], kernel_sizes=[1, 3, 3, 3], stride=[1,\n 1, 1, 1], padding=[0, 1, 1, 1], groups=[1] * 4, name='norm_conv6',\n batch_norm=BN, activation=None)\n", (12900, 13066), True, 'import omni_torch.networks.blocks as block\n'), ((14206, 14276), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': 'in_dim', 'out_channels': '(in_dim // 8)', 'kernel_size': '(1)'}), '(in_channels=in_dim, out_channels=in_dim // 8, kernel_size=1)\n', (14215, 14276), True, 'import torch.nn as nn\n'), ((14301, 14371), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': 'in_dim', 'out_channels': '(in_dim // 8)', 'kernel_size': '(1)'}), '(in_channels=in_dim, out_channels=in_dim // 8, kernel_size=1)\n', (14310, 14371), True, 'import torch.nn as nn\n'), ((14398, 14463), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': 'in_dim', 'out_channels': 'in_dim', 'kernel_size': '(1)'}), '(in_channels=in_dim, out_channels=in_dim, kernel_size=1)\n', (14407, 14463), True, 'import torch.nn as nn\n'), ((14538, 14556), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(-1)'}), '(dim=-1)\n', (14548, 14556), True, 'import torch.nn as nn\n'), ((15091, 15122), 'torch.bmm', 'torch.bmm', (['proj_query', 'proj_key'], {}), '(proj_query, proj_key)\n', (15100, 15122), False, 'import torch\n'), ((1268, 1337), 'researches.img2img.probaV_SR.pvsr_module.Trellis_Structure', 'module.Trellis_Structure', ([], {'filters': 'filters', 'depth': '(4)', 'out_depth': 'filters'}), '(filters=filters, depth=4, out_depth=filters)\n', (1292, 1337), True, 'import researches.img2img.probaV_SR.pvsr_module as module\n'), ((4483, 4546), 'researches.img2img.probaV_SR.pvsr_module.Trellis_Structure', 'module.Trellis_Structure', ([], {'filters': 'filters', 'depth': '(4)', 'out_depth': '(1)'}), '(filters=filters, depth=4, out_depth=1)\n', (4507, 4546), True, 'import researches.img2img.probaV_SR.pvsr_module as module\n'), ((14498, 14512), 'torch.zeros', 'torch.zeros', 
(['(1)'], {}), '(1)\n', (14509, 14512), False, 'import torch\n'), ((2496, 2513), 'torch.tensor', 'torch.tensor', (['[0]'], {}), '([0])\n', (2508, 2513), False, 'import torch\n'), ((7028, 7045), 'torch.tensor', 'torch.tensor', (['[0]'], {}), '([0])\n', (7040, 7045), False, 'import torch\n')] |
#!/usr/bin/env python3
import functools
import itertools
import operator
import re
class _Topic:
def __init__(self, mediator):
self._mediator = mediator
def _get_published_data_for_topic(self, topic):
return [publisher(topic) for publisher in self._mediator._get_publishers_for_topic(topic)]
def _notify_subscribers_for_topic(self, subscribers, topic):
assert _Topic.is_single_topic(topic)
for subscriber in subscribers:
subscriber(topic)
@staticmethod
def is_multi_topic(*topics):
return all(map(lambda topic: isinstance(topic, _MultiTopic), topics))
@staticmethod
def is_single_topic(*topics):
return all(map(lambda topic: isinstance(topic, _SingleTopic), topics))
class _SingleTopic(_Topic):
    """A topic identified by an exact name; hashable so it can key mediator dicts."""
    def __init__(self, mediator, name):
        super().__init__(mediator)
        self._name = name
    def __eq__(self, other):
        # Guard against cross-type comparison (e.g. with a _MultiTopic or an
        # arbitrary object): the unchecked `other._name` access previously
        # raised AttributeError, which could break dict lookups on a hash
        # collision between topic kinds.
        if not isinstance(other, _SingleTopic):
            return NotImplemented
        return self._name == other._name
    def __hash__(self):
        return hash(self._name)
    def __repr__(self):
        return 'SingleTopic[{}]'.format(self._name)
    def get_published_data(self):
        """Return the data from every publisher registered on this exact topic."""
        return self._get_published_data_for_topic(self)
    def matches(self, topic):
        """Return True when *topic* carries exactly the same name."""
        return self._name == topic._name
    def notify_subscribers(self, *subscribers):
        """Deliver this topic to each of the given subscriber callables."""
        self._notify_subscribers_for_topic(subscribers, self)
class _MultiTopic(_Topic):
    """A pattern topic: matches single-topic names via a compiled regex (fullmatch)."""
    def __init__(self, mediator, name_pattern):
        super().__init__(mediator)
        self._name_pattern = name_pattern
    def __eq__(self, other):
        # Guard against cross-type comparison: the unchecked attribute access
        # previously raised AttributeError when compared with a _SingleTopic
        # or any other object.
        if not isinstance(other, _MultiTopic):
            return NotImplemented
        return self._name_pattern == other._name_pattern
    def __hash__(self):
        return hash(self._name_pattern)
    def __repr__(self):
        return 'MultiTopic[{}]'.format(self._name_pattern)
    def get_published_data(self):
        """Chain the published data of every publisher topic whose name matches."""
        return itertools.chain.from_iterable(
            self._get_published_data_for_topic(publisher_topic)
            for publisher_topic in self._mediator._get_all_publisher_topics()
            if self._name_pattern.fullmatch(publisher_topic._name)
        )
    def matches(self, topic):
        """Return a truthy match object when *topic*'s name fully matches the pattern."""
        return self._name_pattern.fullmatch(topic._name)
    def notify_subscribers(self, *subscribers):
        """Deliver every matching publisher topic to the given subscriber callables."""
        for publisher_topic in self._mediator._get_all_publisher_topics():
            if self._name_pattern.fullmatch(publisher_topic._name):
                self._notify_subscribers_for_topic(subscribers, publisher_topic)
class _Value:
def __init__(self, mediator, name):
self._mediator = mediator
self._published_topics = []
self._topic = mediator.new_single_topic(name)
def _notify_subscribers(self):
self._mediator.notify_subscribers(*self._published_topics)
def get_topic(self):
return self._topic
def publish(self, *topics):
assert _Topic.is_single_topic(*topics)
self._published_topics += topics
self._mediator.publish(lambda topic: self, *topics)
class _StaticValue(_Value):
    """A value wrapping a plain constant; changing it notifies subscribers."""
    def __init__(self, mediator, name, value):
        super().__init__(mediator, name)
        self._value = value
    def get_value(self):
        """Return the stored constant."""
        return self._value
    def set_value(self, value):
        """Replace the stored constant, then push a change notification."""
        self._value = value
        self._notify_subscribers()
class _DynamicValue(_Value):
    """A value computed as the sum of every value published on its subscribed topics."""
    def __init__(self, mediator, name):
        super().__init__(mediator, name)
        self._subscribed_topics = []
    def _get_subscribed_values(self):
        # Yields the value objects currently published on the subscribed topics.
        return self._mediator.get_published_data(*self._subscribed_topics)
    def get_value(self):
        """Return the sum of all subscribed values (0 when there are none)."""
        # sum() replaces functools.reduce(operator.add, [...], 0); behavior is
        # identical, including the 0 result for an empty subscription set.
        return sum(value.get_value() for value in self._get_subscribed_values())
    def subscribe(self, *topics):
        """Start listening on *topics*; any change re-notifies this value's own subscribers."""
        self._subscribed_topics += topics
        self._mediator.subscribe(lambda topic: self._notify_subscribers(), *topics)
class Mediator:
    """Central pub/sub hub: routes published single topics to matching subscribers.

    Publishers and subscribers are plain callables keyed by topic object.
    Single topics match by exact name; multi topics match by regex
    (see _SingleTopic / _MultiTopic).  Observers receive callbacks about
    registrations and publications, mainly for tracing/debugging.
    """
    def __init__(self):
        self._observers = []
        # Both dicts map a topic object -> list of registered callables.
        self._publishers_by_topic = {}
        self._subscribers_by_topic = {}
    def _add_publisher(self, publisher, topic):
        # Publishers may only register on concrete (single) topics.
        assert _Topic.is_single_topic(topic)
        publishers = self._get_publishers_for_topic(topic, create_if_absent=True)
        publishers.append(publisher)
        self._notify_observers_that_publisher_added(publisher, topic)
    def _add_subscriber(self, subscriber, topic):
        # Subscribers may register on single OR multi (pattern) topics.
        subscribers = self._get_subscribers_for_topic(topic, create_if_absent=True)
        subscribers.append(subscriber)
        self._notify_observers_that_subscriber_added(subscriber, topic)
    def _get_all_publisher_topics(self):
        return self._publishers_by_topic.keys()
    def _get_all_subscriber_topics(self):
        return self._subscribers_by_topic.keys()
    def _get_publishers_for_topic(self, topic, create_if_absent=False):
        assert _Topic.is_single_topic(topic)
        if create_if_absent:
            # setdefault inserts an empty list so callers can append in place.
            return self._publishers_by_topic.setdefault(topic, [])
        else:
            return self._publishers_by_topic.get(topic, [])
    def _get_subscribers_for_topic(self, topic, create_if_absent=False):
        if create_if_absent:
            return self._subscribers_by_topic.setdefault(topic, [])
        else:
            return self._subscribers_by_topic.get(topic, [])
    def _notify_observers(self, action):
        # Apply *action* (a callable taking an observer) to every observer.
        for observer in self._observers:
            action(observer)
    def _notify_observers_that_publisher_added(self, publisher, topic):
        self._notify_observers(lambda observer: observer.publisher_added(publisher, topic))
    def _notify_observers_that_subscriber_added(self, subscriber, topic):
        self._notify_observers(lambda observer: observer.subscriber_added(subscriber, topic))
    def _notify_observers_that_topic_published(self, topic):
        self._notify_observers(lambda observer: observer.topic_published(topic))
    def _notify_subscribers(self, topic):
        assert _Topic.is_single_topic(topic)
        self._notify_observers_that_topic_published(topic)
        # Fan out to every subscriber whose (possibly pattern) topic matches
        # the published single topic.
        for subscriber_topic in self._get_all_subscriber_topics():
            if subscriber_topic.matches(topic):
                topic.notify_subscribers(*self._get_subscribers_for_topic(subscriber_topic))
    def add_observer(self, observer):
        """Register an observer exposing publisher_added/subscriber_added/topic_published."""
        self._observers.append(observer)
    def get_published_data(self, *topics):
        """Return an iterator over the data currently published on *topics*."""
        return itertools.chain.from_iterable([
            topic.get_published_data() for topic in topics
        ])
    def new_dynamic_value(self, name):
        """Create a sum-of-subscriptions value, published on its own single topic."""
        value = _DynamicValue(self, name)
        value.publish(value.get_topic())
        return value
    def new_multi_topic(self, name_pattern):
        """Create a pattern topic from a compiled regex."""
        return _MultiTopic(self, name_pattern)
    def new_single_topic(self, name):
        """Create an exact-name topic."""
        return _SingleTopic(self, name)
    def new_static_value(self, name, value=0):
        """Create a constant value, published on its own single topic."""
        value = _StaticValue(self, name, value)
        value.publish(value.get_topic())
        return value
    def notify_subscribers(self, *topics):
        """Deliver a change notification for each given single topic."""
        for topic in topics:
            self._notify_subscribers(topic)
    def publish(self, publisher, *topics):
        """Register *publisher* (callable taking a topic) on each single topic, then notify."""
        for topic in topics:
            self._add_publisher(publisher, topic)
            self._notify_subscribers(topic)
    def subscribe(self, subscriber, *topics):
        """Register *subscriber* (callable taking a topic) and notify it immediately."""
        for topic in topics:
            self._add_subscriber(subscriber, topic)
            topic.notify_subscribers(subscriber)
if __name__ == '__main__':
    # Demo / smoke test: exercises the three ways a composite value can be
    # wired to its components through the mediator.
    class Observer:
        """Logs mediator activity; attach via mediator.add_observer for tracing."""
        def publisher_added(self, publisher, topic):
            print('[observer] added publisher "{}" for topic "{}"'.format(publisher, topic))
        def subscriber_added(self, subscriber, topic):
            print('[observer] added subscriber "{}" for topic "{}"'.format(subscriber, topic))
        def topic_published(self, topic):
            print('[observer] topic "{}" published'.format(topic))
    def print_value(message, value):
        # Print a value's topic name alongside its current computed value.
        print('[{}] {} = {}'.format(message, value.get_topic(), value.get_value()))
    mediator = Mediator()
    # Global watcher: print any astr/aint/adex value whenever it is (re)published.
    mediator.subscribe(
        lambda topic: [
            print_value('notification', value)
            for value in mediator.get_published_data(topic)
        ],
        mediator.new_multi_topic(re.compile('astr|aint|adex'))
    )
    # uncomment the following line to enable verbose Mediator logging
    #mediator.add_observer(Observer())
    # Case 1: composite value subscribes to component values
    astr_base = mediator.new_static_value('astr_base', 14)
    astr_adj = mediator.new_static_value('astr_adj', 2)
    astr = mediator.new_dynamic_value('astr')
    print_value('initial', astr)
    astr.subscribe(astr_base.get_topic())
    astr.subscribe(astr_adj.get_topic())
    astr_adj.set_value(-1)
    astr_base.set_value(10)
    print_value('final', astr)
    # Case 2: component values publish to composite value
    aint_base = mediator.new_static_value('aint_base', 12)
    aint_adj = mediator.new_static_value('aint_adj', 1)
    aint = mediator.new_dynamic_value('aint')
    print_value('initial', aint)
    aint_contrib_topic = mediator.new_single_topic('aint_contrib')
    aint.subscribe(aint_contrib_topic)
    aint_base.publish(aint_contrib_topic)
    aint_adj.publish(aint_contrib_topic)
    aint_adj.set_value(-3)
    aint_base.set_value(8)
    print_value('final', aint)
    # Case 3: composite value subscribes to pattern; component values publish to topics that match pattern
    adex_base = mediator.new_static_value('adex_base', 13)
    adex_adj = mediator.new_static_value('adex_adj', 3)
    adex = mediator.new_dynamic_value('adex')
    print_value('initial', adex)
    adex.subscribe(mediator.new_multi_topic(re.compile('adex_.+')))
    adex_adj.set_value(-2)
    adex_base.set_value(9)
    print_value('final', adex)
| [
"re.compile"
] | [((8155, 8183), 're.compile', 're.compile', (['"""astr|aint|adex"""'], {}), "('astr|aint|adex')\n", (8165, 8183), False, 'import re\n'), ((9606, 9627), 're.compile', 're.compile', (['"""adex_.+"""'], {}), "('adex_.+')\n", (9616, 9627), False, 'import re\n')] |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2019-01-20 12:04
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds `qb_realm_id` to the crm Profile model.
    dependencies = [("crm", "0016_profile")]
    operations = [
        migrations.AddField(
            model_name="profile",
            name="qb_realm_id",
            # NOTE(review): default=b"" is a bytes literal on a TextField --
            # almost certainly a Python 2 / 2to3 artifact; a str default ("")
            # was probably intended.  Left byte-identical because applied
            # migrations should not be edited after the fact.
            field=models.TextField(blank=True, default=b""),
        )
    ]
| [
"django.db.models.TextField"
] | [((376, 417), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'default': "b''"}), "(blank=True, default=b'')\n", (392, 417), False, 'from django.db import migrations, models\n')] |
# MIT License
#
# Copyright (c) 2018 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import hou
import null_api
import hdaparmutils
class FKControl(null_api.Null):
    """A null node acting as an FK control that drives a target object node.

    The control null is created next to the target, snapped to its world
    transform (offset baked into the pre-transform), and its t/r/s parms
    are wired into the target according to a 9-bit mask
    (tx ty tz rx ry rz sx sy sz, most-significant bit first).
    """
    target_node = None  # type: hou.ObjNode
    active_parms = ()  # parms of this control actually connected to the target

    def __init__(self, target_node, mask=511, folder=None):
        """Create the FK control null.

        :param target_node: object node to drive
        :param mask: 9-bit int (or binary string, e.g. "111111000")
                     selecting which of tx..sz to connect
        :param folder: optional parm folder passed through to promoteTRS
        """
        with hou.undos.group("Create FK control"):
            self.target_node = target_node
            name = self.target_node.name() + "_FK"
            node = target_node.parent().createNode("null", name)
            null_api.Null.__init__(self, node)
            # Insert the control between the target and its current first input.
            self.node.setFirstInput(self.target_node.inputs()[0])
            # Snap to the target and bake the offset into the pre-transform.
            self.node.setWorldTransform(self.target_node.worldTransform())
            self.node.moveParmTransformIntoPreTransform()
            self.node.parm("rOrd").set(self.target_node.evalParm("rOrd"))
            if isinstance(mask, str):
                mask = int(mask, 2)  # accept a binary-string mask
            self.connectparms(mask)
            self.promoteTRS(folder=folder)

    def connectparms(self, mask=511):
        """Wire target parms to this control per *mask*; lock the rest."""
        parm_mask = format(mask, "b").zfill(9)
        parms = ("tx", "ty", "tz", "rx", "ry", "rz", "sx", "sy", "sz")
        for i, p in enumerate(parms):
            target_parm = self.target_node.parm(p)
            # `==` replaces the fragile literal identity check `is "1"`
            # (implementation-dependent interning; SyntaxWarning on 3.8+).
            # Parentheses spell out the pre-existing precedence: a hidden
            # target parm is always connected, regardless of the mask bit.
            # NOTE(review): that precedence looks suspicious -- the intent
            # may have been `bit set and not (locked or hidden)`; confirm
            # before changing behavior.
            if (parm_mask[i] == "1" and not target_parm.isLocked()) or target_parm.isHidden():
                target_parm.set(self.node.parm(p))
                self.active_parms += (self.node.parm(p),)
            else:
                target_parm.lock(True)
                self.node.parm(p).lock(True)

    def promoteTRS(self, mask=511, lock_unused=True, split_vectors=(), folder=None):
        """Promote the control's translate/rotate/scale parm tuples onto the HDA."""
        parms = (
            self.node.parmTuple("t"),
            self.node.parmTuple("r"),
            self.node.parmTuple("s")
        )
        for p in parms:
            hdaparmutils.promoteParm(p, folder=folder, apply_to_definition=False)
| [
"null_api.Null.__init__",
"hdaparmutils.promoteParm",
"hou.undos.group"
] | [((1357, 1393), 'hou.undos.group', 'hou.undos.group', (['"""Create FK control"""'], {}), "('Create FK control')\n", (1372, 1393), False, 'import hou\n'), ((1574, 1608), 'null_api.Null.__init__', 'null_api.Null.__init__', (['self', 'node'], {}), '(self, node)\n', (1596, 1608), False, 'import null_api\n'), ((2889, 2958), 'hdaparmutils.promoteParm', 'hdaparmutils.promoteParm', (['p'], {'folder': 'folder', 'apply_to_definition': '(False)'}), '(p, folder=folder, apply_to_definition=False)\n', (2913, 2958), False, 'import hdaparmutils\n')] |
import time
import json
import os
class LimitExceeded(RuntimeError):
    """Raised once the daily API request budget has been exhausted."""
class ServerError(RuntimeError):
    """Raised when the remote service answers with a non-200 status."""
class WebAPI:
    """Rate-limited web API client state, persisted between runs.

    Tracks a daily request budget (default 100,000) and throttles to
    roughly one request per second.  The counter and the start of the
    current 24h window are saved to ~/.config/api/<name> when used as a
    context manager.
    """
    URL = ''

    def __init__(self, name):
        # 100,000 API calls per day.
        # 1 request per second
        # 60 request per minute
        self.max_api_call_day = 100000
        self.start = None  # epoch seconds marking the start of the current 24h window
        self.name = name
        # make sure we respect the T&C of valve and do not get banned
        self.wait_time = 1
        self.limiter = True
        self.request_count = 0

    def state_path(self):
        """Return the file path used to persist the request counter."""
        return os.path.expanduser(f'~/.config/api/{self.name}')

    def __enter__(self):
        # Restore the counter state from a previous run, if any.
        if os.path.exists(self.state_path()):
            with open(self.state_path(), 'r') as f:
                save = json.load(f)
                self.start = save['start']
                self.request_count = save['count']
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Persist the counter state for the next run.
        os.makedirs(os.path.expanduser('~/.config/api'), exist_ok=True)
        with open(self.state_path(), 'w') as f:
            save = dict(
                start=self.start,
                count=self.request_count
            )
            json.dump(save, f)

    def limit_stats(self):
        """Return the fraction of the daily budget already consumed."""
        return self.request_count / self.max_api_call_day

    def handle_errors(self, response):
        """Back off for 30s and raise ServerError on a failed HTTP response."""
        if response.status_code == 503:
            time.sleep(30)
            raise ServerError
        if response.status_code != 200:
            print(f'[API] Received `{response.reason}`')
            time.sleep(30)
            raise ServerError

    def limit(self):
        """Record one request; throttle and enforce the daily budget."""
        if self.limiter:
            # sleep to never go over the 1 request per second limit
            time.sleep(self.wait_time)
        self.request_count += 1
        if self.start is None:
            self.start = time.time()
        # Reset the counter AND the window start after a day.  Previously only
        # the counter was reset, so once a day had elapsed the count was
        # zeroed on every call and the daily limit could never trigger again.
        if time.time() - self.start > 24 * 60 * 60:
            self.start = time.time()
            self.request_count = 0
        if self.request_count > self.max_api_call_day:
            raise LimitExceeded('Cannot make more requests today')
| [
"os.path.expanduser",
"time.sleep",
"json.load",
"time.time",
"json.dump"
] | [((583, 631), 'os.path.expanduser', 'os.path.expanduser', (['f"""~/.config/api/{self.name}"""'], {}), "(f'~/.config/api/{self.name}')\n", (601, 631), False, 'import os\n'), ((979, 1015), 'os.path.expanduser', 'os.path.expanduser', (['f"""~/.config/api"""'], {}), "(f'~/.config/api')\n", (997, 1015), False, 'import os\n'), ((1206, 1224), 'json.dump', 'json.dump', (['save', 'f'], {}), '(save, f)\n', (1215, 1224), False, 'import json\n'), ((1403, 1417), 'time.sleep', 'time.sleep', (['(30)'], {}), '(30)\n', (1413, 1417), False, 'import time\n'), ((1558, 1572), 'time.sleep', 'time.sleep', (['(30)'], {}), '(30)\n', (1568, 1572), False, 'import time\n'), ((1739, 1765), 'time.sleep', 'time.sleep', (['self.wait_time'], {}), '(self.wait_time)\n', (1749, 1765), False, 'import time\n'), ((779, 791), 'json.load', 'json.load', (['f'], {}), '(f)\n', (788, 791), False, 'import json\n'), ((1867, 1878), 'time.time', 'time.time', ([], {}), '()\n', (1876, 1878), False, 'import time\n'), ((1950, 1961), 'time.time', 'time.time', ([], {}), '()\n', (1959, 1961), False, 'import time\n')] |
from math import ceil
from PySide2.QtCore import QRect, QSize, Qt, QAbstractTableModel, QMimeData, QByteArray
from PySide2.QtGui import QPainter, QStandardItemModel, QStandardItem, QPen
from PySide2.QtWidgets import *
from models.constants import PropType, MimeType
from views.draftbar_element_view_ui import Ui_DraftElement
# Sidebar layout constants.
LIST_HEIGHT = 100  # NOTE(review): not referenced in this chunk -- presumably the element list height; confirm
ELEMENT_COLUMN_NUMBER = 3  # thumbnails per row in ElementListModel
ELEMENT_SIZE = 30  # thumbnail edge length, used as QSize(ELEMENT_SIZE, ELEMENT_SIZE)
class DraftbarElementView(QWidget):
    """Sidebar view for one draft-element category.

    Hosts a label, a (currently hidden) dropdown button and an element list
    backed by an ElementListModel.
    """
    def __init__(self, parent, category_name, element_model, element_controller):
        super(DraftbarElementView, self).__init__(parent)
        # Keep references to the MVC collaborators.
        self._category = category_name
        self._model = element_model
        self._ctrl = element_controller
        # Build the generated Qt UI; setupUi must run before widget access.
        self._ui = Ui_DraftElement()
        self._ui.setupUi(self)
        """connect widgets to controller"""
        self._ui.dropdown_button.hide()
        #self._ui.element_label.clicked.connect(self.toggle_list)
        #self._ui.dropdown_button.clicked.connect(self.toggle_list)
        """listen for model event signals"""
        """initialize view"""
        self._ui.element_label.setText(str(self._category))
        # Bridge the element model into the Qt list view via a table model.
        list_model = ElementListModel(self)
        list_model.set_model(self._model)
        self._ui.element_list.setModel(list_model)
        self.toggle_list()
    def toggle_list(self):
        """show or hide element list"""
        # todo toggle list
class ElementListModel(QAbstractTableModel):
    """Table model exposing draft elements as a fixed-width grid of thumbnails.

    Elements are laid out row-major, ELEMENT_COLUMN_NUMBER per row; the last
    row may contain empty (disabled) cells.  Each element is expected to
    provide `.icon` (a QIcon) and `.name` (used for drag-and-drop).
    """
    def __init__(self, parent=None):
        super(ElementListModel, self).__init__(parent)
        self._thumbnail_size = QSize(ELEMENT_SIZE, ELEMENT_SIZE)
        self._model = None
    def set_model(self, model):
        """Attach the sequence of elements to expose."""
        self._model = model
    def rowCount(self, parent=None, *args, **kwargs):
        return ceil(len(self._model) / ELEMENT_COLUMN_NUMBER)
    def columnCount(self, parent=None, *args, **kwargs):
        return ELEMENT_COLUMN_NUMBER
    def flags(self, index):
        """Disable the padding cells of the last row (no element behind them)."""
        element_index = index.row() * ELEMENT_COLUMN_NUMBER + index.column()
        if element_index >= len(self._model):
            return Qt.NoItemFlags
        return Qt.ItemIsEnabled | Qt.ItemIsSelectable | Qt.ItemIsDragEnabled
    def data(self, index, role=None):
        element_index = index.row() * ELEMENT_COLUMN_NUMBER + index.column()
        # last row might contain empty cells
        if element_index >= len(self._model):
            return None
        pix_map = self._model[element_index].icon.pixmap(self._thumbnail_size)
        # QPixmap.scaled() returns a NEW pixmap; the original code discarded
        # the result, so the aspect-ratio scaling silently never applied.
        pix_map = pix_map.scaled(self._thumbnail_size, Qt.KeepAspectRatio)
        if role == Qt.DecorationRole:
            return pix_map
        return None
    def mimeData(self, indexes):
        """Encode the dragged element's name for drop targets."""
        mime_data = QMimeData()
        element_index = indexes[0].row() * ELEMENT_COLUMN_NUMBER + indexes[0].column()
        # send name of process core to QGraphicsScene
        data_array = QByteArray(bytes(self._model[element_index].name, 'UTF-8'))
        mime_data.setData(MimeType.PROCESS_CORE.value, data_array)
        return mime_data
| [
"PySide2.QtCore.QMimeData",
"views.draftbar_element_view_ui.Ui_DraftElement",
"PySide2.QtCore.QSize"
] | [((756, 773), 'views.draftbar_element_view_ui.Ui_DraftElement', 'Ui_DraftElement', ([], {}), '()\n', (771, 773), False, 'from views.draftbar_element_view_ui import Ui_DraftElement\n'), ((1590, 1623), 'PySide2.QtCore.QSize', 'QSize', (['ELEMENT_SIZE', 'ELEMENT_SIZE'], {}), '(ELEMENT_SIZE, ELEMENT_SIZE)\n', (1595, 1623), False, 'from PySide2.QtCore import QRect, QSize, Qt, QAbstractTableModel, QMimeData, QByteArray\n'), ((2756, 2767), 'PySide2.QtCore.QMimeData', 'QMimeData', ([], {}), '()\n', (2765, 2767), False, 'from PySide2.QtCore import QRect, QSize, Qt, QAbstractTableModel, QMimeData, QByteArray\n')] |
from django.test import TestCase
from restclients.models.gws import GroupUser
from restclients.exceptions import DataFailureException
from restclients.gws import GWS
from restclients.test import fdao_gws_override
@fdao_gws_override
class TestGwsTrumbaGroup(TestCase):
def test_get_group(self):
group = GWS().get_group_by_id('u_eventcal_sea_1013649-editor')
self.assertEquals(group.name, "u_eventcal_sea_1013649-editor")
self.assertEquals(group.uwregid, "143bc3d173d244f6a2c3ced159ba9c97")
self.assertEquals(group.title,
"College of Arts and Sciences calendar editor group")
self.assertEquals(
group.description,
"Specifying the editors who are able to add/edit/delete any event on the corresponding Seattle Trumba calendar")
self.assertIsNotNone(group.admins)
self.assertEquals(len(group.admins) , 1)
self.assertEquals(group.admins[0].user_type, GroupUser.GROUP_TYPE)
self.assertEquals(group.admins[0].name, "u_eventcal_support")
self.assertIsNotNone(group.updaters)
self.assertEquals(len(group.updaters) , 1)
self.assertEquals(group.updaters[0].user_type, GroupUser.GROUP_TYPE)
self.assertEquals(
group.updaters[0].name, "u_eventcal_sea_1013649-editor")
self.assertIsNotNone(group.readers)
self.assertEquals(len(group.readers) , 1)
self.assertEquals(group.readers[0].user_type, GroupUser.NONE_TYPE)
self.assertEquals(group.readers[0].name, "dc=all")
self.assertIsNotNone(group.optouts)
self.assertEquals(len(group.optouts) , 1)
self.assertEquals(group.optouts[0].user_type, GroupUser.NONE_TYPE)
self.assertEquals(group.optouts[0].name, "dc=all")
| [
"restclients.gws.GWS"
] | [((317, 322), 'restclients.gws.GWS', 'GWS', ([], {}), '()\n', (320, 322), False, 'from restclients.gws import GWS\n')] |
from urllib.request import urlopen
import json
import requests
from django.db import models
from crypto.settings import env
from django.db.utils import IntegrityError
NOT_EXIST_ERROR = "Crypto with that name and currency does not exist in database"
EXTERNAL_API_ERROR = "External api did not send correct response"
NOT_EXIST_API_ERROR = "Crypto with that name and currency does not exist in external API"
class Asset(models.Model):
class Meta:
app_label = 'crypto'
constraints = [
models.CheckConstraint(
name="crypto_asset_type",
check=models.Q(asset_type__in=('crypto', 'currency', 'metal'))
)
]
name = models.CharField(max_length=30, null=False, unique=True)
converterPLN = models.FloatField(default=0)
converterEUR = models.FloatField(default=0)
converterUSD = models.FloatField(default=0)
guidA = models.CharField(max_length=40, unique=True)
asset_type = models.CharField(max_length=30)
crypto_url = 'http://api.nomics.com/v1/currencies/ticker?key=' + env('API_KEY')
server_url = 'https://localhost:44378/api/Asset/'
@staticmethod
def seed_dev_data():
Asset.create_asset(guidA="3fa85f64-5717-4562-b3fc-2c963f66afa9", asset_type="crypto", name="BTC",
converterEUR=100000.25, converterPLN=500000.25, converterUSD=110000.25)
Asset.create_asset(guidA="4fa85f64-5717-4562-b3fc-2c963f66afa9", asset_type="crypto", name="ETH",
converterEUR=10, converterPLN=50, converterUSD=11)
Asset.create_asset(guidA="5fa85f64-5717-4562-b3fc-2c963f66afa9", asset_type="metal", name="GOLD",
converterEUR=120, converterPLN=600, converterUSD=130)
Asset.create_asset(guidA="6fa85f64-5717-4562-b3fc-2c963f66afa9", asset_type="metal", name="SILVER",
converterEUR=100000.25, converterPLN=500000.25, converterUSD=110000.25)
Asset.create_asset(guidA="7fa85f64-5717-4562-b3fc-2c963f66afa9", asset_type="currency", name="PLN",
converterEUR=100000.25, converterPLN=500000.25, converterUSD=110000.25)
Asset.create_asset(guidA="8fa85f64-5717-4562-b3fc-2c963f66afa9", asset_type="currency", name="USD",
converterEUR=10, converterPLN=50, converterUSD=11)
Asset.create_asset(guidA="9fa85f64-5717-4562-b3fc-2c963f66afa9", asset_type="currency", name="EUR",
converterEUR=120, converterPLN=600, converterUSD=130)
Asset.create_asset(guidA="2fa85f64-5717-4562-b3fc-2c963f66afa9", asset_type="currency", name="GBP",
converterEUR=100000.25, converterPLN=500000.25, converterUSD=110000.25)
@staticmethod
def create_asset(guidA, asset_type, name, converterEUR, converterPLN, converterUSD):
try:
Asset(guidA=guidA, asset_type=asset_type, name=name, converterEUR=converterEUR,
converterPLN=converterPLN, converterUSD=converterUSD).save()
except IntegrityError:
return
def update_asset_price(self, name, currency_code):
from crypto.models.Alert import Alert
try:
asset = Asset.objects.get(name=name)
except Asset.DoesNotExist:
return NOT_EXIST_ERROR
price = self.get_new_crypto_price(name, currency_code=currency_code)
if price != EXTERNAL_API_ERROR and price != NOT_EXIST_ERROR:
self.set_asset_price(asset, currency_code, price)
Alert().check_alert(asset, currency_code)
@staticmethod
def set_asset_price(asset, currency_code, price):
if currency_code == 'EUR':
asset.converterEUR = price
if currency_code == 'PLN':
asset.converterPLN = price
if currency_code == 'USD':
asset.converterUSD = price
asset.save()
def get_new_crypto_price(self, name, currency_code):
response = urlopen(self.crypto_url + '&ids=' + name + '&convert=' + currency_code)
if response.status != 200:
return EXTERNAL_API_ERROR #add logging later
data_table = json.loads(response.read())
if not data_table:
return NOT_EXIST_API_ERROR #add logging later
return float(data_table[0]['price'])
def update_asset_in_server(self, name):
try:
asset = Asset.objects.get(name=name)
except Asset.DoesNotExist:
return NOT_EXIST_ERROR
json_data = {
"id": asset.guidA,
"type": asset.asset_type,
"name": name,
"converterPLN": asset.converterPLN,
"converterEUR": asset.converterEUR,
"converterUSD": asset.converterUSD
}
response = requests.put(self.server_url+asset.guidA, json=json_data, verify=False)
return response
| [
"django.db.models.FloatField",
"urllib.request.urlopen",
"requests.put",
"crypto.settings.env",
"django.db.models.Q",
"crypto.models.Alert.Alert",
"django.db.models.CharField"
] | [((695, 751), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)', 'null': '(False)', 'unique': '(True)'}), '(max_length=30, null=False, unique=True)\n', (711, 751), False, 'from django.db import models\n'), ((771, 799), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': '(0)'}), '(default=0)\n', (788, 799), False, 'from django.db import models\n'), ((819, 847), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': '(0)'}), '(default=0)\n', (836, 847), False, 'from django.db import models\n'), ((867, 895), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': '(0)'}), '(default=0)\n', (884, 895), False, 'from django.db import models\n'), ((908, 952), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(40)', 'unique': '(True)'}), '(max_length=40, unique=True)\n', (924, 952), False, 'from django.db import models\n'), ((970, 1001), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)'}), '(max_length=30)\n', (986, 1001), False, 'from django.db import models\n'), ((1072, 1086), 'crypto.settings.env', 'env', (['"""API_KEY"""'], {}), "('API_KEY')\n", (1075, 1086), False, 'from crypto.settings import env\n'), ((3988, 4059), 'urllib.request.urlopen', 'urlopen', (["(self.crypto_url + '&ids=' + name + '&convert=' + currency_code)"], {}), "(self.crypto_url + '&ids=' + name + '&convert=' + currency_code)\n", (3995, 4059), False, 'from urllib.request import urlopen\n'), ((4797, 4870), 'requests.put', 'requests.put', (['(self.server_url + asset.guidA)'], {'json': 'json_data', 'verify': '(False)'}), '(self.server_url + asset.guidA, json=json_data, verify=False)\n', (4809, 4870), False, 'import requests\n'), ((603, 659), 'django.db.models.Q', 'models.Q', ([], {'asset_type__in': "('crypto', 'currency', 'metal')"}), "(asset_type__in=('crypto', 'currency', 'metal'))\n", (611, 659), False, 'from django.db import models\n'), ((3553, 3560), 'crypto.models.Alert.Alert', 
'Alert', ([], {}), '()\n', (3558, 3560), False, 'from crypto.models.Alert import Alert\n')] |
from unittest import TestCase
from parameterized import parameterized, parameterized_class, param
# demonstration of wolever/parameterized
# ref: https://github.com/wolever/parameterized
# see-also:
# - http://h-miyako.hatenablog.com/entry/2017/08/16/173000
# - https://qiita.com/nittyan/items/0152a3b93e17c177f5f5
class TestParameterizedExpandDemo(TestCase):
@parameterized.expand(
[(1, 2, 3, 6), (10, 20, 30, 60), (4, 5, 6, 15),]
)
def test_add_xyz(self, x, y, z, expected):
self.assertEqual(x + y + z, expected)
@parameterized.expand(
[
param(1, 2, 3, 6),
param(10, 20, expected=130),
param(10, 20, 70),
param(20, 30, z=50),
param(0, 0),
]
)
def test_add_xyz_with_param(self, x, y, z=100, expected=100):
self.assertEqual(x + y + z, expected)
@parameterized_class(
[
{"x": 1, "y": 2, "z": 3, "expected": 6},
{"x": 10, "y": 20, "expected": 330},
{"expected": 600},
]
)
class TestParameterizedClassDemo(TestCase):
x: int = 100
y: int = 200
z: int = 300
def test_add_xyz(self):
self.assertEqual(self.x + self.y + self.z, self.expected)
| [
"parameterized.parameterized_class",
"parameterized.param",
"parameterized.parameterized.expand"
] | [((908, 1030), 'parameterized.parameterized_class', 'parameterized_class', (["[{'x': 1, 'y': 2, 'z': 3, 'expected': 6}, {'x': 10, 'y': 20, 'expected': \n 330}, {'expected': 600}]"], {}), "([{'x': 1, 'y': 2, 'z': 3, 'expected': 6}, {'x': 10, 'y':\n 20, 'expected': 330}, {'expected': 600}])\n", (927, 1030), False, 'from parameterized import parameterized, parameterized_class, param\n'), ((381, 450), 'parameterized.parameterized.expand', 'parameterized.expand', (['[(1, 2, 3, 6), (10, 20, 30, 60), (4, 5, 6, 15)]'], {}), '([(1, 2, 3, 6), (10, 20, 30, 60), (4, 5, 6, 15)])\n', (401, 450), False, 'from parameterized import parameterized, parameterized_class, param\n'), ((617, 634), 'parameterized.param', 'param', (['(1)', '(2)', '(3)', '(6)'], {}), '(1, 2, 3, 6)\n', (622, 634), False, 'from parameterized import parameterized, parameterized_class, param\n'), ((649, 676), 'parameterized.param', 'param', (['(10)', '(20)'], {'expected': '(130)'}), '(10, 20, expected=130)\n', (654, 676), False, 'from parameterized import parameterized, parameterized_class, param\n'), ((691, 708), 'parameterized.param', 'param', (['(10)', '(20)', '(70)'], {}), '(10, 20, 70)\n', (696, 708), False, 'from parameterized import parameterized, parameterized_class, param\n'), ((723, 742), 'parameterized.param', 'param', (['(20)', '(30)'], {'z': '(50)'}), '(20, 30, z=50)\n', (728, 742), False, 'from parameterized import parameterized, parameterized_class, param\n'), ((757, 768), 'parameterized.param', 'param', (['(0)', '(0)'], {}), '(0, 0)\n', (762, 768), False, 'from parameterized import parameterized, parameterized_class, param\n')] |
import unittest
from datetime import datetime
from bpl_lib.network.Network import Network
from bpl_lib.time.Time import Time
class TestTime(unittest.TestCase):
def test_get_time_1(self):
Network.use("mainnet")
time = Time.get_time(datetime.utcfromtimestamp(1533122273))
self.assertIsNotNone(time)
self.assertIsInstance(time, int)
self.assertEqual(time, 43021073)
def test_get_real_time_1(self):
Network.use("mainnet")
date_time = datetime.utcfromtimestamp(1533122273)
time = Time.get_real_time(43021073)
self.assertIsNotNone(time)
self.assertIsInstance(time, str)
self.assertEqual(time, date_time.strftime("%Y-%m-%d %H:%M:%S"))
def test_get_slot_number(self):
timestamp = 43021073
slot_number = Time.get_slot_number(timestamp)
self.assertIsNotNone(slot_number)
self.assertIsInstance(slot_number, int)
self.assertEqual(slot_number, 2868071)
def test_get_slot_time(self):
slot_number = 2868071
slot_time = Time.get_slot_time(slot_number)
self.assertIsNotNone(slot_time)
self.assertIsInstance(slot_time, int)
self.assertEqual(slot_time, 43021065)
if __name__ == "__main__":
unittest.main()
| [
"datetime.datetime.utcfromtimestamp",
"bpl_lib.network.Network.Network.use",
"unittest.main",
"bpl_lib.time.Time.Time.get_slot_number",
"bpl_lib.time.Time.Time.get_real_time",
"bpl_lib.time.Time.Time.get_slot_time"
] | [((1271, 1286), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1284, 1286), False, 'import unittest\n'), ((202, 224), 'bpl_lib.network.Network.Network.use', 'Network.use', (['"""mainnet"""'], {}), "('mainnet')\n", (213, 224), False, 'from bpl_lib.network.Network import Network\n'), ((456, 478), 'bpl_lib.network.Network.Network.use', 'Network.use', (['"""mainnet"""'], {}), "('mainnet')\n", (467, 478), False, 'from bpl_lib.network.Network import Network\n'), ((500, 537), 'datetime.datetime.utcfromtimestamp', 'datetime.utcfromtimestamp', (['(1533122273)'], {}), '(1533122273)\n', (525, 537), False, 'from datetime import datetime\n'), ((553, 581), 'bpl_lib.time.Time.Time.get_real_time', 'Time.get_real_time', (['(43021073)'], {}), '(43021073)\n', (571, 581), False, 'from bpl_lib.time.Time import Time\n'), ((819, 850), 'bpl_lib.time.Time.Time.get_slot_number', 'Time.get_slot_number', (['timestamp'], {}), '(timestamp)\n', (839, 850), False, 'from bpl_lib.time.Time import Time\n'), ((1074, 1105), 'bpl_lib.time.Time.Time.get_slot_time', 'Time.get_slot_time', (['slot_number'], {}), '(slot_number)\n', (1092, 1105), False, 'from bpl_lib.time.Time import Time\n'), ((254, 291), 'datetime.datetime.utcfromtimestamp', 'datetime.utcfromtimestamp', (['(1533122273)'], {}), '(1533122273)\n', (279, 291), False, 'from datetime import datetime\n')] |
from gevent import monkey, pool
monkey.patch_all()
import gevent
import time
from otils.oo import _OO
import os
class Worker(_OO):
def __init__(self, handler, data, **kwargs):
super(Worker, self).__init__()
self.handler = handler
self.data = data
for k, v in kwargs.items():
setattr(self, k, v)
self.filename = f'coroutine_worker_{self.begin}_{self.end}_{int(time.time() * 100000)}'
def run(self):
ident = os.getpid()
self.orun(ident)
def start(workers, num, timeout=None):
po = pool.Pool(num)
g = []
for w in workers:
g.append(po.spawn(w.run))
gevent.joinall(g, timeout=timeout)
| [
"gevent.monkey.patch_all",
"gevent.joinall",
"gevent.pool.Pool",
"os.getpid",
"time.time"
] | [((35, 53), 'gevent.monkey.patch_all', 'monkey.patch_all', ([], {}), '()\n', (51, 53), False, 'from gevent import monkey, pool\n'), ((593, 607), 'gevent.pool.Pool', 'pool.Pool', (['num'], {}), '(num)\n', (602, 607), False, 'from gevent import monkey, pool\n'), ((683, 717), 'gevent.joinall', 'gevent.joinall', (['g'], {'timeout': 'timeout'}), '(g, timeout=timeout)\n', (697, 717), False, 'import gevent\n'), ((501, 512), 'os.getpid', 'os.getpid', ([], {}), '()\n', (510, 512), False, 'import os\n'), ((438, 449), 'time.time', 'time.time', ([], {}), '()\n', (447, 449), False, 'import time\n')] |
# Generated by Django 3.0.8 on 2020-08-12 17:52
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('backend', '0017_participant_familymember'),
]
operations = [
migrations.AddField(
model_name='group',
name='more_owners',
field=models.ManyToManyField(default=None, help_text="Ein weiteres Array Feld, in dem das FrontEnd E-Mails ablegen kann, falls für die jeweilige Mail ein Benutzer gefunden wird, wird er hier verlinkt und erhält dann automatisch 'Owner' Rechte auf die Gruppe", null=True, related_name='more_grps', to=settings.AUTH_USER_MODEL, verbose_name='Weitere Besitzer'),
),
migrations.AddField(
model_name='participant',
name='group',
field=models.ForeignKey(default=1, help_text='Die Gruppierung des Teilnehmers', on_delete=django.db.models.deletion.PROTECT, related_name='participants', to='backend.Group', verbose_name='Gruppierung'),
preserve_default=False,
),
]
| [
"django.db.migrations.swappable_dependency",
"django.db.models.ManyToManyField",
"django.db.models.ForeignKey"
] | [((227, 284), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (258, 284), False, 'from django.db import migrations, models\n'), ((477, 843), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'default': 'None', 'help_text': '"""Ein weiteres Array Feld, in dem das FrontEnd E-Mails ablegen kann, falls für die jeweilige Mail ein Benutzer gefunden wird, wird er hier verlinkt und erhält dann automatisch \'Owner\' Rechte auf die Gruppe"""', 'null': '(True)', 'related_name': '"""more_grps"""', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""Weitere Besitzer"""'}), '(default=None, help_text=\n "Ein weiteres Array Feld, in dem das FrontEnd E-Mails ablegen kann, falls für die jeweilige Mail ein Benutzer gefunden wird, wird er hier verlinkt und erhält dann automatisch \'Owner\' Rechte auf die Gruppe"\n , null=True, related_name=\'more_grps\', to=settings.AUTH_USER_MODEL,\n verbose_name=\'Weitere Besitzer\')\n', (499, 843), False, 'from django.db import migrations, models\n'), ((953, 1157), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'default': '(1)', 'help_text': '"""Die Gruppierung des Teilnehmers"""', 'on_delete': 'django.db.models.deletion.PROTECT', 'related_name': '"""participants"""', 'to': '"""backend.Group"""', 'verbose_name': '"""Gruppierung"""'}), "(default=1, help_text='Die Gruppierung des Teilnehmers',\n on_delete=django.db.models.deletion.PROTECT, related_name=\n 'participants', to='backend.Group', verbose_name='Gruppierung')\n", (970, 1157), False, 'from django.db import migrations, models\n')] |
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
from importlib import import_module
from pathlib import Path
import qtgallery
from jinja2.filters import FILTERS
import napari
release = napari.__version__
if "dev" in release:
version = "dev"
else:
version = release
# -- Project information -----------------------------------------------------
project = 'napari'
copyright = '2022, The napari team'
author = 'The napari team'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
autosummary_generate = True
autosummary_imported_members = True
comments_config = {'hypothesis': False, 'utterances': False}
# execution_allow_errors = False
# execution_excludepatterns = []
# execution_in_temp = False
# execution_timeout = 30
extensions = [
"sphinx.ext.napoleon",
"sphinx.ext.autodoc",
"sphinx.ext.autosummary",
"sphinx.ext.intersphinx",
"sphinx_external_toc",
"sphinx_tabs.tabs",
'myst_nb',
# "sphinx_comments",
"sphinx_panels",
"sphinx.ext.viewcode",
"sphinx_gallery.gen_gallery",
]
external_toc_path = "_toc.yml"
external_toc_exclude_missing = False
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'napari'
html_theme_options = {
"external_links": [
{"name": "napari hub", "url": "https://napari-hub.org"}
],
"github_url": "https://github.com/napari/napari",
"navbar_start": ["navbar-project"],
"navbar_end": ["navbar-icon-links"],
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_logo = "images/logo.png"
html_sourcelink_suffix = ''
html_title = 'napari'
html_css_files = [
'custom.css',
]
intersphinx_mapping = {
'python': ['https://docs.python.org/3', None],
'numpy': ['https://numpy.org/doc/stable/', None],
'napari_plugin_engine': [
'https://napari-plugin-engine.readthedocs.io/en/latest/',
'https://napari-plugin-engine.readthedocs.io/en/latest/objects.inv',
],
'magicgui': [
'https://napari.org/magicgui/',
'https://napari.org/magicgui/objects.inv',
],
}
jupyter_cache = ''
jupyter_execute_notebooks = 'auto'
myst_enable_extensions = [
'colon_fence',
'dollarmath',
'substitution',
'tasklist',
]
nb_output_stderr = 'show'
panels_add_bootstrap_css = False
pygments_style = 'solarized-dark'
suppress_warnings = ['myst.header', 'etoc.toctree']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = [
'_build',
'Thumbs.db',
'.DS_Store',
'.jupyter_cache',
'jupyter_execute',
]
napoleon_custom_sections = [('Events', 'params_style')]
def reset_napari_theme(gallery_conf, fname):
from napari.settings import get_settings
settings = get_settings()
settings.appearance.theme = 'dark'
qtgallery.reset_qapp(gallery_conf, fname)
sphinx_gallery_conf = {
'examples_dirs': '../examples', # path to your example scripts
'gallery_dirs': 'gallery', # path to where to save gallery generated output
'filename_pattern': '/*.py',
'ignore_pattern': 'README.rst|/*_.py',
'default_thumb_file': Path(__file__).parent.parent
/ 'napari'
/ 'resources'
/ 'logo.png',
'plot_gallery': True,
'download_all_examples': False,
'min_reported_time': 10,
'only_warn_on_example_error': True,
'image_scrapers': (qtgallery.qtscraper,),
'reset_modules': (reset_napari_theme,),
}
def setup(app):
"""Ignore .ipynb files.
Prevents sphinx from complaining about multiple files found for document
when generating the gallery.
"""
app.registry.source_suffix.pop(".ipynb", None)
def get_attributes(item, obj, modulename):
"""Filters attributes to be used in autosummary.
Fixes import errors when documenting inherited attributes with autosummary.
"""
module = import_module(modulename)
if hasattr(getattr(module, obj), item):
return f"~{obj}.{item}"
else:
return ""
FILTERS["get_attributes"] = get_attributes
| [
"importlib.import_module",
"qtgallery.reset_qapp",
"napari.settings.get_settings",
"pathlib.Path"
] | [((4022, 4036), 'napari.settings.get_settings', 'get_settings', ([], {}), '()\n', (4034, 4036), False, 'from napari.settings import get_settings\n'), ((4080, 4121), 'qtgallery.reset_qapp', 'qtgallery.reset_qapp', (['gallery_conf', 'fname'], {}), '(gallery_conf, fname)\n', (4100, 4121), False, 'import qtgallery\n'), ((5120, 5145), 'importlib.import_module', 'import_module', (['modulename'], {}), '(modulename)\n', (5133, 5145), False, 'from importlib import import_module\n'), ((4399, 4413), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (4403, 4413), False, 'from pathlib import Path\n')] |
from confluent_kafka import TopicPartition as GroupPartitionMetadata
from confluent_kafka.admin import BrokerMetadata
from confluent_kafka.admin import GroupMember as GroupMemberMetadata
from confluent_kafka.admin import GroupMetadata, PartitionMetadata, TopicMetadata
from kaskade.kafka.models import (
Broker,
Group,
GroupMember,
GroupPartition,
Partition,
Topic,
)
def metadata_to_broker(metadata: BrokerMetadata) -> Broker:
return Broker(id=metadata.id, host=metadata.host, port=metadata.port)
def metadata_to_group(metadata: GroupMetadata) -> Group:
return Group(
id=metadata.id,
broker=metadata_to_broker(metadata.broker),
state=metadata.state,
members=[],
partitions=[],
)
def metadata_to_group_member(metadata: GroupMemberMetadata) -> GroupMember:
return GroupMember(
id=metadata.id,
group="",
client_id=metadata.client_id,
client_host=metadata.client_host,
)
def metadata_to_group_partition(metadata: GroupPartitionMetadata) -> GroupPartition:
return GroupPartition(
id=metadata.partition,
topic=metadata.topic,
offset=metadata.offset,
group="",
high=0,
low=0,
)
def metadata_to_partition(metadata: PartitionMetadata) -> Partition:
return Partition(
id=metadata.id,
leader=metadata.leader,
replicas=metadata.replicas,
isrs=metadata.isrs,
high=0,
low=0,
)
def metadata_to_topic(metadata: TopicMetadata) -> Topic:
name = metadata.topic
return Topic(
name=name,
groups=[],
partitions=[],
)
| [
"kaskade.kafka.models.Partition",
"kaskade.kafka.models.GroupPartition",
"kaskade.kafka.models.Broker",
"kaskade.kafka.models.Topic",
"kaskade.kafka.models.GroupMember"
] | [((466, 528), 'kaskade.kafka.models.Broker', 'Broker', ([], {'id': 'metadata.id', 'host': 'metadata.host', 'port': 'metadata.port'}), '(id=metadata.id, host=metadata.host, port=metadata.port)\n', (472, 528), False, 'from kaskade.kafka.models import Broker, Group, GroupMember, GroupPartition, Partition, Topic\n'), ((850, 955), 'kaskade.kafka.models.GroupMember', 'GroupMember', ([], {'id': 'metadata.id', 'group': '""""""', 'client_id': 'metadata.client_id', 'client_host': 'metadata.client_host'}), "(id=metadata.id, group='', client_id=metadata.client_id,\n client_host=metadata.client_host)\n", (861, 955), False, 'from kaskade.kafka.models import Broker, Group, GroupMember, GroupPartition, Partition, Topic\n'), ((1089, 1202), 'kaskade.kafka.models.GroupPartition', 'GroupPartition', ([], {'id': 'metadata.partition', 'topic': 'metadata.topic', 'offset': 'metadata.offset', 'group': '""""""', 'high': '(0)', 'low': '(0)'}), "(id=metadata.partition, topic=metadata.topic, offset=metadata\n .offset, group='', high=0, low=0)\n", (1103, 1202), False, 'from kaskade.kafka.models import Broker, Group, GroupMember, GroupPartition, Partition, Topic\n'), ((1335, 1452), 'kaskade.kafka.models.Partition', 'Partition', ([], {'id': 'metadata.id', 'leader': 'metadata.leader', 'replicas': 'metadata.replicas', 'isrs': 'metadata.isrs', 'high': '(0)', 'low': '(0)'}), '(id=metadata.id, leader=metadata.leader, replicas=metadata.\n replicas, isrs=metadata.isrs, high=0, low=0)\n', (1344, 1452), False, 'from kaskade.kafka.models import Broker, Group, GroupMember, GroupPartition, Partition, Topic\n'), ((1599, 1641), 'kaskade.kafka.models.Topic', 'Topic', ([], {'name': 'name', 'groups': '[]', 'partitions': '[]'}), '(name=name, groups=[], partitions=[])\n', (1604, 1641), False, 'from kaskade.kafka.models import Broker, Group, GroupMember, GroupPartition, Partition, Topic\n')] |
# Evolutionary Learning Strategy Implementation
# Learn more from https://blog.openai.com/evolution-strategies/
import gym
import numpy as np
from gym import wrappers
# GLOBAL SETTINGS
RNG_SEED = 8
ENVIRONMENT = "LunarLander-v2"
POPULATION_SIZE = 100 # Population size
GENERATION_LIMIT = 100 # Max number of generations
DISPLAY_WEIGHTS = False # Help debug weight update
RENDER = True # Render the generation representative
sigma = 0.1 # Noise standard deviation
alpha = 0.00025 # Learning rate
# Limit steps to enforce stopping early
LIMIT_STEPS = True
STEPS_LIMIT = 255 # Perform the DO_NOTHING_ACTION when step surpass
DO_NOTHING_ACTION = 0 # Action to feed in to do nothing
# Upload to OpenAI
UPLOAD = False
UPLOAD_GENERATION_INTERVAL = 10 # Generate a video at this interval
SESSION_FOLDER = "/tmp/LunarLander-experiment-1"
API_KEY = ""
# Success Mode (Settings to pass OpenAI's requirement)
SUCCESS_MODE = True
SUCCESS_THRESHOLD = 200
CONSECUTIVE_TARGET = 100
def extract_move(action):
return np.argmax(action)
def record_interval(n):
global UPLOAD_GENERATION_INTERVAL
global POPULATION_SIZE
episode_interval = (POPULATION_SIZE + 1) * UPLOAD_GENERATION_INTERVAL
return n % episode_interval == 0
def run_episode(environment, weight, render=False):
global LIMIT_STEPS
global STEPS_LIMIT
obs = environment.reset()
episode_reward = 0
done = False
step = 0
if LIMIT_STEPS:
max_steps = STEPS_LIMIT
else:
max_steps = env.spec.tags.get(
'wrapper_config.TimeLimit.max_episode_steps')
while not done:
if render:
environment.render()
if step < max_steps:
action = np.matmul(weight.T, obs)
move = extract_move(action)
else:
move = DO_NOTHING_ACTION
obs, reward, done, info = environment.step(move)
step += 1
episode_reward += reward
return episode_reward
env = gym.make(ENVIRONMENT)
if UPLOAD:
if SUCCESS_MODE:
env = wrappers.Monitor(env, SESSION_FOLDER)
else:
env = wrappers.Monitor(env, SESSION_FOLDER,
video_callable=record_interval)
env.seed(RNG_SEED)
np.random.seed(RNG_SEED)
input_size = env.observation_space.shape[0]
try:
output_size = env.action_space.shape[0]
except AttributeError:
output_size = env.action_space.n
# Initial weights
W = np.zeros((input_size, output_size))
for gen in range(GENERATION_LIMIT):
# Measure performance per generation
gen_eval = run_episode(env, W, RENDER and not UPLOAD)
# Success mode enabled
success_count = 1
if SUCCESS_MODE:
track_success = [gen_eval]
curr_mean = np.mean(track_success)
while success_count < CONSECUTIVE_TARGET and curr_mean >= \
SUCCESS_THRESHOLD:
gen_eval = run_episode(env, W)
track_success.append(gen_eval)
curr_mean = np.mean(track_success)
success_count += 1
gen_eval = curr_mean
# Keep track of Returns
R = np.zeros(POPULATION_SIZE)
# Generate noise
N = np.random.randn(POPULATION_SIZE, input_size, output_size)
for j in range(POPULATION_SIZE):
W_ = W + sigma * N[j]
R[j] = run_episode(env, W_)
# Update weights
# Summation of episode_weight * episode_reward
weighted_weights = np.matmul(N.T, R).T
new_W = W + (alpha / (POPULATION_SIZE * sigma)) * weighted_weights
if DISPLAY_WEIGHTS:
print(W)
W = new_W
gen_mean = np.mean(R)
if SUCCESS_MODE:
out = "Generation {}, Success Count: {}, Success Mean: {}, " \
"Population Mean: {}"
out = out.format(gen, success_count, gen_eval, gen_mean)
else:
out = "Generation {}, Return: {}, Population Mean: {}"
out = out.format(gen, gen_eval, gen_mean)
print(out)
env.close()
if UPLOAD:
gym.upload(SESSION_FOLDER, api_key=API_KEY)
| [
"numpy.mean",
"gym.upload",
"numpy.argmax",
"numpy.zeros",
"numpy.matmul",
"numpy.random.seed",
"gym.wrappers.Monitor",
"numpy.random.randn",
"gym.make"
] | [((1961, 1982), 'gym.make', 'gym.make', (['ENVIRONMENT'], {}), '(ENVIRONMENT)\n', (1969, 1982), False, 'import gym\n'), ((2212, 2236), 'numpy.random.seed', 'np.random.seed', (['RNG_SEED'], {}), '(RNG_SEED)\n', (2226, 2236), True, 'import numpy as np\n'), ((2414, 2449), 'numpy.zeros', 'np.zeros', (['(input_size, output_size)'], {}), '((input_size, output_size))\n', (2422, 2449), True, 'import numpy as np\n'), ((1019, 1036), 'numpy.argmax', 'np.argmax', (['action'], {}), '(action)\n', (1028, 1036), True, 'import numpy as np\n'), ((3068, 3093), 'numpy.zeros', 'np.zeros', (['POPULATION_SIZE'], {}), '(POPULATION_SIZE)\n', (3076, 3093), True, 'import numpy as np\n'), ((3123, 3180), 'numpy.random.randn', 'np.random.randn', (['POPULATION_SIZE', 'input_size', 'output_size'], {}), '(POPULATION_SIZE, input_size, output_size)\n', (3138, 3180), True, 'import numpy as np\n'), ((3541, 3551), 'numpy.mean', 'np.mean', (['R'], {}), '(R)\n', (3548, 3551), True, 'import numpy as np\n'), ((3911, 3954), 'gym.upload', 'gym.upload', (['SESSION_FOLDER'], {'api_key': 'API_KEY'}), '(SESSION_FOLDER, api_key=API_KEY)\n', (3921, 3954), False, 'import gym\n'), ((2029, 2066), 'gym.wrappers.Monitor', 'wrappers.Monitor', (['env', 'SESSION_FOLDER'], {}), '(env, SESSION_FOLDER)\n', (2045, 2066), False, 'from gym import wrappers\n'), ((2091, 2160), 'gym.wrappers.Monitor', 'wrappers.Monitor', (['env', 'SESSION_FOLDER'], {'video_callable': 'record_interval'}), '(env, SESSION_FOLDER, video_callable=record_interval)\n', (2107, 2160), False, 'from gym import wrappers\n'), ((2712, 2734), 'numpy.mean', 'np.mean', (['track_success'], {}), '(track_success)\n', (2719, 2734), True, 'import numpy as np\n'), ((3380, 3397), 'numpy.matmul', 'np.matmul', (['N.T', 'R'], {}), '(N.T, R)\n', (3389, 3397), True, 'import numpy as np\n'), ((1703, 1727), 'numpy.matmul', 'np.matmul', (['weight.T', 'obs'], {}), '(weight.T, obs)\n', (1712, 1727), True, 'import numpy as np\n'), ((2948, 2970), 'numpy.mean', 'np.mean', 
(['track_success'], {}), '(track_success)\n', (2955, 2970), True, 'import numpy as np\n')] |
import versioneer
from setuptools import setup, find_packages
import os
DESCRIPTION = "Inference of transcription factor motif activity from single cell RNA-seq data."
with open("README.md") as f:
long_description = f.read()
setup(
name="pananse",
version=versioneer.get_version(),
long_description = long_description,
long_description_content_type = 'text/markdown',
description=DESCRIPTION,
author="<NAME>",
author_email="<EMAIL>",
url="https://github.com/qxuchn/PANANSE/",
license="MIT",
packages=find_packages(),
scripts=["scripts/pananse"],
include_package_data=True,
zip_safe=False,
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Operating System :: MacOS :: MacOS X",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3",
"Topic :: Scientific/Engineering :: Bio-Informatics",
],
install_requires=[
"six"
],
)
| [
"setuptools.find_packages",
"versioneer.get_version"
] | [((271, 295), 'versioneer.get_version', 'versioneer.get_version', ([], {}), '()\n', (293, 295), False, 'import versioneer\n'), ((547, 562), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (560, 562), False, 'from setuptools import setup, find_packages\n')] |
##
## Software PI-Net: Pose Interacting Network for Multi-Person Monocular 3D Pose Estimation
## Copyright Inria and UPC
## Year 2021
## Contact : <EMAIL>
##
## The software PI-Net is provided under MIT License.
##
import json
with open("../data/MuPoTS-3D.json",'r') as f1:
data_dict = json.load(f1)
print("data keys:", data_dict.keys())
bbox_root_list = []
for i in range(len(data_dict['annotations'])):
bbox_root_dict = {}
bbox_root_dict['image_id'] = data_dict['annotations'][i]['image_id']
bbox_root_dict['bbox'] = data_dict['annotations'][i]['bbox']
bbox_root_dict['root_cam'] = data_dict['annotations'][i]['keypoints_cam'][14] #pelvis
bbox_root_list.append(bbox_root_dict)
with open("bbox_root_mupots_gt.json",'w') as f2:
json.dump(bbox_root_list, f2)
| [
"json.load",
"json.dump"
] | [((291, 304), 'json.load', 'json.load', (['f1'], {}), '(f1)\n', (300, 304), False, 'import json\n'), ((766, 795), 'json.dump', 'json.dump', (['bbox_root_list', 'f2'], {}), '(bbox_root_list, f2)\n', (775, 795), False, 'import json\n')] |
import re
from collections import defaultdict
from advent import AdventProblem
def preprocess(line):
match = re.match(r"([\w\s]+) bags contain (.+)\.", line)
constituents = re.findall(r"(\d+) ([\w\s]+) bags?", match.group(2))
return (match.group(1), constituents)
def part_1(pairings):
contained_by = defaultdict(list)
for bag, constituents in pairings:
for n, c in constituents:
contained_by[c].append(bag)
num_bags = 0
bags_to_visit = ["shiny gold"]
visited = set()
while len(bags_to_visit) > 0:
curr_bag = bags_to_visit.pop()
if curr_bag in visited:
continue
visited.add(curr_bag)
num_bags += 1
bags_to_visit += contained_by[curr_bag]
return num_bags - 1
def part_2(pairings):
contains = {}
for bag, constituents in pairings:
contains[bag] = [(c, int(n)) for n, c in constituents]
num_bags = 0
bags_to_open = [("shiny gold", 1)]
while len(bags_to_open) > 0:
bag, mult = bags_to_open.pop()
num_bags += mult
bags_to_open += [(c, n * mult) for c, n in contains[bag]]
return num_bags - 1
if __name__ == '__main__':
part1 = AdventProblem(7, 1, preprocess)
part1.add_solution(part_1)
part1.run()
part2 = AdventProblem(7, 2, preprocess)
part2.add_solution(part_2)
part2.run()
| [
"advent.AdventProblem",
"re.match",
"collections.defaultdict"
] | [((116, 166), 're.match', 're.match', (['"""([\\\\w\\\\s]+) bags contain (.+)\\\\."""', 'line'], {}), "('([\\\\w\\\\s]+) bags contain (.+)\\\\.', line)\n", (124, 166), False, 'import re\n'), ((321, 338), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (332, 338), False, 'from collections import defaultdict\n'), ((1204, 1235), 'advent.AdventProblem', 'AdventProblem', (['(7)', '(1)', 'preprocess'], {}), '(7, 1, preprocess)\n', (1217, 1235), False, 'from advent import AdventProblem\n'), ((1296, 1327), 'advent.AdventProblem', 'AdventProblem', (['(7)', '(2)', 'preprocess'], {}), '(7, 2, preprocess)\n', (1309, 1327), False, 'from advent import AdventProblem\n')] |
import os
import sys
import time
import subprocess
import logging.config
from logging import getLogger
from flask import Flask, request, flash, jsonify, redirect, url_for
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager, current_user
import flask_socketio
from flask_socketio import SocketIO
# print(flask_socketio.__version__)
# exit()
import pymysql.err
import sqlalchemy.exc
from sqlalchemy import event
from sqlalchemy.engine import Engine
from .settings import Settings
root_folder = os.path.join(os.path.split(os.path.abspath(__file__))[0], "..")
logging.config.fileConfig(
fname=os.path.join(root_folder, "app", "logging.conf"),
disable_existing_loggers=False,
defaults={'logfilename': os.path.join(root_folder, "logs", "app", "crwiz.log")}
)
getLogger("slurk").info("Logging config loaded. Starting up Slurk...")
DEBUG = False
socketio = SocketIO(ping_interval=5, ping_timeout=120, async_mode="gevent")
app = Flask(__name__)
app.config.from_object('config')
db = SQLAlchemy(app)
settings = Settings.from_object('config')
login_manager = LoginManager()
from .models.room import Room
from .models.token import Token
from .models.layout import Layout
from .models.permission import Permissions
from .models.state_history import StateHistory
from .models.task import Task
from .models.log import Log
# Try to connect to the database. If it cannot succeed after a
# few attempts then the program will finish
connected_to_db = False
attempts = 0
while not connected_to_db:
try:
if settings.drop_database_on_startup:
db.drop_all()
db.create_all()
connected_to_db = True
except (pymysql.err.OperationalError, pymysql.err.InternalError,
sqlalchemy.exc.InternalError, sqlalchemy.exc.OperationalError,
ConnectionRefusedError):
if attempts >= 10:
getLogger("crwiz").critical(
f"Unable to connect to database at {settings.database_url} "
f"after {attempts} attempts. Shutting down the program...")
sys.exit(4)
else:
getLogger("crwiz").warning(
f"Unable to connect to database at '{settings.database_url}'. "
f"Retrying...")
time.sleep(1)
attempts += 1
except Exception as ex:
getLogger("crwiz").critical(
f"There was an unexpected exception: {ex}", exc_info=True)
getLogger("crwiz").critical(f"Shutting down the program...")
sys.exit(4)
from .api import api as api_blueprint
from .api import close_rooms
from .login import login as login_blueprint
from .chat import chat as chat_blueprint
from .portal import portal as portal_blueprint
app.register_blueprint(api_blueprint)
app.register_blueprint(portal_blueprint)
app.register_blueprint(login_blueprint)
app.register_blueprint(chat_blueprint)
login_manager.init_app(app)
login_manager.login_view = 'login.index'
socketio.init_app(app)
@event.listens_for(Engine, "connect")
def set_sqlite_pragma(dbapi_connection, _connection_record):
if settings.database_url.startswith('sqlite://'):
cursor = dbapi_connection.cursor()
cursor.execute("PRAGMA foreign_keys=ON")
cursor.close()
@app.before_request
def before_request():
if request.endpoint and request.endpoint.startswith("api."):
return
if not current_user.is_authenticated \
and request.endpoint not in ["login.index", "static", "portal.index", "admin.index"]:
# if DEBUG, then redirect to the portal to perform an auto login
if DEBUG:
return redirect(url_for("portal.index"))
# otherwise say it's unauthorised
return login_manager.unauthorized()
if not Room.query.get("admin_room"):
db.session.add(Room(name="admin_room",
label="Admin Room",
layout=Layout.from_json_file("default"),
static=True))
db.session.add(Token(room_name='admin_room',
id='00000000-0000-0000-0000-000000000000' if settings.debug else None,
permissions=Permissions(
user_query=True,
user_log_event=True,
user_room_join=True,
user_room_leave=True,
message_text=True,
message_image=True,
message_command=True,
message_broadcast=True,
room_query=True,
room_log_query=True,
room_create=True,
room_update=True,
room_delete=True,
layout_query=True,
layout_create=True,
layout_update=True,
task_create=True,
task_query=True,
task_update=True,
token_generate=True,
token_query=True,
token_invalidate=True,
token_update=True,
)))
db.session.commit()
getLogger("slurk").debug("Generating admin room and token...")
admin_token = Token.query.order_by(Token.date_created).first().id
getLogger("slurk").debug(f"Admin token: ['{admin_token}']")
sys.stdout.flush()
from .crwiz import init_crwiz, logger_crwiz, stop_bots
init_crwiz()
logger_crwiz.info("Ready")
| [
"logging.getLogger",
"flask_login.LoginManager",
"sqlalchemy.event.listens_for",
"flask.Flask",
"flask.request.endpoint.startswith",
"os.path.join",
"flask_socketio.SocketIO",
"time.sleep",
"flask.url_for",
"sys.exit",
"os.path.abspath",
"flask_sqlalchemy.SQLAlchemy",
"sys.stdout.flush"
] | [((893, 957), 'flask_socketio.SocketIO', 'SocketIO', ([], {'ping_interval': '(5)', 'ping_timeout': '(120)', 'async_mode': '"""gevent"""'}), "(ping_interval=5, ping_timeout=120, async_mode='gevent')\n", (901, 957), False, 'from flask_socketio import SocketIO\n'), ((966, 981), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (971, 981), False, 'from flask import Flask, request, flash, jsonify, redirect, url_for\n'), ((1021, 1036), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', (['app'], {}), '(app)\n', (1031, 1036), False, 'from flask_sqlalchemy import SQLAlchemy\n'), ((1095, 1109), 'flask_login.LoginManager', 'LoginManager', ([], {}), '()\n', (1107, 1109), False, 'from flask_login import LoginManager, current_user\n'), ((2991, 3027), 'sqlalchemy.event.listens_for', 'event.listens_for', (['Engine', '"""connect"""'], {}), "(Engine, 'connect')\n", (3008, 3027), False, 'from sqlalchemy import event\n'), ((5541, 5559), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (5557, 5559), False, 'import sys\n'), ((623, 671), 'os.path.join', 'os.path.join', (['root_folder', '"""app"""', '"""logging.conf"""'], {}), "(root_folder, 'app', 'logging.conf')\n", (635, 671), False, 'import os\n'), ((795, 813), 'logging.getLogger', 'getLogger', (['"""slurk"""'], {}), "('slurk')\n", (804, 813), False, 'from logging import getLogger\n'), ((3330, 3365), 'flask.request.endpoint.startswith', 'request.endpoint.startswith', (['"""api."""'], {}), "('api.')\n", (3357, 3365), False, 'from flask import Flask, request, flash, jsonify, redirect, url_for\n'), ((5480, 5498), 'logging.getLogger', 'getLogger', (['"""slurk"""'], {}), "('slurk')\n", (5489, 5498), False, 'from logging import getLogger\n'), ((549, 574), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (564, 574), False, 'import os\n'), ((738, 791), 'os.path.join', 'os.path.join', (['root_folder', '"""logs"""', '"""app"""', '"""crwiz.log"""'], {}), "(root_folder, 'logs', 'app', 'crwiz.log')\n", 
(750, 791), False, 'import os\n'), ((2522, 2533), 'sys.exit', 'sys.exit', (['(4)'], {}), '(4)\n', (2530, 2533), False, 'import sys\n'), ((5349, 5367), 'logging.getLogger', 'getLogger', (['"""slurk"""'], {}), "('slurk')\n", (5358, 5367), False, 'from logging import getLogger\n'), ((2079, 2090), 'sys.exit', 'sys.exit', (['(4)'], {}), '(4)\n', (2087, 2090), False, 'import sys\n'), ((2269, 2282), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (2279, 2282), False, 'import time\n'), ((3639, 3662), 'flask.url_for', 'url_for', (['"""portal.index"""'], {}), "('portal.index')\n", (3646, 3662), False, 'from flask import Flask, request, flash, jsonify, redirect, url_for\n'), ((2345, 2363), 'logging.getLogger', 'getLogger', (['"""crwiz"""'], {}), "('crwiz')\n", (2354, 2363), False, 'from logging import getLogger\n'), ((2453, 2471), 'logging.getLogger', 'getLogger', (['"""crwiz"""'], {}), "('crwiz')\n", (2462, 2471), False, 'from logging import getLogger\n'), ((1885, 1903), 'logging.getLogger', 'getLogger', (['"""crwiz"""'], {}), "('crwiz')\n", (1894, 1903), False, 'from logging import getLogger\n'), ((2117, 2135), 'logging.getLogger', 'getLogger', (['"""crwiz"""'], {}), "('crwiz')\n", (2126, 2135), False, 'from logging import getLogger\n')] |
from PyQt5.QtCore import QRect, Qt
from PyQt5.QtWidgets import QWidget, QVBoxLayout, QPushButton, QHBoxLayout, QLabel, QSizePolicy
from xu.compa.Parapluie import Parapluie
from xu.compa.Parapluie.src.StickyWindow.PSticky import PSticky
class PMessage(PSticky):
def __init__(self, parent: QWidget, rect: QRect = None):
super().__init__(parent)
if rect is None:
x = parent.x() + parent.width() / 2 - 150
y = parent.y() + parent.height() / 2 - 50
window_rect = QRect(x, y, 300, 100)
else:
x = rect.x() + rect.width() / 2 - 150
y = rect.y() + rect.height() / 2 - 100
window_rect = QRect(x, y, 300, 100)
self.setGeometry(window_rect)
self.action = QHBoxLayout()
self.action.addStretch()
self.action.addStretch()
self.text = QLabel()
self.text.setWordWrap(True)
self.text.setStyleSheet("color:#666666;")
layout = QVBoxLayout()
layout.setSpacing(18)
layout.addWidget(self.text, alignment=Qt.AlignTop)
layout.addLayout(self.action)
layout.setContentsMargins(12, 12, 12, 0)
widget = QWidget()
widget.setLayout(layout)
widget.setContentsMargins(0, 8, 0, 0)
widget.setObjectName(Parapluie.Object_Raised_Off)
self.setCentralWidget(widget)
def addButton(self, text, tpe, action=None, data=None):
apply = QPushButton()
apply.setText(text)
if action:
if data is None:
apply.clicked.connect(action)
else:
apply.clicked.connect(lambda: action(text, data))
if tpe == Parapluie.Button_Positive:
apply.setObjectName(Parapluie.Object_OptimizeButton)
elif tpe == Parapluie.Button_Negative:
apply.setObjectName(Parapluie.Object_NegativeButton)
self.action.insertWidget(1, apply, alignment=Qt.AlignCenter)
def setMessage(self, message):
self.text.setText(message)
def initInformation(self, message, title="Notice"):
self.setWindowTitle(title.upper())
self.setMessage(message)
self.addButton("CLOSE", Parapluie.Button_Neutral, lambda: self.completeDestroy(0))
self.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Fixed)
def initWarning(self, message, title="Warning!!!", negative="CONTINUE"):
self.setWindowTitle(title.upper())
self.setMessage(message)
self.addButton("CLOSE", Parapluie.Button_Neutral, lambda: self.completeDestroy(0))
self.addButton(negative, Parapluie.Button_Negative, lambda: self.completeDestroy(1))
self.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Fixed)
def initQuestion(self, message, option: list, title="Choose a option..."):
self.setWindowTitle(title.upper())
self.setMessage(message)
for o in option:
if isinstance(o, str):
self.addButton(o, Parapluie.Button_Positive, self.onSelectedOption, option.index(o))
elif isinstance(o, dict):
if "text" in o and "type" in o:
self.addButton(o["text"], o["type"], self.onSelectedOption, option.index(o))
self.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Fixed)
def onSelectedOption(self, text, data):
self.completeDestroy(data)
| [
"PyQt5.QtWidgets.QWidget",
"PyQt5.QtWidgets.QHBoxLayout",
"PyQt5.QtCore.QRect",
"PyQt5.QtWidgets.QLabel",
"PyQt5.QtWidgets.QVBoxLayout",
"PyQt5.QtWidgets.QPushButton"
] | [((764, 777), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (775, 777), False, 'from PyQt5.QtWidgets import QWidget, QVBoxLayout, QPushButton, QHBoxLayout, QLabel, QSizePolicy\n'), ((865, 873), 'PyQt5.QtWidgets.QLabel', 'QLabel', ([], {}), '()\n', (871, 873), False, 'from PyQt5.QtWidgets import QWidget, QVBoxLayout, QPushButton, QHBoxLayout, QLabel, QSizePolicy\n'), ((978, 991), 'PyQt5.QtWidgets.QVBoxLayout', 'QVBoxLayout', ([], {}), '()\n', (989, 991), False, 'from PyQt5.QtWidgets import QWidget, QVBoxLayout, QPushButton, QHBoxLayout, QLabel, QSizePolicy\n'), ((1186, 1195), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (1193, 1195), False, 'from PyQt5.QtWidgets import QWidget, QVBoxLayout, QPushButton, QHBoxLayout, QLabel, QSizePolicy\n'), ((1448, 1461), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', ([], {}), '()\n', (1459, 1461), False, 'from PyQt5.QtWidgets import QWidget, QVBoxLayout, QPushButton, QHBoxLayout, QLabel, QSizePolicy\n'), ((518, 539), 'PyQt5.QtCore.QRect', 'QRect', (['x', 'y', '(300)', '(100)'], {}), '(x, y, 300, 100)\n', (523, 539), False, 'from PyQt5.QtCore import QRect, Qt\n'), ((681, 702), 'PyQt5.QtCore.QRect', 'QRect', (['x', 'y', '(300)', '(100)'], {}), '(x, y, 300, 100)\n', (686, 702), False, 'from PyQt5.QtCore import QRect, Qt\n')] |
"""
Onoda 2012 ICA- and PCA-based algorithm
See: Careful seeding method based on independent components analysis for
k-means clustering
http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.663.5343&rep=rep1&type=pdf#page=53
"""
from abc import abstractmethod
import numpy as np
from initialisations.base import Initialisation
class Onoda(Initialisation):
"""Base class for the two Onoda 2012 initialisation algorithms"""
def _find_centroids(self, components) -> np.array:
"""Step 1b from the algorithms"""
centroids = []
for component in components:
distances = [self._calc_distance(x, component) for x in self._data]
centroids.append(self._data[np.argmin(distances)])
return np.array(centroids)
@staticmethod
def _calc_distance(row, component):
"""Used in Step 1b from the algorithms"""
mag = np.linalg.norm
return np.dot(component, row) / (mag(component) * mag(row))
@staticmethod
@abstractmethod
def _find_components() -> np.array:
"""Each algorithm must implement this"""
def find_centers(self):
"""Main method"""
return self._find_centroids(self._find_components())
| [
"numpy.argmin",
"numpy.array",
"numpy.dot"
] | [((755, 774), 'numpy.array', 'np.array', (['centroids'], {}), '(centroids)\n', (763, 774), True, 'import numpy as np\n'), ((930, 952), 'numpy.dot', 'np.dot', (['component', 'row'], {}), '(component, row)\n', (936, 952), True, 'import numpy as np\n'), ((716, 736), 'numpy.argmin', 'np.argmin', (['distances'], {}), '(distances)\n', (725, 736), True, 'import numpy as np\n')] |
import KratosMultiphysics.KratosUnittest as KratosUnittest
import KratosMultiphysics
import KratosMultiphysics.TrilinosApplication as KratosTrilinos
class TestTrilinosMatrix(KratosUnittest.TestCase):
def test_resize(self):
comm = KratosTrilinos.CreateEpetraCommunicator(KratosMultiphysics.DataCommunicator.GetDefault())
space = KratosTrilinos.TrilinosSparseSpace()
pb = space.CreateEmptyVectorPointer(comm)
space.ResizeVector(pb,2)
n = space.Size(pb.GetReference())
self.assertEqual(n,2)
if __name__ == '__main__':
KratosUnittest.main() | [
"KratosMultiphysics.TrilinosApplication.TrilinosSparseSpace",
"KratosMultiphysics.KratosUnittest.main",
"KratosMultiphysics.DataCommunicator.GetDefault"
] | [((576, 597), 'KratosMultiphysics.KratosUnittest.main', 'KratosUnittest.main', ([], {}), '()\n', (595, 597), True, 'import KratosMultiphysics.KratosUnittest as KratosUnittest\n'), ((350, 386), 'KratosMultiphysics.TrilinosApplication.TrilinosSparseSpace', 'KratosTrilinos.TrilinosSparseSpace', ([], {}), '()\n', (384, 386), True, 'import KratosMultiphysics.TrilinosApplication as KratosTrilinos\n'), ((284, 332), 'KratosMultiphysics.DataCommunicator.GetDefault', 'KratosMultiphysics.DataCommunicator.GetDefault', ([], {}), '()\n', (330, 332), False, 'import KratosMultiphysics\n')] |
#lib import
import sys, os, random, time
sys.path.append('..\\..\\')
import derpapi
import pygame
RUN = False
#screen init
try:
pygame.init()
except:
pygame.display.init()
def main():
global RUN, random, derpapi
#place main code here
HS = derpapi.retrieve('HISCORE')
if HS == None:
HS = 0
screen = pygame.display.set_mode([480, 320], pygame.NOFRAME)
screen.fill((230,230,230))
try:
font = pygame.font.Font('..\\..\\font_main.otf', 32)
except:
font = pygame.font.Font('font_main.otf', 32)
RUN = True
spd = 0.3
paddle_x = 0
cp = [240, 160]
cpd = [random.choice([-1 * spd, spd]), random.choice([-1 * spd, spd])]
lt = 0
lst = 0
s_mult = 1
score = 0
lives = 5
lost_one = False
pause = pygame.image.load('pause.png')
paused = 1
while RUN:
pygame.time.Clock().tick(300)
for event in pygame.event.get():
if event.type == pygame.MOUSEMOTION and paused % 2 == 1:
if event.pos[0] <= 20:
paddle_x = 0
elif event.pos[0] >= 460:
paddle_x = 440
else:
paddle_x = event.pos[0] - 20
if event.type == pygame.KEYDOWN:
if event.unicode == 'x':
pygame.display.quit()
sys.exit()
if event.unicode == 'p':
paused += 1
if event.type == pygame.MOUSEBUTTONDOWN:
if derpapi.collision((215, 10), (50, 50), event.pos):
paused += 1
if paused % 2 == 0:
screen.fill((200,200,200))
screen.blit(pygame.Surface((40, 10)), (paddle_x, 300))
screen.blit(pause, (215, 10))
pygame.draw.circle(screen, (0,0,0), (int(cp[0]), int(cp[1])), 5)
screen.blit(font.render(str(score), True, (0,0,0)), (10, 10))
screen.blit(font.render('HI ' + str(HS), True, (0,0,0)), (10, 40))
screen.blit(font.render(str(lives), True, (0,0,0)), (450, 10))
pygame.display.flip()
else:
screen.fill((230,230,230))
screen.blit(pygame.Surface((40, 10)), (paddle_x, 300))
screen.blit(pause, (215, 10))
pygame.draw.circle(screen, (0,0,0), (int(cp[0]), int(cp[1])), 5)
screen.blit(font.render(str(score), True, (0,0,0)), (10, 10))
screen.blit(font.render('HI ' + str(HS), True, (0,0,0)), (10, 40))
screen.blit(font.render(str(lives), True, (0,0,0)), (450, 10))
pygame.display.flip()
cp = [cp[0] + cpd[0], cp[1] + cpd[1]]
if cp[0] <= 5 or cp[0] >= 475:
cpd[0] = cpd[0] * -1
if cp[1] <= 5 or cp[1] >= 315:
cpd[1] = cpd[1] * -1
if derpapi.collision((paddle_x, 300), (40, 10), (cp[0], cp[1] + 5)) and round(time.time()) != lst:
cpd[1] = cpd[1] * -1
score += s_mult
lst = round(time.time())
if round(time.time()) % 30 == 0 and round(time.time()) != lt:
lt = round(time.time())
cpd = [cpd[0] * 1.1, cpd[1] * 1.1]
s_mult += 1
print('SPD' + str(cpd[0] * 1.1))
if cp[1] >= 310 and not lost_one:
lost_one = True
lives -= 1
elif cp[1] < 280:
lost_one = False
if lives == 0:
RUN = False
if score > int(HS):
HS = score
screen.fill((230,230,230))
screen.blit(font.render('GAME OVER', True, (0,0,0)), (10, 10))
screen.blit(font.render('Your score is ' + str(score), True, (0,0,0)), (10, 60))
if HS == score:
screen.blit(font.render('NEW HIGHSCORE', True, (0,0,0)), (10, 100))
derpapi.store("HISCORE", HS)
else:
screen.blit(font.render('HI ' + HS, True, (0,0,0)), (10, 100))
pygame.display.flip()
time.sleep(4)
main()
pygame.display.quit()
| [
"random.choice",
"sys.exit",
"derpapi.retrieve",
"pygame.init",
"pygame.display.init",
"pygame.event.get",
"pygame.display.set_mode",
"pygame.display.flip",
"derpapi.collision",
"pygame.Surface",
"time.time",
"time.sleep",
"pygame.time.Clock",
"pygame.display.quit",
"derpapi.store",
"p... | [((43, 70), 'sys.path.append', 'sys.path.append', (['"""..\\\\..\\\\"""'], {}), "('..\\\\..\\\\')\n", (58, 70), False, 'import sys, os, random, time\n'), ((3929, 3950), 'pygame.display.quit', 'pygame.display.quit', ([], {}), '()\n', (3948, 3950), False, 'import pygame\n'), ((143, 156), 'pygame.init', 'pygame.init', ([], {}), '()\n', (154, 156), False, 'import pygame\n'), ((274, 301), 'derpapi.retrieve', 'derpapi.retrieve', (['"""HISCORE"""'], {}), "('HISCORE')\n", (290, 301), False, 'import derpapi\n'), ((348, 399), 'pygame.display.set_mode', 'pygame.display.set_mode', (['[480, 320]', 'pygame.NOFRAME'], {}), '([480, 320], pygame.NOFRAME)\n', (371, 399), False, 'import pygame\n'), ((806, 836), 'pygame.image.load', 'pygame.image.load', (['"""pause.png"""'], {}), "('pause.png')\n", (823, 836), False, 'import pygame\n'), ((3864, 3885), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (3883, 3885), False, 'import pygame\n'), ((3890, 3903), 'time.sleep', 'time.sleep', (['(4)'], {}), '(4)\n', (3900, 3903), False, 'import sys, os, random, time\n'), ((170, 191), 'pygame.display.init', 'pygame.display.init', ([], {}), '()\n', (189, 191), False, 'import pygame\n'), ((454, 499), 'pygame.font.Font', 'pygame.font.Font', (['"""..\\\\..\\\\font_main.otf"""', '(32)'], {}), "('..\\\\..\\\\font_main.otf', 32)\n", (470, 499), False, 'import pygame\n'), ((641, 671), 'random.choice', 'random.choice', (['[-1 * spd, spd]'], {}), '([-1 * spd, spd])\n', (654, 671), False, 'import sys, os, random, time\n'), ((673, 703), 'random.choice', 'random.choice', (['[-1 * spd, spd]'], {}), '([-1 * spd, spd])\n', (686, 703), False, 'import sys, os, random, time\n'), ((929, 947), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (945, 947), False, 'import pygame\n'), ((3751, 3779), 'derpapi.store', 'derpapi.store', (['"""HISCORE"""', 'HS'], {}), "('HISCORE', HS)\n", (3764, 3779), False, 'import derpapi\n'), ((526, 563), 'pygame.font.Font', 'pygame.font.Font', 
(['"""font_main.otf"""', '(32)'], {}), "('font_main.otf', 32)\n", (542, 563), False, 'import pygame\n'), ((2049, 2070), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (2068, 2070), False, 'import pygame\n'), ((2533, 2554), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (2552, 2554), False, 'import pygame\n'), ((879, 898), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (896, 898), False, 'import pygame\n'), ((1494, 1543), 'derpapi.collision', 'derpapi.collision', (['(215, 10)', '(50, 50)', 'event.pos'], {}), '((215, 10), (50, 50), event.pos)\n', (1511, 1543), False, 'import derpapi\n'), ((1659, 1683), 'pygame.Surface', 'pygame.Surface', (['(40, 10)'], {}), '((40, 10))\n', (1673, 1683), False, 'import pygame\n'), ((2143, 2167), 'pygame.Surface', 'pygame.Surface', (['(40, 10)'], {}), '((40, 10))\n', (2157, 2167), False, 'import pygame\n'), ((2766, 2830), 'derpapi.collision', 'derpapi.collision', (['(paddle_x, 300)', '(40, 10)', '(cp[0], cp[1] + 5)'], {}), '((paddle_x, 300), (40, 10), (cp[0], cp[1] + 5))\n', (2783, 2830), False, 'import derpapi\n'), ((1312, 1333), 'pygame.display.quit', 'pygame.display.quit', ([], {}), '()\n', (1331, 1333), False, 'import pygame\n'), ((1350, 1360), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1358, 1360), False, 'import sys, os, random, time\n'), ((2950, 2961), 'time.time', 'time.time', ([], {}), '()\n', (2959, 2961), False, 'import sys, os, random, time\n'), ((3061, 3072), 'time.time', 'time.time', ([], {}), '()\n', (3070, 3072), False, 'import sys, os, random, time\n'), ((2841, 2852), 'time.time', 'time.time', ([], {}), '()\n', (2850, 2852), False, 'import sys, os, random, time\n'), ((3017, 3028), 'time.time', 'time.time', ([], {}), '()\n', (3026, 3028), False, 'import sys, os, random, time\n'), ((2984, 2995), 'time.time', 'time.time', ([], {}), '()\n', (2993, 2995), False, 'import sys, os, random, time\n')] |
from datetime import date
print('{} DESAFIO 32 {}'.format('='*10, '='*10))
ano = int(input('Digite um ano qualquer (Ou 0 se quiser a análise do ano atual): '))
if ano == 0:
ano = date.today().year
if ano % 4 == 0 and ano != 100 or ano % 400 == 0:
print('O ano {} é BISSEXTO!'.format(ano))
else:
print('O ano {} NÃO é BISSEXTO!'.format(ano)) | [
"datetime.date.today"
] | [((183, 195), 'datetime.date.today', 'date.today', ([], {}), '()\n', (193, 195), False, 'from datetime import date\n')] |
import re
import platform
import os
import time
import json
import datetime
from .website import websiteText
from .UpdateJson import UpdateJsonLanguage
def EnglishFunction(Balthazar, text, languages):
websiteText(Balthazar, text, languages)
if "hello" in text:
Balthazar.speak("hello, how are you?", languages)
elif "goodbye" in text:
Balthazar.speak("Goodbye Master", languages)
return 84
if "who are you" in text:
Balthazar.speak("I am Balthazar, a voice assistance software created by <NAME>", languages)
elif "language" in text:
if "French" in text:
languages = 1
UpdateJsonLanguage(languages)
Balthazar.speak("J'ai changer la langue en Français", languages)
else:
languages = 0
UpdateJsonLanguage(languages)
Balthazar.speak("I change the language in English", languages)
elif "what is your name" in text:
Balthazar.speak("My name is Balthazar", languages)
elif 'play music' in text:
music_dir = './' #Your Path
songs = os.listdir(music_dir)
print(songs)
os.startfile(os.path.join(music_dir, songs[0]))
elif 'time' in text:
strTime = datetime.datetime.now().strftime("%H:%M:%S")
Balthazar.speak(f"Sir, the time is {strTime}", languages)
else:
Balthazar.speak("Sorry, i don't understand my master", languages)
return languages | [
"datetime.datetime.now",
"os.listdir",
"os.path.join"
] | [((1104, 1125), 'os.listdir', 'os.listdir', (['music_dir'], {}), '(music_dir)\n', (1114, 1125), False, 'import os\n'), ((1172, 1205), 'os.path.join', 'os.path.join', (['music_dir', 'songs[0]'], {}), '(music_dir, songs[0])\n', (1184, 1205), False, 'import os\n'), ((1251, 1274), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1272, 1274), False, 'import datetime\n')] |
import numpy as np
from math import sqrt
from numba import njit, prange, jitclass
from src.Common import ParticleType
from typing import Tuple
@jitclass([])
class TimeStep:
def __init__(self):
pass
def compute(self, J: int, pA: np.array, gamma_c: float = 0.25, gamma_f: float = 0.25) -> Tuple[float, float, float]:
"""
Computes the minimum time-step
Parameters
----------
J: int
length of the particle array
pA: np.array
Particle array, should have particle_dtype as dtype
gamma_c: float
cfl factor for the courant condition, default 0.4
gamma_f: float
cfl factor for the force condition, default 0.25
Returns
-------
Minimum time-step
Time-step based on courant condition
Time-step based on force condition
"""
min_h, max_c, max_a2 = self.computeVars(J, pA)
c = self.courant(gamma_c, min_h, max_c)
f = self.force(gamma_f, min_h, max_a2)
return min(c, f), c, f
def courant(self, cfl, h_min, c_max) -> float:
""" Timestep due to courant condition. """
return cfl * h_min / c_max
def force(self, cfl, min_h, max_a) -> float:
""" Time-step due to force. """
if max_a < 1e-12:
return 1e10
else:
return cfl * sqrt(min_h / max_a)
def computeVars(self, J: int, pA: np.array):
"""
Computes the minimum h, maximum speed of sound, and maximum acceleration for a given particle array.
Parameters
----------
J: int
Length of particle array
pA: np.array
Particle array
Returns
-------
a tuple with
minimum h, maximum speed of sound, and maximum acceleration
"""
h = []; c = []; a2 = []
# Outside of compute loop so prange can be used.
for j in prange(J):
if pA[j]['label'] == ParticleType.Fluid:
h.append(pA[j]['h'])
c.append(pA[j]['c'])
a2.append(pA[j]['ax'] * pA[j]['ax'] + pA[j]['ay'] * pA[j]['ay'])
# Find the maximum this can not be done parallel.
min_h = np.min(np.array(h))
max_c = np.max(np.array(c))
max_a2 = np.max(np.array(a2))
return min_h, max_c, max_a2 | [
"numba.prange",
"numpy.array",
"math.sqrt",
"numba.jitclass"
] | [((145, 157), 'numba.jitclass', 'jitclass', (['[]'], {}), '([])\n', (153, 157), False, 'from numba import njit, prange, jitclass\n'), ((2085, 2094), 'numba.prange', 'prange', (['J'], {}), '(J)\n', (2091, 2094), False, 'from numba import njit, prange, jitclass\n'), ((2387, 2398), 'numpy.array', 'np.array', (['h'], {}), '(h)\n', (2395, 2398), True, 'import numpy as np\n'), ((2424, 2435), 'numpy.array', 'np.array', (['c'], {}), '(c)\n', (2432, 2435), True, 'import numpy as np\n'), ((2461, 2473), 'numpy.array', 'np.array', (['a2'], {}), '(a2)\n', (2469, 2473), True, 'import numpy as np\n'), ((1463, 1482), 'math.sqrt', 'sqrt', (['(min_h / max_a)'], {}), '(min_h / max_a)\n', (1467, 1482), False, 'from math import sqrt\n')] |
from django.urls import path
from bookbar.auth_app.views import UserRegistrationView, UserLoginView, logout_user, ChangeUserPasswordView, \
ChangeUserEmailView
urlpatterns = (
path('register/', UserRegistrationView.as_view(), name='register user'),
path('login/', UserLoginView.as_view(), name='login user'),
path('logout/', logout_user, name='logout user'),
path('change-email/<int:pk>/', ChangeUserEmailView.as_view(), name='change email'),
path('change-password/', ChangeUserPasswordView.as_view(), name='change password'),
)
| [
"bookbar.auth_app.views.ChangeUserEmailView.as_view",
"bookbar.auth_app.views.UserRegistrationView.as_view",
"bookbar.auth_app.views.UserLoginView.as_view",
"django.urls.path",
"bookbar.auth_app.views.ChangeUserPasswordView.as_view"
] | [((327, 375), 'django.urls.path', 'path', (['"""logout/"""', 'logout_user'], {'name': '"""logout user"""'}), "('logout/', logout_user, name='logout user')\n", (331, 375), False, 'from django.urls import path\n'), ((204, 234), 'bookbar.auth_app.views.UserRegistrationView.as_view', 'UserRegistrationView.as_view', ([], {}), '()\n', (232, 234), False, 'from bookbar.auth_app.views import UserRegistrationView, UserLoginView, logout_user, ChangeUserPasswordView, ChangeUserEmailView\n'), ((278, 301), 'bookbar.auth_app.views.UserLoginView.as_view', 'UserLoginView.as_view', ([], {}), '()\n', (299, 301), False, 'from bookbar.auth_app.views import UserRegistrationView, UserLoginView, logout_user, ChangeUserPasswordView, ChangeUserEmailView\n'), ((412, 441), 'bookbar.auth_app.views.ChangeUserEmailView.as_view', 'ChangeUserEmailView.as_view', ([], {}), '()\n', (439, 441), False, 'from bookbar.auth_app.views import UserRegistrationView, UserLoginView, logout_user, ChangeUserPasswordView, ChangeUserEmailView\n'), ((494, 526), 'bookbar.auth_app.views.ChangeUserPasswordView.as_view', 'ChangeUserPasswordView.as_view', ([], {}), '()\n', (524, 526), False, 'from bookbar.auth_app.views import UserRegistrationView, UserLoginView, logout_user, ChangeUserPasswordView, ChangeUserEmailView\n')] |
import iraf
no = iraf.no
yes = iraf.yes
from axe import axesrc
# Point to default parameter file for task
_parfile = 'axe$fcubeprep.par'
_taskname = 'fcubeprep'
######
# Set up Python IRAF interface here
######
def fcubeprep_iraf(grism_image,
                   segm_image,
                   filter_info,
                   AB_zero,
                   dim_info,
                   interpol,
                   useMdriz):
    """IRAF entry point for the aXe ``fcubeprep`` task.

    Normalizes the string parameters, converts the IRAF ``yes``/``no``
    values to Python booleans and forwards everything to
    ``axesrc.fcubeprep``.  When any of the mandatory string parameters is
    missing (``None``), the task help text is printed instead.
    """
    # Properly format the strings.  NOTE(review): presumably
    # straighten_string returns None for empty input -- that is what the
    # guard below checks for; confirm against axesrc.
    grism_image = axesrc.straighten_string(grism_image)
    segm_image = axesrc.straighten_string(segm_image)
    filter_info = axesrc.straighten_string(filter_info)
    dim_info = axesrc.straighten_string(dim_info)

    # Transform the IRAF booleans to Python ones.
    AB_zero = bool(AB_zero == yes)
    # Anything other than an explicit ``no`` counts as True (as before).
    useMdriz = not (useMdriz == no)

    # Run only when all mandatory parameters are present.  Fixed: identity
    # comparison ``is not None`` instead of the unreliable ``!= None``.
    if grism_image is not None and segm_image is not None and filter_info is not None:
        # call the main function
        axesrc.fcubeprep(grism_image=grism_image,
                         segm_image=segm_image,
                         filter_info=filter_info,
                         AB_zero=AB_zero,
                         dim_info=dim_info,
                         interpol=interpol,
                         useMdriz=useMdriz)
    else:
        # No usable inputs: print the task help.
        iraf.help(_taskname)
# Resolve the IRAF parameter file and register ``fcubeprep`` as an IRAF task.
parfile = iraf.osfn(_parfile)
# NOTE(review): ``PkgName`` and ``PkgBinary`` are not defined in this module;
# presumably they are injected into the namespace by the IRAF package loader
# before this file is executed -- confirm.
multid = iraf.IrafTaskFactory(taskname=_taskname, value=parfile,
                  pkgname=PkgName, pkgbinary=PkgBinary,
                  function=fcubeprep_iraf)
| [
"iraf.IrafTaskFactory",
"axe.axesrc.fcubeprep",
"iraf.help",
"iraf.osfn",
"axe.axesrc.straighten_string"
] | [((1449, 1468), 'iraf.osfn', 'iraf.osfn', (['_parfile'], {}), '(_parfile)\n', (1458, 1468), False, 'import iraf\n'), ((1478, 1600), 'iraf.IrafTaskFactory', 'iraf.IrafTaskFactory', ([], {'taskname': '_taskname', 'value': 'parfile', 'pkgname': 'PkgName', 'pkgbinary': 'PkgBinary', 'function': 'fcubeprep_iraf'}), '(taskname=_taskname, value=parfile, pkgname=PkgName,\n pkgbinary=PkgBinary, function=fcubeprep_iraf)\n', (1498, 1600), False, 'import iraf\n'), ((480, 517), 'axe.axesrc.straighten_string', 'axesrc.straighten_string', (['grism_image'], {}), '(grism_image)\n', (504, 517), False, 'from axe import axesrc\n'), ((536, 572), 'axe.axesrc.straighten_string', 'axesrc.straighten_string', (['segm_image'], {}), '(segm_image)\n', (560, 572), False, 'from axe import axesrc\n'), ((591, 628), 'axe.axesrc.straighten_string', 'axesrc.straighten_string', (['filter_info'], {}), '(filter_info)\n', (615, 628), False, 'from axe import axesrc\n'), ((647, 681), 'axe.axesrc.straighten_string', 'axesrc.straighten_string', (['dim_info'], {}), '(dim_info)\n', (671, 681), False, 'from axe import axesrc\n'), ((1058, 1230), 'axe.axesrc.fcubeprep', 'axesrc.fcubeprep', ([], {'grism_image': 'grism_image', 'segm_image': 'segm_image', 'filter_info': 'filter_info', 'AB_zero': 'AB_zero', 'dim_info': 'dim_info', 'interpol': 'interpol', 'useMdriz': 'useMdriz'}), '(grism_image=grism_image, segm_image=segm_image,\n filter_info=filter_info, AB_zero=AB_zero, dim_info=dim_info, interpol=\n interpol, useMdriz=useMdriz)\n', (1074, 1230), False, 'from axe import axesrc\n'), ((1416, 1436), 'iraf.help', 'iraf.help', (['_taskname'], {}), '(_taskname)\n', (1425, 1436), False, 'import iraf\n')] |
# Generated by Django 2.1.8 on 2019-04-28 10:47
from django.db import migrations, models
import mainApp.models
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates the ``ProblemPost`` table.

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='ProblemPost',
            fields=[
                # Auto-incrementing primary key added by Django.
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=100)),
                ('body', models.TextField(max_length=100000)),
                # Sample input/output text for the problem.
                ('example_in', models.TextField(max_length=1000)),
                ('example_out', models.TextField(max_length=1000)),
                # Validated by mainApp.models.isValidTimeLimit.
                ('time_limit', models.PositiveSmallIntegerField(validators=[mainApp.models.isValidTimeLimit])),
                ('created_date', models.DateTimeField(auto_now_add=True)),
            ],
        ),
    ]
| [
"django.db.models.TextField",
"django.db.models.AutoField",
"django.db.models.DateTimeField",
"django.db.models.PositiveSmallIntegerField",
"django.db.models.CharField"
] | [((329, 422), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (345, 422), False, 'from django.db import migrations, models\n'), ((447, 479), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (463, 479), False, 'from django.db import migrations, models\n'), ((507, 542), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(100000)'}), '(max_length=100000)\n', (523, 542), False, 'from django.db import migrations, models\n'), ((576, 609), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(1000)'}), '(max_length=1000)\n', (592, 609), False, 'from django.db import migrations, models\n'), ((644, 677), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(1000)'}), '(max_length=1000)\n', (660, 677), False, 'from django.db import migrations, models\n'), ((711, 789), 'django.db.models.PositiveSmallIntegerField', 'models.PositiveSmallIntegerField', ([], {'validators': '[mainApp.models.isValidTimeLimit]'}), '(validators=[mainApp.models.isValidTimeLimit])\n', (743, 789), False, 'from django.db import migrations, models\n'), ((825, 864), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (845, 864), False, 'from django.db import migrations, models\n')] |
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))

# Use the README as the long description shown on PyPI.
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
    long_description = f.read()

setup(
    name='robotframework-snowlibrary',
    version='1.4.0',
    url='',
    license='Apache License 2.0',
    author='<NAME>',
    author_email='<EMAIL>',
    long_description=long_description,
    # src-layout: all packages live under src/.
    package_dir={'': 'src'},
    packages=find_packages('src', exclude=['contrib', 'docs', 'tests']),
    install_requires=['requests', 'bs4','robotframework', 'docutils',
                      'robotremoteserver', 'pysnow', 'robotframework-sshlibrary', 'rstr'],
    description='A Robot Framework Library with keywords for testing ServiceNow.',
    # Test requirements
    tests_require=['pytest'],
)
| [
"os.path.dirname",
"setuptools.find_packages",
"os.path.join"
] | [((109, 131), 'os.path.dirname', 'path.dirname', (['__file__'], {}), '(__file__)\n', (121, 131), False, 'from os import path\n'), ((144, 173), 'os.path.join', 'path.join', (['here', '"""README.rst"""'], {}), "(here, 'README.rst')\n", (153, 173), False, 'from os import path\n'), ((475, 533), 'setuptools.find_packages', 'find_packages', (['"""src"""'], {'exclude': "['contrib', 'docs', 'tests']"}), "('src', exclude=['contrib', 'docs', 'tests'])\n", (488, 533), False, 'from setuptools import setup, find_packages\n')] |
import numpy as np
import tensorflow as tf
from pytorch2onnx import AntiSpoofPredict
import cv2
import torch
import time
# --- PyTorch reference model -------------------------------------------------
# Load the anti-spoofing model and run one reference prediction so the
# TFLite output below can be compared against it.
device_id = 0
model_path = "./resources/anti_spoof_models/2020-09-28-13-11_Anti_Spoofing_1.2_112x112_model_iter-150.pth"
anti_model = AntiSpoofPredict(device_id, model_path)

dummy_img = cv2.imread("./datasets/RGB_Images/1.2_112x112/test_caffee_model/0/1599816416115_69.png")
dummy_output = anti_model.predict(dummy_img)
print("dummy_output_pytorch", dummy_output)

# NCHW -> NHWC: the TFLite interpreter expects channels-last input.
inputx = anti_model.transform_input(dummy_img)
inputx = inputx.permute(0, 2, 3, 1).numpy()
print(inputx.shape)

# --- TFLite model ------------------------------------------------------------
# Load TFLite model and allocate tensors.
interpreter = tf.lite.Interpreter(model_path="model.tflite")
interpreter.allocate_tensors()

# Get input and output tensors.
input_details = interpreter.get_input_details()
output_details = interpreter.get_output_details()
input_shape = input_details[0]['shape']
print(input_details)

interpreter.set_tensor(input_details[0]['index'], inputx)

# Time only the forward pass.  BUG FIX: the original started the clock
# *after* invoke(), so it was actually measuring get_tensor(); it also
# printed seconds while claiming milliseconds.
start_time = time.time()
interpreter.invoke()
end_time = time.time()

# The function `get_tensor()` returns a copy of the tensor data.
# Use `tensor()` in order to get a pointer to the tensor.
output_data = interpreter.get_tensor(output_details[0]['index'])

print("Time taken to one inference in milliseconds", (end_time - start_time) * 1000.0)
print("output of model", output_data)
"tensorflow.lite.Interpreter",
"numpy.random.random_sample",
"time.time",
"cv2.imread",
"pytorch2onnx.AntiSpoofPredict"
] | [((270, 309), 'pytorch2onnx.AntiSpoofPredict', 'AntiSpoofPredict', (['device_id', 'model_path'], {}), '(device_id, model_path)\n', (286, 309), False, 'from pytorch2onnx import AntiSpoofPredict\n'), ((323, 421), 'cv2.imread', 'cv2.imread', (['"""./datasets/RGB_Images/1.2_112x112/test_caffee_model/0/1599816416115_69.png"""'], {}), "(\n './datasets/RGB_Images/1.2_112x112/test_caffee_model/0/1599816416115_69.png'\n )\n", (333, 421), False, 'import cv2\n'), ((671, 717), 'tensorflow.lite.Interpreter', 'tf.lite.Interpreter', ([], {'model_path': '"""model.tflite"""'}), "(model_path='model.tflite')\n", (690, 717), True, 'import tensorflow as tf\n'), ((1230, 1241), 'time.time', 'time.time', ([], {}), '()\n', (1239, 1241), False, 'import time\n'), ((1441, 1452), 'time.time', 'time.time', ([], {}), '()\n', (1450, 1452), False, 'import time\n'), ((999, 1035), 'numpy.random.random_sample', 'np.random.random_sample', (['input_shape'], {}), '(input_shape)\n', (1022, 1035), True, 'import numpy as np\n')] |
import os
import numpy as np
def read_data(input_path, english_only, english_indices=None):
    """Load signature files named ``U<user>S<sample>.TXT`` from *input_path*.

    Parameters
    ----------
    input_path : str
        Directory holding the signature files.  Must end with a path
        separator, since it is concatenated with the file name directly.
    english_only : bool
        If True, keep only users whose number appears in *english_indices*.
    english_indices : list of int, optional
        User numbers corresponding to English signatures.

    Returns
    -------
    (sig_list, lab_list, id_list)
        Aligned lists of per-signature arrays (timestamp column removed),
        labels (0 = genuine for sample numbers <= 20, 1 = forged) and
        file-name stems.
    """
    if english_indices is None:
        english_indices = []
    sig_list = []
    lab_list = []  # Genuine = 0, Forged = 1
    id_list = []
    if english_only:
        input_files = [i for i in sorted(os.listdir(input_path))
                       if int(i.split('S')[0].replace('U', '')) in english_indices]
    else:
        input_files = sorted(os.listdir(input_path))
    for file in input_files:
        data = np.genfromtxt(input_path + file, skip_header=1)
        data = np.delete(data, 2, axis=1)  # drop the timestamp column
        # Parse the label first: the original appended sig/id before the
        # parse, so a malformed file name left the three lists out of sync.
        # The bare ``except`` is also narrowed so real errors still surface.
        try:
            label = 0 if int(file.split('S')[1].replace('.TXT', '')) < 21 else 1
        except (IndexError, ValueError):
            print(file)
            continue
        sig_list.append(data)
        id_list.append(file.split('.')[0])
        lab_list.append(label)
    return sig_list, lab_list, id_list
def normalize_data(data, skipcols=None):
    """Min-max normalize a list of 2-D arrays to the range [0, 1].

    The minimum and peak-to-peak range of every feature are computed over
    *all* signatures stacked together, so the same scaling is applied to
    each one.

    Parameters
    ----------
    data : list of np.ndarray
        Per-signature arrays, all with the same number of columns.
    skipcols : iterable of int, optional
        Column indices copied through unchanged.  BUG FIX: the original
        crashed with ``TypeError: argument of type 'NoneType' is not
        iterable`` when this was left as None; it now defaults to no
        skipped columns.

    Returns
    -------
    list of np.ndarray
        New float arrays; the inputs are not modified.
    """
    skip = set(skipcols) if skipcols is not None else set()
    # Stack everything so min/ptp are global, not per-signature.
    data_all = np.vstack(data)
    data_norm = []
    for sig in data:
        out = np.zeros((sig.shape[0], sig.shape[1]))
        for f in range(sig.shape[1]):
            if f in skip:
                out[:, f] = sig[:, f]
            else:
                # NOTE: a constant column (ptp == 0) still yields inf/nan,
                # exactly as before.
                out[:, f] = (sig[:, f] - data_all[:, f].min()) / data_all[:, f].ptp()
        data_norm.append(out)
    return data_norm
def merge_timesteps(x, timesteps_to_merge):
    """Concatenate consecutive raw timesteps into wider rows.

    With ``timesteps_to_merge == 3`` every group of 3 consecutive rows is
    flattened into one row of ``3 * num_features`` values.  A final window
    anchored at the end of the sequence guarantees the tail rows are
    included even when the length is not a multiple of the window size.
    """
    merged = []
    for sig in x:
        n_rows = sig.shape[0]
        rows = [
            np.concatenate(sig[start:start + timesteps_to_merge])
            for start in range(0, n_rows - timesteps_to_merge, timesteps_to_merge)
        ]
        # Tail window, anchored at the end of the sequence.
        rows.append(np.concatenate(sig[n_rows - timesteps_to_merge:n_rows, :]))
        merged.append(np.array(rows))
    return merged
def split_sequences(x, y, names, window_length, window_stride):
    """Cut every sequence into fixed-length windows.

    A window of ``window_length`` rows is slid over each sequence with step
    ``window_stride``; the parent sequence's label and id are attached to
    every window.  A final window anchored at the end of the sequence
    guarantees the tail is always covered.

    Returns three aligned ``np.ndarray``s: windows, labels and ids.
    """
    windows, labels, ids = [], [], []
    for seq, label, name in zip(x, y, names):
        n_rows = seq.shape[0]
        for start in range(0, n_rows - window_length, window_stride):
            windows.append(seq[start:start + window_length, :])
            labels.append(label)
            ids.append(name)
        # Tail window, anchored at the end of the sequence.
        windows.append(seq[n_rows - window_length:n_rows, :])
        labels.append(label)
        ids.append(name)
    return np.array(windows), np.array(labels), np.array(ids)
def split_train_test(x, y, names, train_percentage=0.75):
    """Split windows into train/test sets by signer id.

    All windows belonging to one subject land on the same side of the
    split, so no signer leaks from train into test.  Subject selection is
    randomized but reproducible (fixed seed).
    """
    np.random.seed(0)  # fixed seed -> reproducible split
    subjects = [name.split('S')[0] for name in names]
    unique_subjects = list(set(subjects))
    n_train = int(len(unique_subjects) * train_percentage)
    chosen = np.random.choice(unique_subjects, size=n_train, replace=False)
    flags = [subj in chosen for subj in subjects]
    train_idx = [i for i, in_train in enumerate(flags) if in_train]
    test_idx = [i for i, in_train in enumerate(flags) if not in_train]
    return x[train_idx], x[test_idx], y[train_idx], y[test_idx]
if __name__ == "__main__":
INPUT_PATH = '../datasets/Task2/'
OUTPUT_PATH = '../datasets/processed'
eng_indices = [2, 4, 6, 8, 10, 12, 13, 15, 18, 20, 22, 24, 25, 28, 30, 32, 33, 34, 35, 40] # English signature numbers
eng_only = True # Whether to only consider English or English and Chinese signatures
stride = 20 # How far to move the window for creating fixed-length subsequences with each signature
length = 25 # How big each window is for the fixed-length sequences
merge_num = 3 # How many rows to concatenate into a single row -- see function for more details
train_test_split = 0.75 # This is how much of the data will be used for TRAINING, the rest is for testing (split by ID)
normalize = True # Whether you want to normalize the data or not
signatures, labels, ids = read_data(INPUT_PATH, english_only=eng_only, english_indices=eng_indices)
if normalize:
signatures_normalized = normalize_data(signatures, skipcols=[2])
signatures_merged = merge_timesteps(x=signatures_normalized, timesteps_to_merge=merge_num)
else:
signatures_merged = merge_timesteps(x=signatures, timesteps_to_merge=merge_num)
signatures_subsequences, labels_subsequences, ids_subsequences = split_sequences(x=signatures_merged, y=labels, names=ids, window_length=length, window_stride=stride)
signatures_train, signatures_test, labels_train, labels_test = split_train_test(x=signatures_subsequences, y=labels_subsequences, names=ids_subsequences, train_percentage=0.75)
| [
"os.listdir",
"numpy.delete",
"numpy.array",
"numpy.zeros",
"numpy.empty",
"numpy.random.seed",
"numpy.concatenate",
"numpy.genfromtxt"
] | [((1179, 1213), 'numpy.empty', 'np.empty', (['(rows, data[0].shape[1])'], {}), '((rows, data[0].shape[1]))\n', (1187, 1213), True, 'import numpy as np\n'), ((3460, 3477), 'numpy.random.seed', 'np.random.seed', (['(0)'], {}), '(0)\n', (3474, 3477), True, 'import numpy as np\n'), ((570, 617), 'numpy.genfromtxt', 'np.genfromtxt', (['(input_path + file)'], {'skip_header': '(1)'}), '(input_path + file, skip_header=1)\n', (583, 617), True, 'import numpy as np\n'), ((628, 654), 'numpy.delete', 'np.delete', (['data', '(2)'], {'axis': '(1)'}), '(data, 2, axis=1)\n', (637, 654), True, 'import numpy as np\n'), ((1403, 1449), 'numpy.zeros', 'np.zeros', (['(data[i].shape[0], data[i].shape[1])'], {}), '((data[i].shape[0], data[i].shape[1]))\n', (1411, 1449), True, 'import numpy as np\n'), ((3235, 3252), 'numpy.array', 'np.array', (['split_x'], {}), '(split_x)\n', (3243, 3252), True, 'import numpy as np\n'), ((3254, 3271), 'numpy.array', 'np.array', (['split_y'], {}), '(split_y)\n', (3262, 3271), True, 'import numpy as np\n'), ((3273, 3292), 'numpy.array', 'np.array', (['split_ids'], {}), '(split_ids)\n', (3281, 3292), True, 'import numpy as np\n'), ((507, 529), 'os.listdir', 'os.listdir', (['input_path'], {}), '(input_path)\n', (517, 529), False, 'import os\n'), ((2373, 2443), 'numpy.concatenate', 'np.concatenate', (['x_i[x_i.shape[0] - timesteps_to_merge:x_i.shape[0], :]'], {}), '(x_i[x_i.shape[0] - timesteps_to_merge:x_i.shape[0], :])\n', (2387, 2443), True, 'import numpy as np\n'), ((2464, 2484), 'numpy.array', 'np.array', (['x_i_merged'], {}), '(x_i_merged)\n', (2472, 2484), True, 'import numpy as np\n'), ((2153, 2195), 'numpy.concatenate', 'np.concatenate', (['x_i[start_index:end_index]'], {}), '(x_i[start_index:end_index])\n', (2167, 2195), True, 'import numpy as np\n'), ((391, 413), 'os.listdir', 'os.listdir', (['input_path'], {}), '(input_path)\n', (401, 413), False, 'import os\n')] |
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: v3/asset/ip/v4/geolocation/geolocation.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='v3/asset/ip/v4/geolocation/geolocation.proto',
package='v3.asset.ip.v4.geolocation',
syntax='proto2',
serialized_options=None,
serialized_pb=_b('\n,v3/asset/ip/v4/geolocation/geolocation.proto\x12\x1av3.asset.ip.v4.geolocation\"\x94\x02\n\x07Message\x12\x0c\n\x04host\x18\x01 \x02(\t\x12\x0c\n\x04mask\x18\x02 \x01(\t\x12\x12\n\x07version\x18\x03 \x02(\x05:\x01\x34\x12\x11\n\tcontinent\x18\x05 \x01(\t\x12\x16\n\x0e\x63ontinent_code\x18\x06 \x01(\t\x12\x0f\n\x07\x63ountry\x18\x07 \x01(\t\x12\x14\n\x0c\x63ountry_code\x18\x08 \x01(\t\x12\x0e\n\x06region\x18\t \x01(\t\x12\x13\n\x0bregion_name\x18\n \x01(\t\x12\x0c\n\x04\x63ity\x18\x0b \x01(\t\x12\x0b\n\x03zip\x18\x0c \x01(\t\x12\x10\n\x08latitude\x18\r \x01(\x02\x12\x11\n\tlongitude\x18\x0e \x01(\x02\x12\x10\n\x08timezone\x18\x0f \x01(\t\x12\x10\n\x08\x64istrict\x18\x10 \x01(\t')
)
_MESSAGE = _descriptor.Descriptor(
name='Message',
full_name='v3.asset.ip.v4.geolocation.Message',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='host', full_name='v3.asset.ip.v4.geolocation.Message.host', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='mask', full_name='v3.asset.ip.v4.geolocation.Message.mask', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='version', full_name='v3.asset.ip.v4.geolocation.Message.version', index=2,
number=3, type=5, cpp_type=1, label=2,
has_default_value=True, default_value=4,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='continent', full_name='v3.asset.ip.v4.geolocation.Message.continent', index=3,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='continent_code', full_name='v3.asset.ip.v4.geolocation.Message.continent_code', index=4,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='country', full_name='v3.asset.ip.v4.geolocation.Message.country', index=5,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='country_code', full_name='v3.asset.ip.v4.geolocation.Message.country_code', index=6,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='region', full_name='v3.asset.ip.v4.geolocation.Message.region', index=7,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='region_name', full_name='v3.asset.ip.v4.geolocation.Message.region_name', index=8,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='city', full_name='v3.asset.ip.v4.geolocation.Message.city', index=9,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='zip', full_name='v3.asset.ip.v4.geolocation.Message.zip', index=10,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='latitude', full_name='v3.asset.ip.v4.geolocation.Message.latitude', index=11,
number=13, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='longitude', full_name='v3.asset.ip.v4.geolocation.Message.longitude', index=12,
number=14, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='timezone', full_name='v3.asset.ip.v4.geolocation.Message.timezone', index=13,
number=15, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='district', full_name='v3.asset.ip.v4.geolocation.Message.district', index=14,
number=16, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=77,
serialized_end=353,
)
# protoc boilerplate: register the file descriptor with the default symbol
# database and generate the concrete ``Message`` class from ``_MESSAGE``.
DESCRIPTOR.message_types_by_name['Message'] = _MESSAGE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)

Message = _reflection.GeneratedProtocolMessageType('Message', (_message.Message,), dict(
  DESCRIPTOR = _MESSAGE,
  __module__ = 'v3.asset.ip.v4.geolocation.geolocation_pb2'
  # @@protoc_insertion_point(class_scope:v3.asset.ip.v4.geolocation.Message)
  ))
_sym_db.RegisterMessage(Message)

# @@protoc_insertion_point(module_scope)
| [
"google.protobuf.symbol_database.Default",
"google.protobuf.descriptor.FieldDescriptor"
] | [((468, 494), 'google.protobuf.symbol_database.Default', '_symbol_database.Default', ([], {}), '()\n', (492, 494), True, 'from google.protobuf import symbol_database as _symbol_database\n'), ((2352, 2699), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""version"""', 'full_name': '"""v3.asset.ip.v4.geolocation.Message.version"""', 'index': '(2)', 'number': '(3)', 'type': '(5)', 'cpp_type': '(1)', 'label': '(2)', 'has_default_value': '(True)', 'default_value': '(4)', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'serialized_options': 'None', 'file': 'DESCRIPTOR'}), "(name='version', full_name=\n 'v3.asset.ip.v4.geolocation.Message.version', index=2, number=3, type=5,\n cpp_type=1, label=2, has_default_value=True, default_value=4,\n message_type=None, enum_type=None, containing_type=None, is_extension=\n False, extension_scope=None, serialized_options=None, file=DESCRIPTOR)\n", (2379, 2699), True, 'from google.protobuf import descriptor as _descriptor\n')] |
'''
a simple script for generating uniform randoms and
jackknife regions assuming the KiDS-1000 mask
'''
import fitsio
import numpy as np
import healpy as hp
import kmeans_radec
from kmeans_radec import KMeans, kmeans_sample
from astropy.table import Table
# Load the lens catalogue (positions + redshift).  NOTE(review): ra/dec are
# extracted but never used below -- presumably left over from an earlier
# version that derived the footprint from the catalogue; confirm.
lens = fitsio.read("lens.fits", columns = ["ra_gal", "dec_gal", "observed_redshift_gal"])
ra, dec = lens["ra_gal"], lens["dec_gal"]

# Hard-coded rectangular footprint (degrees).
ra_min, dec_min, ra_max, dec_max = 0, 0, 90, 90

# Draw uniform randoms on the sphere: RA uniform in [0, 360) and
# dec = arcsin(u) with u uniform in [-1, 1], which gives a uniform surface
# density on the sphere.
Nr= 50000000
ran_ra = np.random.uniform(0,360,Nr)
ran_dec = np.degrees(np.arcsin(np.random.uniform(-1,1,Nr)))

# Keep only the randoms inside the footprint.
ran_mask = (ran_ra > ra_min)&(ran_ra < ra_max)&(ran_dec > dec_min)&(ran_dec < dec_max)
ran_ra, ran_dec = ran_ra[ran_mask], ran_dec[ran_mask]

randoms = {'ra': ran_ra,
           'dec': ran_dec}
coord = np.vstack([randoms['ra'], randoms['dec']]).T

# Partition the randoms into ncen jackknife regions via k-means on the sky.
ncen = 100
km = kmeans_sample(coord, ncen, maxiter=30, tol=1.0e-4)
labels = km.find_nearest(coord)

# Write the labelled randoms and the region centers to disk.
table = Table([coord[:,0], coord[:,1], labels], names=('RA', 'DEC', 'JK_LABEL'))
table.write('flagship_randoms_v2.fits', format='fits')
np.savetxt("flagship_jk_centers_v2.txt", km.centers)
| [
"astropy.table.Table",
"fitsio.read",
"numpy.vstack",
"numpy.savetxt",
"numpy.random.uniform",
"kmeans_radec.kmeans_sample"
] | [((266, 351), 'fitsio.read', 'fitsio.read', (['"""lens.fits"""'], {'columns': "['ra_gal', 'dec_gal', 'observed_redshift_gal']"}), "('lens.fits', columns=['ra_gal', 'dec_gal', 'observed_redshift_gal']\n )\n", (277, 351), False, 'import fitsio\n'), ((464, 493), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(360)', 'Nr'], {}), '(0, 360, Nr)\n', (481, 493), True, 'import numpy as np\n'), ((817, 867), 'kmeans_radec.kmeans_sample', 'kmeans_sample', (['coord', 'ncen'], {'maxiter': '(30)', 'tol': '(0.0001)'}), '(coord, ncen, maxiter=30, tol=0.0001)\n', (830, 867), False, 'from kmeans_radec import KMeans, kmeans_sample\n'), ((910, 984), 'astropy.table.Table', 'Table', (['[coord[:, 0], coord[:, 1], labels]'], {'names': "('RA', 'DEC', 'JK_LABEL')"}), "([coord[:, 0], coord[:, 1], labels], names=('RA', 'DEC', 'JK_LABEL'))\n", (915, 984), False, 'from astropy.table import Table\n'), ((1038, 1090), 'numpy.savetxt', 'np.savetxt', (['"""flagship_jk_centers_v2.txt"""', 'km.centers'], {}), "('flagship_jk_centers_v2.txt', km.centers)\n", (1048, 1090), True, 'import numpy as np\n'), ((756, 798), 'numpy.vstack', 'np.vstack', (["[randoms['ra'], randoms['dec']]"], {}), "([randoms['ra'], randoms['dec']])\n", (765, 798), True, 'import numpy as np\n'), ((523, 551), 'numpy.random.uniform', 'np.random.uniform', (['(-1)', '(1)', 'Nr'], {}), '(-1, 1, Nr)\n', (540, 551), True, 'import numpy as np\n')] |
# Generated by Django 2.0.5 on 2018-05-12 23:42
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates the Board and Move tables.

    initial = True

    dependencies = [
        # Depends on whichever user model the project has configured.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Board',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Board state in FEN; the default is the standard chess
                # starting position.
                ('fen', models.CharField(default='rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1', max_length=256)),
                ('date_started', models.DateTimeField(auto_now_add=True)),
                ('date_ended', models.DateTimeField(null=True)),
                # Nullable boolean: None is an allowed value.
                ('is_won', models.NullBooleanField()),
                ('created_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Move',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('to_position', models.CharField(max_length=5)),
                ('created', models.DateTimeField(auto_now_add=True)),
                # Moves are deleted together with their board.
                ('board', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='moves', to='board.Board')),
            ],
        ),
    ]
| [
"django.db.models.ForeignKey",
"django.db.models.NullBooleanField",
"django.db.models.AutoField",
"django.db.models.DateTimeField",
"django.db.migrations.swappable_dependency",
"django.db.models.CharField"
] | [((247, 304), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (278, 304), False, 'from django.db import migrations, models\n'), ((434, 527), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (450, 527), False, 'from django.db import migrations, models\n'), ((550, 655), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1"""', 'max_length': '(256)'}), "(default=\n 'rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1', max_length=256)\n", (566, 655), False, 'from django.db import migrations, models\n'), ((686, 725), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (706, 725), False, 'from django.db import migrations, models\n'), ((759, 790), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'null': '(True)'}), '(null=True)\n', (779, 790), False, 'from django.db import migrations, models\n'), ((820, 845), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {}), '()\n', (843, 845), False, 'from django.db import migrations, models\n'), ((879, 975), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL'}), '(on_delete=django.db.models.deletion.CASCADE, to=settings.\n AUTH_USER_MODEL)\n', (896, 975), False, 'from django.db import migrations, models\n'), ((1100, 1193), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1116, 1193), 
False, 'from django.db import migrations, models\n'), ((1224, 1254), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(5)'}), '(max_length=5)\n', (1240, 1254), False, 'from django.db import migrations, models\n'), ((1285, 1324), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (1305, 1324), False, 'from django.db import migrations, models\n'), ((1353, 1460), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""moves"""', 'to': '"""board.Board"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='moves', to='board.Board')\n", (1370, 1460), False, 'from django.db import migrations, models\n')] |
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from abc import ABC, abstractmethod
import logging
import sys
import openstack
import urllib3
# Silence TLS certificate warnings process-wide; the exporter may talk to
# endpoints with self-signed certificates.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# Module logger for the exporter.
LOG = logging.getLogger('openstack_exporter.exporter')
# Route openstacksdk's own logging to stdout with debug output disabled.
openstack.enable_logging(debug=False, http_debug=False, stream=sys.stdout)
class BaseCollector(ABC):
    """Abstract base class for OpenStack metric collectors.

    Subclasses implement :meth:`describe` and :meth:`collect` (the
    prometheus-client collector protocol) and use ``self.client`` for
    OpenStack API calls.
    """

    def __init__(self, openstack_config):
        """Store the configuration and connect to the OpenStack API.

        :param openstack_config: mapping with auth parameters
            (``auth_url``, ``username``, ``password``, domain/project
            names and ``region``).
        """
        self.config = openstack_config
        self.region = self.config['region']
        self.client = self._connect()

    def _connect(self):
        """Connect to the OpenStack Service and return the connection."""
        # Lazy %-style arguments: the message is only formatted when
        # debug logging is actually enabled.
        LOG.debug("Connecting to Openstack API %s", self.config['auth_url'])
        conn = openstack.connect(
            auth_url=self.config['auth_url'],
            username=self.config['username'],
            password=self.config['password'],
            user_domain_name=self.config['user_domain_name'],
            project_domain_name=self.config['project_domain_name'],
            project_name=self.config['project_name'],
            region_name=self.region,
            app_name='Openstack prometheus exporter',
            app_version='1.0'
        )
        LOG.debug("Connected to OpenStack %s", self.config['auth_url'])
        return conn

    @abstractmethod
    def describe(self):
        """Yield metric descriptions without querying the API."""
        pass

    @abstractmethod
    def collect(self):
        """Yield the collected metrics."""
        pass
| [
"logging.getLogger",
"urllib3.disable_warnings",
"openstack.connect",
"openstack.enable_logging"
] | [((693, 760), 'urllib3.disable_warnings', 'urllib3.disable_warnings', (['urllib3.exceptions.InsecureRequestWarning'], {}), '(urllib3.exceptions.InsecureRequestWarning)\n', (717, 760), False, 'import urllib3\n'), ((768, 816), 'logging.getLogger', 'logging.getLogger', (['"""openstack_exporter.exporter"""'], {}), "('openstack_exporter.exporter')\n", (785, 816), False, 'import logging\n'), ((817, 891), 'openstack.enable_logging', 'openstack.enable_logging', ([], {'debug': '(False)', 'http_debug': '(False)', 'stream': 'sys.stdout'}), '(debug=False, http_debug=False, stream=sys.stdout)\n', (841, 891), False, 'import openstack\n'), ((1279, 1655), 'openstack.connect', 'openstack.connect', ([], {'auth_url': "self.config['auth_url']", 'username': "self.config['username']", 'password': "self.config['password']", 'user_domain_name': "self.config['user_domain_name']", 'project_domain_name': "self.config['project_domain_name']", 'project_name': "self.config['project_name']", 'region_name': 'self.region', 'app_name': '"""Openstack prometheus exporter"""', 'app_version': '"""1.0"""'}), "(auth_url=self.config['auth_url'], username=self.config[\n 'username'], password=self.config['password'], user_domain_name=self.\n config['user_domain_name'], project_domain_name=self.config[\n 'project_domain_name'], project_name=self.config['project_name'],\n region_name=self.region, app_name='Openstack prometheus exporter',\n app_version='1.0')\n", (1296, 1655), False, 'import openstack\n')] |
# ----------------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------------
# ExportGDBFieldsToFolder.py
# Description: Exports all fields and field properties from a Military Features GDB
# as a set of csvs to the folder selected
#-------------------------------------------------------------------------------
# Requires: ArcGIS Desktop/Pro, arcpy, Python 2 or 3
#-------------------------------------------------------------------------------
import csv
import os
import sys
import arcpy
def exportFields():
    """Export all fields of every feature class in a Military Features GDB.

    Reads two geoprocessing parameters (0: input GDB path, 1: output
    folder) and writes one ``Fields_<FeatureClass>.csv`` per feature
    class, one row per field: name, type, length, alias, nullability
    and domain.  OBJECTID and Shape fields are skipped.
    """
    gdb = arcpy.GetParameter(0)
    folder = arcpy.GetParameter(1)
    if (gdb == '') or (gdb is None):
        arcpy.AddError('Input GDB not provided')
        # Fallback path kept from the original for interactive debugging.
        gdb = 'C:/Github/military-features-data/data/mil2525d/core_data/gdbs/MilitaryOverlay.gdb'
        # return
    if (folder == '') or (folder is None):
        arcpy.AddError('Output folder not provided')
        folder = 'C:/TestData/ExportedFields'
        # return
    # Check Input
    try:
        desc = arcpy.Describe(gdb)
        if desc is None:
            arcpy.AddError('Could not open GDB: ' + str(gdb))
            print('Exiting...')
            return
    except Exception:
        arcpy.AddError('Could not open GDB: ' + str(gdb))
        print('Exiting...')
        return
    # Set the workspace for ListFeatureClasses
    arcpy.env.workspace = gdb
    datasets = arcpy.ListDatasets()
    if datasets is None:
        # BUG FIX: the original fell through and iterated None (TypeError).
        arcpy.AddError("Failed to find any datasets")
        return
    for dataset in datasets:
        # List feature classes from this dataset
        # IMPORTANT: Assumes all feature classes will be within a dataset
        featureClasses = arcpy.ListFeatureClasses(feature_dataset=dataset)
        for featureClass in featureClasses:
            csvFileName = 'Fields_' + str(featureClass) + '.csv'
            arcpy.AddMessage('-------------------------------------------------')
            arcpy.AddMessage('---> Exporting: ' + str(featureClass) + ' to CSV: ' + csvFileName)
            arcpy.AddMessage('-------------------------------------------------')
            # Create a csv file and writer to export the fields
            csvFullFileName = os.path.join(str(folder), csvFileName)
            # BUG FIX: Python 2's open() has no `newline` keyword (it was a
            # TypeError); Py2 wants binary mode, Py3 wants text + newline=''.
            if sys.version_info[0] < 3:
                csvFile = open(csvFullFileName, 'wb')
            else:
                csvFile = open(csvFullFileName, 'w', newline='')
            # `with` guarantees the handle is closed even if a field errors.
            with csvFile:
                writer = csv.writer(csvFile, delimiter=',')
                # Expected order/format:
                # field_name,field_type,field_length,field_alias,nullability,field_domain
                writer.writerow(["field_name", "field_type", "field_length",
                                 "field_alias", "nullability", "field_domain"])
                for field in arcpy.ListFields(featureClass):
                    # Skip OID & Shape fields
                    if 'OBJECTID' in field.name or 'SHAPE' in field.name or 'Shape' in field.name:
                        print('Skipping field: ' + field.name)
                        continue
                    nullability = 'NULLABLE' if field.isNullable else 'NON_NULLABLE'
                    row = [field.name, str(field.type), str(field.length),
                           field.aliasName, nullability, field.domain]
                    arcpy.AddMessage('Field Name: ' + field.name + ', Properties= ' + str(row))
                    writer.writerow(row)
if __name__ == '__main__':
exportFields() | [
"arcpy.AddMessage",
"arcpy.Describe",
"csv.writer",
"arcpy.AddError",
"arcpy.ListFeatureClasses",
"arcpy.ListFields",
"arcpy.GetParameter",
"arcpy.ListDatasets"
] | [((1159, 1180), 'arcpy.GetParameter', 'arcpy.GetParameter', (['(0)'], {}), '(0)\n', (1177, 1180), False, 'import arcpy\n'), ((1191, 1212), 'arcpy.GetParameter', 'arcpy.GetParameter', (['(1)'], {}), '(1)\n', (1209, 1212), False, 'import arcpy\n'), ((1891, 1911), 'arcpy.ListDatasets', 'arcpy.ListDatasets', ([], {}), '()\n', (1909, 1911), False, 'import arcpy\n'), ((1250, 1290), 'arcpy.AddError', 'arcpy.AddError', (['"""Input GDB not provided"""'], {}), "('Input GDB not provided')\n", (1264, 1290), False, 'import arcpy\n'), ((1437, 1481), 'arcpy.AddError', 'arcpy.AddError', (['"""Output folder not provided"""'], {}), "('Output folder not provided')\n", (1451, 1481), False, 'import arcpy\n'), ((1566, 1585), 'arcpy.Describe', 'arcpy.Describe', (['gdb'], {}), '(gdb)\n', (1580, 1585), False, 'import arcpy\n'), ((1938, 1983), 'arcpy.AddError', 'arcpy.AddError', (['"""Failed to find any datasets"""'], {}), "('Failed to find any datasets')\n", (1952, 1983), False, 'import arcpy\n'), ((2144, 2193), 'arcpy.ListFeatureClasses', 'arcpy.ListFeatureClasses', ([], {'feature_dataset': 'dataset'}), '(feature_dataset=dataset)\n', (2168, 2193), False, 'import arcpy\n'), ((2296, 2365), 'arcpy.AddMessage', 'arcpy.AddMessage', (['"""-------------------------------------------------"""'], {}), "('-------------------------------------------------')\n", (2312, 2365), False, 'import arcpy\n'), ((2457, 2526), 'arcpy.AddMessage', 'arcpy.AddMessage', (['"""-------------------------------------------------"""'], {}), "('-------------------------------------------------')\n", (2473, 2526), False, 'import arcpy\n'), ((2846, 2880), 'csv.writer', 'csv.writer', (['csvFile'], {'delimiter': '""","""'}), "(csvFile, delimiter=',')\n", (2856, 2880), False, 'import csv\n'), ((3133, 3163), 'arcpy.ListFields', 'arcpy.ListFields', (['featureClass'], {}), '(featureClass)\n', (3149, 3163), False, 'import arcpy\n')] |
# -*- coding: utf-8 -*-
import click
import gitlab
from colored import fg, bg, attr
from core import gitlabwrapper
from config import Config
from configparser import ConfigParser
from core.utils import read_config, print_green, print_red, create_gl
g_namespace_lst = {}
from prettytable import PrettyTable
# def read_config(ctx, param, value):
# cfg = ConfigParser()
# cfg.read('gitlab.cfg')
# dict = {}
# for item in cfg.sections():
# dict.setdefault(item, {})
# for key, value in cfg[item].items():
# # print(key,value)
# dict[item].setdefault(key, value)
# return dict
@click.group(invoke_without_command=False)
@click.option('-c', '--config', callback=read_config, type=click.File('r'),
             help='default gitlab.cfg')
@click.option('-s', '--section', type=str, default="global", help='use default global section')
@click.pass_context
def cli(ctx, **kwargs):
    # Root command group: stash the parsed options (config dict, section)
    # on ctx.obj so subcommands can build a client via create_gl().
    ctx.obj = {}
    ctx.obj.update(kwargs)
    # NOTE(review): with invoke_without_command=False this callback only
    # runs when a subcommand was given, so invoked_subcommand should never
    # be None here; also `all` resolves to the *builtin* all(), not a
    # click command.  This branch looks dead/broken -- confirm intent.
    if ctx.invoked_subcommand is None and not ctx.obj.get('testing_mode'):
        ctx.invoke(all)
    return ctx
@cli.command()
@click.pass_context
def namespace_list(ctx, **kwargs):
    """Print every namespace as a two-column table (id, name)."""
    client = create_gl(ctx, **kwargs)
    if not client:
        print_red("not subtaks")
        return
    table = PrettyTable(['id', 'namespace'])
    table.align["namespace"] = "l"  # left-align the namespace column
    for namespace in client.get_namespace_list():
        table.add_row([namespace.id, namespace.name])
    print_green(table)
@cli.command()
@click.pass_context
def project_list(ctx, **kwargs):
    """Print every project as a two-column table (id, name)."""
    client = create_gl(ctx, **kwargs)
    table = PrettyTable(['id', 'name'])
    table.align["name"] = "l"  # left-align the name column
    for project in client.get_project_list():
        table.add_row([project.id, project.name])
    print(table)
@cli.command()
@click.option('--namespace', type=str)
@click.option('--name', type=str)
@click.pass_context
def project_create(ctx, namespace, name, **kwargs):
    """Create a project under --namespace and print its id and URLs."""
    try:
        client = create_gl(ctx, **kwargs)
        print('创建工程')
        project = client.create_project(namespace, name)
        table = PrettyTable(
            ['id', 'name_with_namespace', 'ssh_url_to_repo', 'http_url_to_repo'])
        table.align['name_with_namespace'] = 'l'
        table.add_row([project.id, project.name_with_namespace,
                       project.ssh_url_to_repo, project.http_url_to_repo])
        print(table)
    except Exception as e:
        print("创建失败, %s" % (e))
@cli.command()
@click.option('--id', type=str)
@click.pass_context
def project_remove(ctx, id, **kwargs):
    """Delete the project identified by --id and report the outcome."""
    try:
        print('删除工程')
        client = create_gl(ctx, **kwargs)
        client.del_project_by_id(id)
    except Exception as e:
        print("删除失败, %s" % (e))
    else:
        print("删除成功")
# mytestaaa11112
# Script entry point: hand control to the click command group.
if __name__ == '__main__':
    cli()
# TODO (translated from Chinese):
# - create namespace
# - get namespace
# - create project
# - delete project
# - create user
| [
"prettytable.PrettyTable",
"click.group",
"click.option",
"click.File",
"core.utils.create_gl",
"core.utils.print_green",
"core.utils.print_red"
] | [((641, 682), 'click.group', 'click.group', ([], {'invoke_without_command': '(False)'}), '(invoke_without_command=False)\n', (652, 682), False, 'import click\n'), ((801, 900), 'click.option', 'click.option', (['"""-s"""', '"""--section"""'], {'type': 'str', 'default': '"""global"""', 'help': '"""use default global section"""'}), "('-s', '--section', type=str, default='global', help=\n 'use default global section')\n", (813, 900), False, 'import click\n'), ((1817, 1854), 'click.option', 'click.option', (['"""--namespace"""'], {'type': 'str'}), "('--namespace', type=str)\n", (1829, 1854), False, 'import click\n'), ((1856, 1888), 'click.option', 'click.option', (['"""--name"""'], {'type': 'str'}), "('--name', type=str)\n", (1868, 1888), False, 'import click\n'), ((2408, 2438), 'click.option', 'click.option', (['"""--id"""'], {'type': 'str'}), "('--id', type=str)\n", (2420, 2438), False, 'import click\n'), ((1229, 1253), 'core.utils.create_gl', 'create_gl', (['ctx'], {}), '(ctx, **kwargs)\n', (1238, 1253), False, 'from core.utils import read_config, print_green, print_red, create_gl\n'), ((1262, 1294), 'prettytable.PrettyTable', 'PrettyTable', (["['id', 'namespace']"], {}), "(['id', 'namespace'])\n", (1273, 1294), False, 'from prettytable import PrettyTable\n'), ((1595, 1619), 'core.utils.create_gl', 'create_gl', (['ctx'], {}), '(ctx, **kwargs)\n', (1604, 1619), False, 'from core.utils import read_config, print_green, print_red, create_gl\n'), ((1628, 1655), 'prettytable.PrettyTable', 'PrettyTable', (["['id', 'name']"], {}), "(['id', 'name'])\n", (1639, 1655), False, 'from prettytable import PrettyTable\n'), ((742, 757), 'click.File', 'click.File', (['"""r"""'], {}), "('r')\n", (752, 757), False, 'import click\n'), ((1459, 1473), 'core.utils.print_green', 'print_green', (['x'], {}), '(x)\n', (1470, 1473), False, 'from core.utils import read_config, print_green, print_red, create_gl\n'), ((1492, 1516), 'core.utils.print_red', 'print_red', (['"""not subtaks"""'], 
{}), "('not subtaks')\n", (1501, 1516), False, 'from core.utils import read_config, print_green, print_red, create_gl\n'), ((1983, 2007), 'core.utils.create_gl', 'create_gl', (['ctx'], {}), '(ctx, **kwargs)\n', (1992, 2007), False, 'from core.utils import read_config, print_green, print_red, create_gl\n'), ((2042, 2127), 'prettytable.PrettyTable', 'PrettyTable', (["['id', 'name_with_namespace', 'ssh_url_to_repo', 'http_url_to_repo']"], {}), "(['id', 'name_with_namespace', 'ssh_url_to_repo',\n 'http_url_to_repo'])\n", (2053, 2127), False, 'from prettytable import PrettyTable\n'), ((2542, 2566), 'core.utils.create_gl', 'create_gl', (['ctx'], {}), '(ctx, **kwargs)\n', (2551, 2566), False, 'from core.utils import read_config, print_green, print_red, create_gl\n')] |
# Generated by Django 3.1 on 2020-11-23 02:24
from django.db import migrations
from manual.operations.manual_operations import ManualOperation
def test_has_sample_stats(apps):
SampleVariantAnnotationStats = apps.get_model("annotation", "SampleVariantAnnotationStats")
return SampleVariantAnnotationStats.objects.all().exists()
class Migration(migrations.Migration):
    """Queue a manual "calculate_sample_stats --clear" admin task.

    No schema changes: the ManualOperation is only flagged as required
    when sample stats rows already exist (see test_has_sample_stats).
    """
    dependencies = [
        ('annotation', '0009_vcfannotationstats'),
    ]
    operations = [
        ManualOperation(task_id=ManualOperation.task_id_manage(["calculate_sample_stats", "--clear"]),
                        test=test_has_sample_stats)
    ]
| [
"manual.operations.manual_operations.ManualOperation.task_id_manage"
] | [((511, 580), 'manual.operations.manual_operations.ManualOperation.task_id_manage', 'ManualOperation.task_id_manage', (["['calculate_sample_stats', '--clear']"], {}), "(['calculate_sample_stats', '--clear'])\n", (541, 580), False, 'from manual.operations.manual_operations import ManualOperation\n')] |
from flask_wtf import FlaskForm
from wtforms import StringField, TextAreaField, BooleanField, SelectField,\
SubmitField,IntegerField,DateField
from wtforms.validators import DataRequired, Length, Email, Regexp
from wtforms import ValidationError
from flask_pagedown.fields import PageDownField
from ..models import Role, User
class NameForm(FlaskForm):
    """Minimal form asking for the visitor's name."""
    name = StringField('What is your name?', validators=[DataRequired()])
    submit = SubmitField('Submit')
class EditProfileForm(FlaskForm):
    """Profile-edit form for regular users (real name, location, bio)."""
    name = StringField('Real name', validators=[Length(0, 64)])
    location = StringField('Location', validators=[Length(0, 64)])
    about_me = TextAreaField('About me')
    submit = SubmitField('Submit')
class EditProfileAdminForm(FlaskForm):
    """Administrator profile form: additionally edits email, username,
    confirmation status and role, with uniqueness validation."""
    email = StringField('Email', validators=[DataRequired(), Length(1, 64),
                                             Email()])
    username = StringField('Username', validators=[
        DataRequired(), Length(1, 64),
        Regexp('^[A-Za-z][A-Za-z0-9_.]*$', 0,
               'Usernames must have only letters, numbers, dots or '
               'underscores')])
    confirmed = BooleanField('Confirmed')
    role = SelectField('Role', coerce=int)  # choices filled in __init__
    name = StringField('Real name', validators=[Length(0, 64)])
    location = StringField('Location', validators=[Length(0, 64)])
    about_me = TextAreaField('About me')
    submit = SubmitField('Submit')
    def __init__(self, user, *args, **kwargs):
        """Build the form for *user*; load role choices from the database."""
        super(EditProfileAdminForm, self).__init__(*args, **kwargs)
        self.role.choices = [(role.id, role.name)
                             for role in Role.query.order_by(Role.name).all()]
        self.user = user
    def validate_email(self, field):
        """Reject an email already registered to a different account."""
        if field.data != self.user.email and \
                User.query.filter_by(email=field.data).first():
            raise ValidationError('Email already registered.')
    def validate_username(self, field):
        """Reject a username already taken by a different account."""
        if field.data != self.user.username and \
                User.query.filter_by(username=field.data).first():
            raise ValidationError('Username already in use.')
class PostForm(FlaskForm):
    """Short customer-capture form (basic contact details only).

    Field labels are in Chinese; attribute names keep the original spelling.
    """
    #body = PageDownField("What's on your mind?", validators=[DataRequired()])
    name = StringField('客户姓名', validators=[Length(0, 64)])  # customer name
    phnumber = StringField('手机号码', validators=[Length(0, 64)])  # mobile number
    # NOTE(review): label reads "ID card number" but the attribute is
    # named homeaddress -- confirm which is intended.
    homeaddress = StringField('身份证号码', validators=[Length(0, 64)])
    #career = StringField('职业', validators=[Length(0, 64)])
    #company = StringField('公司名称', validators=[Length(0, 64)])
    jobaddress = StringField('常住地址', validators=[Length(0, 64)])  # residential address
    old = IntegerField('出生年月')  # label says birth year/month; stored as int
    #families = IntegerField('家庭成员')
    #insurance = IntegerField('保单数')
    source = StringField('客户来源', validators=[Length(0, 64)])  # customer source
    #married = BooleanField('是否已婚')
    #bobies = BooleanField('是否有小孩')
    #liking = StringField('兴趣爱好', validators=[Length(0, 64)])
    connects = StringField('跟进建议')  # follow-up suggestions
    #income = IntegerField('年收入')
    submit = SubmitField('Submit')
class PostForm1(FlaskForm):
    """Full customer-record form (contact, occupation, grading, family)."""
    #body = PageDownField("What's on your mind?", validators=[DataRequired()])
    name = StringField('客户姓名', validators=[Length(0, 64)])  # customer name
    phnumber = StringField('手机号码', validators=[Length(0, 64)])  # mobile number
    # NOTE(review): label reads "ID card number" but the attribute is
    # named homeaddress -- confirm which is intended.
    homeaddress = StringField('身份证号码', validators=[Length(0, 64)])
    career = StringField('职业', validators=[Length(0, 64)])  # occupation
    company = StringField('公司名称', validators=[Length(0, 64)])  # company name
    #insurancese = IntegerField('保单数')
    jobaddress = StringField('常住地址', validators=[Length(0, 64)])  # residential address
    grade = StringField('客户等级', validators=[Length(0, 64)])  # customer grade
    gradeintro = StringField('客户经济状况描述', validators=[Length(0, 64)])  # financial summary
    old = IntegerField('出生年月')  # label says birth year/month; stored as int
    source = StringField('客户来源', validators=[Length(0, 64)])  # customer source
    married = BooleanField('是否已婚')  # married?
    bobies = BooleanField('是否有小孩')  # has children?
    liking = StringField('兴趣爱好', validators=[Length(0, 64)])  # hobbies
    connects = StringField('跟进建议')  # follow-up suggestions
    income = IntegerField('年收入')  # annual income
    submit = SubmitField('Submit')
class CommentForm(FlaskForm):
    """Detailed visit/communication log for a customer."""
    #body = PageDownField("What's on your mind?", validators=[DataRequired()])
    meetway = StringField('沟通方式', validators=[DataRequired()])  # contact method
    meetcase = StringField('沟通借口')  # pretext for the contact
    meetdate = DateField('拜访日期')  # visit date
    meetadress = StringField('拜访地址')  # visit address
    #meettimese = IntegerField('拜访次数')
    # NOTE(review): beetway/meetway are near-duplicate names -- confirm.
    beetway = StringField('沟通情况', validators=[DataRequired()])  # how it went
    newsabout = PageDownField("客户最新情况")  # latest customer status
    thisthink = StringField('沟通保险观念')  # insurance mindset discussed
    fation = StringField('客户反馈')  # customer feedback
    planbook = BooleanField('是否做计划书')  # proposal document prepared?
    badthing = PageDownField("拜访不足总结")  # visit shortcomings summary
    donething = PageDownField("做了哪些准备")  # preparation done
    nexttime = IntegerField('几天后再联系')  # days until next contact
    todo = PageDownField("这个客户以后怎么跟")  # future follow-up plan
    submit = SubmitField('Submit')
class CommentForm1(FlaskForm):
    """Minimal visit log: method, pretext, date and outcome only."""
    #body = PageDownField("What's on your mind?", validators=[DataRequired()])
    # NOTE(review): `meetway` is assigned twice in this class body; the
    # second assignment below ('沟通情况') overwrites this one, so the
    # '沟通方式' field is never rendered.  Likely one of them should be
    # named differently (CommentForm uses `beetway`) -- confirm intent.
    meetway = StringField('沟通方式', validators=[DataRequired()])
    meetcase = StringField('沟通借口', validators=[DataRequired()])
    meetdate = DateField('拜访日期')
    #meetadress = StringField('拜访地址', validators=[DataRequired()])
    #meettimese = IntegerField('拜访次数')
    meetway = StringField('沟通情况', validators=[DataRequired()])
    #newsabout = PageDownField("客户最新情况", validators=[DataRequired()])
    #thisthink = StringField('沟通保险观念', validators=[DataRequired()])
    #fation = StringField('客户反馈', validators=[DataRequired()])
    #planbook = BooleanField('是否做计划书')
    #badthing = PageDownField("拜访不足总结", validators=[DataRequired()])
    #donething = PageDownField("做了哪些准备", validators=[DataRequired()])
    #nexttime = IntegerField('几天后再联系')
    #todo = PageDownField("这个客户以后怎么跟", validators=[DataRequired()])
    submit = SubmitField('Submit')
class InsuranceForm1(FlaskForm):
    """Short policy form: type, application date, premium and amount."""
    insurname = StringField('险种', validators=[DataRequired()])  # policy type
    toubaoriqi = IntegerField('投保日期')  # application date (stored as int)
    baofei = IntegerField('保费')  # premium
    baoer = IntegerField('保额')  # insured amount
    submit = SubmitField('Submit')
class InsuranceForm(FlaskForm):
    """Full policy form: identifiers, dates, payment details and parties."""
    insurname = StringField('险种', validators=[DataRequired()])  # policy type
    toubaoriqi = IntegerField('投保日期')  # application date (stored as int)
    baofei = IntegerField('保费',validators=[DataRequired()])  # premium
    baoer = IntegerField('保额',validators=[DataRequired()])  # insured amount
    baodanhao = StringField('投保单号')  # policy application number
    shengxiaoriqi = IntegerField('生效日期')  # effective date (stored as int)
    baodanzhuangtai = StringField('保单状态')  # policy status
    jiaofeifangshi = StringField('缴费方式')  # payment method
    jiaofeiqi = IntegerField('缴费期')  # payment term
    baoxianqijian = IntegerField('保险期间')  # coverage period
    shixiaoriqi = IntegerField('失效日期')  # lapse date (stored as int)
    banknumber = StringField('银行账户')  # bank account
    bankname = StringField('缴费银行')  # paying bank
    bxzeren = TextAreaField('保险责任')  # coverage / liability description
    tbname = StringField('投保人')  # applicant
    bbname = StringField('被保人')  # insured person
    syname = StringField('收益人')  # beneficiary
    jjname = StringField('紧急联系人')  # emergency contact
    submit = SubmitField('Submit')
| [
"wtforms.IntegerField",
"wtforms.validators.Email",
"wtforms.validators.DataRequired",
"wtforms.BooleanField",
"wtforms.SubmitField",
"wtforms.StringField",
"wtforms.validators.Length",
"flask_pagedown.fields.PageDownField",
"wtforms.DateField",
"wtforms.validators.Regexp",
"wtforms.SelectField"... | [((457, 478), 'wtforms.SubmitField', 'SubmitField', (['"""Submit"""'], {}), "('Submit')\n", (468, 478), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((667, 692), 'wtforms.TextAreaField', 'TextAreaField', (['"""About me"""'], {}), "('About me')\n", (680, 692), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((707, 728), 'wtforms.SubmitField', 'SubmitField', (['"""Submit"""'], {}), "('Submit')\n", (718, 728), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((1166, 1191), 'wtforms.BooleanField', 'BooleanField', (['"""Confirmed"""'], {}), "('Confirmed')\n", (1178, 1191), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((1204, 1235), 'wtforms.SelectField', 'SelectField', (['"""Role"""'], {'coerce': 'int'}), "('Role', coerce=int)\n", (1215, 1235), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((1385, 1410), 'wtforms.TextAreaField', 'TextAreaField', (['"""About me"""'], {}), "('About me')\n", (1398, 1410), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((1425, 1446), 'wtforms.SubmitField', 'SubmitField', (['"""Submit"""'], {}), "('Submit')\n", (1436, 1446), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((2669, 2689), 'wtforms.IntegerField', 'IntegerField', (['"""出生年月"""'], {}), "('出生年月')\n", (2681, 2689), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((2981, 3000), 'wtforms.StringField', 'StringField', (['"""跟进建议"""'], {}), 
"('跟进建议')\n", (2992, 3000), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((3050, 3071), 'wtforms.SubmitField', 'SubmitField', (['"""Submit"""'], {}), "('Submit')\n", (3061, 3071), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((3746, 3766), 'wtforms.IntegerField', 'IntegerField', (['"""出生年月"""'], {}), "('出生年月')\n", (3758, 3766), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((3844, 3864), 'wtforms.BooleanField', 'BooleanField', (['"""是否已婚"""'], {}), "('是否已婚')\n", (3856, 3864), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((3879, 3900), 'wtforms.BooleanField', 'BooleanField', (['"""是否有小孩"""'], {}), "('是否有小孩')\n", (3891, 3900), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((3979, 3998), 'wtforms.StringField', 'StringField', (['"""跟进建议"""'], {}), "('跟进建议')\n", (3990, 3998), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((4013, 4032), 'wtforms.IntegerField', 'IntegerField', (['"""年收入"""'], {}), "('年收入')\n", (4025, 4032), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((4047, 4068), 'wtforms.SubmitField', 'SubmitField', (['"""Submit"""'], {}), "('Submit')\n", (4058, 4068), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((4264, 4283), 'wtforms.StringField', 'StringField', (['"""沟通借口"""'], {}), "('沟通借口')\n", (4275, 4283), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), 
((4300, 4317), 'wtforms.DateField', 'DateField', (['"""拜访日期"""'], {}), "('拜访日期')\n", (4309, 4317), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((4336, 4355), 'wtforms.StringField', 'StringField', (['"""拜访地址"""'], {}), "('拜访地址')\n", (4347, 4355), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((4477, 4500), 'flask_pagedown.fields.PageDownField', 'PageDownField', (['"""客户最新情况"""'], {}), "('客户最新情况')\n", (4490, 4500), False, 'from flask_pagedown.fields import PageDownField\n'), ((4518, 4539), 'wtforms.StringField', 'StringField', (['"""沟通保险观念"""'], {}), "('沟通保险观念')\n", (4529, 4539), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((4554, 4573), 'wtforms.StringField', 'StringField', (['"""客户反馈"""'], {}), "('客户反馈')\n", (4565, 4573), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((4590, 4612), 'wtforms.BooleanField', 'BooleanField', (['"""是否做计划书"""'], {}), "('是否做计划书')\n", (4602, 4612), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((4629, 4652), 'flask_pagedown.fields.PageDownField', 'PageDownField', (['"""拜访不足总结"""'], {}), "('拜访不足总结')\n", (4642, 4652), False, 'from flask_pagedown.fields import PageDownField\n'), ((4670, 4693), 'flask_pagedown.fields.PageDownField', 'PageDownField', (['"""做了哪些准备"""'], {}), "('做了哪些准备')\n", (4683, 4693), False, 'from flask_pagedown.fields import PageDownField\n'), ((4710, 4732), 'wtforms.IntegerField', 'IntegerField', (['"""几天后再联系"""'], {}), "('几天后再联系')\n", (4722, 4732), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((4745, 4771), 'flask_pagedown.fields.PageDownField', 
'PageDownField', (['"""这个客户以后怎么跟"""'], {}), "('这个客户以后怎么跟')\n", (4758, 4771), False, 'from flask_pagedown.fields import PageDownField\n'), ((4786, 4807), 'wtforms.SubmitField', 'SubmitField', (['"""Submit"""'], {}), "('Submit')\n", (4797, 4807), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((5067, 5084), 'wtforms.DateField', 'DateField', (['"""拜访日期"""'], {}), "('拜访日期')\n", (5076, 5084), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((5765, 5786), 'wtforms.SubmitField', 'SubmitField', (['"""Submit"""'], {}), "('Submit')\n", (5776, 5786), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((5908, 5928), 'wtforms.IntegerField', 'IntegerField', (['"""投保日期"""'], {}), "('投保日期')\n", (5920, 5928), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((5944, 5962), 'wtforms.IntegerField', 'IntegerField', (['"""保费"""'], {}), "('保费')\n", (5956, 5962), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((5977, 5995), 'wtforms.IntegerField', 'IntegerField', (['"""保额"""'], {}), "('保额')\n", (5989, 5995), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((6010, 6031), 'wtforms.SubmitField', 'SubmitField', (['"""Submit"""'], {}), "('Submit')\n", (6021, 6031), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((6154, 6174), 'wtforms.IntegerField', 'IntegerField', (['"""投保日期"""'], {}), "('投保日期')\n", (6166, 6174), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((6316, 6335), 
'wtforms.StringField', 'StringField', (['"""投保单号"""'], {}), "('投保单号')\n", (6327, 6335), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((6358, 6378), 'wtforms.IntegerField', 'IntegerField', (['"""生效日期"""'], {}), "('生效日期')\n", (6370, 6378), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((6403, 6422), 'wtforms.StringField', 'StringField', (['"""保单状态"""'], {}), "('保单状态')\n", (6414, 6422), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((6446, 6465), 'wtforms.StringField', 'StringField', (['"""缴费方式"""'], {}), "('缴费方式')\n", (6457, 6465), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((6484, 6503), 'wtforms.IntegerField', 'IntegerField', (['"""缴费期"""'], {}), "('缴费期')\n", (6496, 6503), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((6526, 6546), 'wtforms.IntegerField', 'IntegerField', (['"""保险期间"""'], {}), "('保险期间')\n", (6538, 6546), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((6567, 6587), 'wtforms.IntegerField', 'IntegerField', (['"""失效日期"""'], {}), "('失效日期')\n", (6579, 6587), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((6607, 6626), 'wtforms.StringField', 'StringField', (['"""银行账户"""'], {}), "('银行账户')\n", (6618, 6626), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((6644, 6663), 'wtforms.StringField', 'StringField', (['"""缴费银行"""'], {}), "('缴费银行')\n", (6655, 6663), False, 'from wtforms import StringField, TextAreaField, BooleanField, 
SelectField, SubmitField, IntegerField, DateField\n'), ((6680, 6701), 'wtforms.TextAreaField', 'TextAreaField', (['"""保险责任"""'], {}), "('保险责任')\n", (6693, 6701), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((6716, 6734), 'wtforms.StringField', 'StringField', (['"""投保人"""'], {}), "('投保人')\n", (6727, 6734), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((6750, 6768), 'wtforms.StringField', 'StringField', (['"""被保人"""'], {}), "('被保人')\n", (6761, 6768), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((6784, 6802), 'wtforms.StringField', 'StringField', (['"""收益人"""'], {}), "('收益人')\n", (6795, 6802), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((6818, 6838), 'wtforms.StringField', 'StringField', (['"""紧急联系人"""'], {}), "('紧急联系人')\n", (6829, 6838), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((6856, 6877), 'wtforms.SubmitField', 'SubmitField', (['"""Submit"""'], {}), "('Submit')\n", (6867, 6877), False, 'from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField, DateField\n'), ((1895, 1939), 'wtforms.ValidationError', 'ValidationError', (['"""Email already registered."""'], {}), "('Email already registered.')\n", (1910, 1939), False, 'from wtforms import ValidationError\n'), ((2121, 2164), 'wtforms.ValidationError', 'ValidationError', (['"""Username already in use."""'], {}), "('Username already in use.')\n", (2136, 2164), False, 'from wtforms import ValidationError\n'), ((426, 440), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (438, 440), False, 'from wtforms.validators import DataRequired, Length, Email, 
Regexp\n'), ((567, 580), 'wtforms.validators.Length', 'Length', (['(0)', '(64)'], {}), '(0, 64)\n', (573, 580), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n'), ((635, 648), 'wtforms.validators.Length', 'Length', (['(0)', '(64)'], {}), '(0, 64)\n', (641, 648), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n'), ((819, 833), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (831, 833), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n'), ((835, 848), 'wtforms.validators.Length', 'Length', (['(1)', '(64)'], {}), '(1, 64)\n', (841, 848), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n'), ((896, 903), 'wtforms.validators.Email', 'Email', ([], {}), '()\n', (901, 903), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n'), ((968, 982), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (980, 982), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n'), ((984, 997), 'wtforms.validators.Length', 'Length', (['(1)', '(64)'], {}), '(1, 64)\n', (990, 997), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n'), ((1008, 1115), 'wtforms.validators.Regexp', 'Regexp', (['"""^[A-Za-z][A-Za-z0-9_.]*$"""', '(0)', '"""Usernames must have only letters, numbers, dots or underscores"""'], {}), "('^[A-Za-z][A-Za-z0-9_.]*$', 0,\n 'Usernames must have only letters, numbers, dots or underscores')\n", (1014, 1115), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n'), ((1285, 1298), 'wtforms.validators.Length', 'Length', (['(0)', '(64)'], {}), '(0, 64)\n', (1291, 1298), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n'), ((1353, 1366), 'wtforms.validators.Length', 'Length', (['(0)', '(64)'], {}), '(0, 64)\n', (1359, 1366), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n'), ((2327, 2340), 
'wtforms.validators.Length', 'Length', (['(0)', '(64)'], {}), '(0, 64)\n', (2333, 2340), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n'), ((2391, 2404), 'wtforms.validators.Length', 'Length', (['(0)', '(64)'], {}), '(0, 64)\n', (2397, 2404), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n'), ((2461, 2474), 'wtforms.validators.Length', 'Length', (['(0)', '(64)'], {}), '(0, 64)\n', (2467, 2474), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n'), ((2650, 2663), 'wtforms.validators.Length', 'Length', (['(0)', '(64)'], {}), '(0, 64)\n', (2656, 2663), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n'), ((2820, 2833), 'wtforms.validators.Length', 'Length', (['(0)', '(64)'], {}), '(0, 64)\n', (2826, 2833), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n'), ((3235, 3248), 'wtforms.validators.Length', 'Length', (['(0)', '(64)'], {}), '(0, 64)\n', (3241, 3248), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n'), ((3299, 3312), 'wtforms.validators.Length', 'Length', (['(0)', '(64)'], {}), '(0, 64)\n', (3305, 3312), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n'), ((3369, 3382), 'wtforms.validators.Length', 'Length', (['(0)', '(64)'], {}), '(0, 64)\n', (3375, 3382), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n'), ((3423, 3436), 'wtforms.validators.Length', 'Length', (['(0)', '(64)'], {}), '(0, 64)\n', (3429, 3436), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n'), ((3490, 3503), 'wtforms.validators.Length', 'Length', (['(0)', '(64)'], {}), '(0, 64)\n', (3496, 3503), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n'), ((3596, 3609), 'wtforms.validators.Length', 'Length', (['(0)', '(64)'], {}), '(0, 64)\n', (3602, 3609), False, 'from wtforms.validators import DataRequired, Length, Email, 
Regexp\n'), ((3657, 3670), 'wtforms.validators.Length', 'Length', (['(0)', '(64)'], {}), '(0, 64)\n', (3663, 3670), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n'), ((3735, 3748), 'wtforms.validators.Length', 'Length', (['(0)', '(64)'], {}), '(0, 64)\n', (3741, 3748), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n'), ((3821, 3834), 'wtforms.validators.Length', 'Length', (['(0)', '(64)'], {}), '(0, 64)\n', (3827, 3834), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n'), ((3955, 3968), 'wtforms.validators.Length', 'Length', (['(0)', '(64)'], {}), '(0, 64)\n', (3961, 3968), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n'), ((4239, 4253), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (4251, 4253), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n'), ((4451, 4465), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (4463, 4465), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n'), ((4977, 4991), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (4989, 4991), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n'), ((5042, 5056), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (5054, 5056), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n'), ((5248, 5262), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (5260, 5262), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n'), ((5876, 5890), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (5888, 5890), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n'), ((6122, 6136), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (6134, 6136), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n'), 
((6224, 6238), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (6236, 6238), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n'), ((6285, 6299), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (6297, 6299), False, 'from wtforms.validators import DataRequired, Length, Email, Regexp\n')] |
import random
import six
import warnings
from .partitionedsink import WritePartitionedFiles
from .partitionedsink import PartitionedFileSink
from .partitionedsink import T
import apache_beam as beam
from apache_beam import PTransform
from apache_beam import core
from apache_beam.transforms import window
from apache_beam.io.filesystem import CompressionTypes
from pipe_tools.timestamp import SECONDS_IN_DAY
from pipe_tools.timestamp import datetimeFromTimestamp
from pipe_tools.coders import JSONDictCoder
from apache_beam import typehints
from apache_beam.typehints import Tuple, KV
DEFAULT_SHARDS_PER_DAY = 3
class WriteToDatePartitionedFiles(WritePartitionedFiles):
"""
Write the incoming pcoll to files partitioned by date. The date is taken from the
TimestampedValue associated with each element.
"""
def __init__(self,
file_path_prefix,
file_name_suffix='',
append_trailing_newlines=True,
shards_per_day=None,
shard_name_template=None,
coder=JSONDictCoder(),
compression_type=CompressionTypes.AUTO,
header=None):
self.shards_per_day = shards_per_day or DEFAULT_SHARDS_PER_DAY
self._sink = DatePartitionedFileSink(file_path_prefix,
file_name_suffix=file_name_suffix,
append_trailing_newlines=append_trailing_newlines,
shard_name_template=shard_name_template,
coder=coder,
compression_type=compression_type,
header=header)
self._sharder = DateShardDoFn(shards_per_day=self.shards_per_day)
@typehints.with_input_types(T)
@typehints.with_output_types(KV[Tuple[int,int],T])
class DateShardDoFn(beam.DoFn):
"""
Apply date and shard number
"""
def __init__(self, shards_per_day=None):
self.shards_per_day = shards_per_day or DEFAULT_SHARDS_PER_DAY
self.shard_counter = 0
def start_bundle(self):
self.shard_counter = random.randint(0, self.shards_per_day - 1)
def process(self, element, timestamp=beam.DoFn.TimestampParam):
# get the timestamp at the start of the day that contains this element
date = (int(timestamp) // SECONDS_IN_DAY) * SECONDS_IN_DAY
shard = self.shard_counter
self.shard_counter += 1
if self.shard_counter >= self.shards_per_day:
self.shard_counter -= self.shards_per_day
assert isinstance(element, dict), 'element must be a dict'
yield ((date, shard), element)
class DatePartitionedFileSink(PartitionedFileSink):
DATE_FORMAT='%Y-%m-%d'
def _date_str(self, date_ts):
"""convert a timestamp to a string date representation"""
return datetimeFromTimestamp(date_ts).strftime(self.DATE_FORMAT)
def _encode_key(self, date_ts):
return self._date_str(date_ts) | [
"apache_beam.typehints.with_output_types",
"apache_beam.typehints.with_input_types",
"pipe_tools.timestamp.datetimeFromTimestamp",
"pipe_tools.coders.JSONDictCoder",
"random.randint"
] | [((1863, 1892), 'apache_beam.typehints.with_input_types', 'typehints.with_input_types', (['T'], {}), '(T)\n', (1889, 1892), False, 'from apache_beam import typehints\n'), ((1894, 1945), 'apache_beam.typehints.with_output_types', 'typehints.with_output_types', (['KV[Tuple[int, int], T]'], {}), '(KV[Tuple[int, int], T])\n', (1921, 1945), False, 'from apache_beam import typehints\n'), ((1081, 1096), 'pipe_tools.coders.JSONDictCoder', 'JSONDictCoder', ([], {}), '()\n', (1094, 1096), False, 'from pipe_tools.coders import JSONDictCoder\n'), ((2230, 2272), 'random.randint', 'random.randint', (['(0)', '(self.shards_per_day - 1)'], {}), '(0, self.shards_per_day - 1)\n', (2244, 2272), False, 'import random\n'), ((2967, 2997), 'pipe_tools.timestamp.datetimeFromTimestamp', 'datetimeFromTimestamp', (['date_ts'], {}), '(date_ts)\n', (2988, 2997), False, 'from pipe_tools.timestamp import datetimeFromTimestamp\n')] |
#
# Copyright (c) 2012 The Chromium OS Authors.
#
# SPDX-License-Identifier: GPL-2.0+
#
import os
import shutil
import sys
import tempfile
import time
import unittest
# Bring in the patman libraries
our_path = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(our_path, '../patman'))
import board
import bsettings
import builder
import control
import command
import commit
import toolchain
errors = [
'''main.c: In function 'main_loop':
main.c:260:6: warning: unused variable 'joe' [-Wunused-variable]
''',
'''main.c: In function 'main_loop':
main.c:295:2: error: 'fred' undeclared (first use in this function)
main.c:295:2: note: each undeclared identifier is reported only once for each function it appears in
make[1]: *** [main.o] Error 1
make: *** [common/libcommon.o] Error 2
Make failed
''',
'''main.c: In function 'main_loop':
main.c:280:6: warning: unused variable 'mary' [-Wunused-variable]
''',
'''powerpc-linux-ld: warning: dot moved backwards before `.bss'
powerpc-linux-ld: warning: dot moved backwards before `.bss'
powerpc-linux-ld: u-boot: section .text lma 0xfffc0000 overlaps previous sections
powerpc-linux-ld: u-boot: section .rodata lma 0xfffef3ec overlaps previous sections
powerpc-linux-ld: u-boot: section .reloc lma 0xffffa400 overlaps previous sections
powerpc-linux-ld: u-boot: section .data lma 0xffffcd38 overlaps previous sections
powerpc-linux-ld: u-boot: section .u_boot_cmd lma 0xffffeb40 overlaps previous sections
powerpc-linux-ld: u-boot: section .bootpg lma 0xfffff198 overlaps previous sections
'''
]
# hash, subject, return code, list of errors/warnings
commits = [
['1234', 'upstream/master, ok', 0, []],
['5678', 'Second commit, a warning', 0, errors[0:1]],
['9012', 'Third commit, error', 1, errors[0:2]],
['3456', 'Fourth commit, warning', 0, [errors[0], errors[2]]],
['7890', 'Fifth commit, link errors', 1, [errors[0], errors[3]]],
['abcd', 'Sixth commit, fixes all errors', 0, []]
]
boards = [
['Active', 'arm', 'armv7', '', 'Tester', 'ARM Board 1', 'board0', ''],
['Active', 'arm', 'armv7', '', 'Tester', 'ARM Board 2', 'board1', ''],
['Active', 'powerpc', 'powerpc', '', 'Tester', 'PowerPC board 1', 'board2', ''],
['Active', 'powerpc', 'mpc5xx', '', 'Tester', 'PowerPC board 2', 'board3', ''],
['Active', 'sandbox', 'sandbox', '', 'Tester', 'Sandbox board', 'board4', ''],
]
class Options:
"""Class that holds build options"""
pass
class TestBuild(unittest.TestCase):
"""Test buildman
TODO: Write tests for the rest of the functionality
"""
def setUp(self):
# Set up commits to build
self.commits = []
sequence = 0
for commit_info in commits:
comm = commit.Commit(commit_info[0])
comm.subject = commit_info[1]
comm.return_code = commit_info[2]
comm.error_list = commit_info[3]
comm.sequence = sequence
sequence += 1
self.commits.append(comm)
# Set up boards to build
self.boards = board.Boards()
for brd in boards:
self.boards.AddBoard(board.Board(*brd))
self.boards.SelectBoards([])
# Set up the toolchains
bsettings.Setup()
self.toolchains = toolchain.Toolchains()
self.toolchains.Add('arm-linux-gcc', test=False)
self.toolchains.Add('sparc-linux-gcc', test=False)
self.toolchains.Add('powerpc-linux-gcc', test=False)
self.toolchains.Add('gcc', test=False)
def Make(self, commit, brd, stage, *args, **kwargs):
result = command.CommandResult()
boardnum = int(brd.target[-1])
result.return_code = 0
result.stderr = ''
result.stdout = ('This is the test output for board %s, commit %s' %
(brd.target, commit.hash))
if boardnum >= 1 and boardnum >= commit.sequence:
result.return_code = commit.return_code
result.stderr = ''.join(commit.error_list)
if stage == 'build':
target_dir = None
for arg in args:
if arg.startswith('O='):
target_dir = arg[2:]
if not os.path.isdir(target_dir):
os.mkdir(target_dir)
#time.sleep(.2 + boardnum * .2)
result.combined = result.stdout + result.stderr
return result
def testBasic(self):
"""Test basic builder operation"""
output_dir = tempfile.mkdtemp()
if not os.path.isdir(output_dir):
os.mkdir(output_dir)
build = builder.Builder(self.toolchains, output_dir, None, 1, 2,
checkout=False, show_unknown=False)
build.do_make = self.Make
board_selected = self.boards.GetSelectedDict()
#build.BuildCommits(self.commits, board_selected, False)
build.BuildBoards(self.commits, board_selected, False, False)
build.ShowSummary(self.commits, board_selected, True, False,
False, False)
def _testGit(self):
"""Test basic builder operation by building a branch"""
base_dir = tempfile.mkdtemp()
if not os.path.isdir(base_dir):
os.mkdir(base_dir)
options = Options()
options.git = os.getcwd()
options.summary = False
options.jobs = None
options.dry_run = False
#options.git = os.path.join(base_dir, 'repo')
options.branch = 'test-buildman'
options.force_build = False
options.list_tool_chains = False
options.count = -1
options.git_dir = None
options.threads = None
options.show_unknown = False
options.quick = False
options.show_errors = False
options.keep_outputs = False
args = ['tegra20']
control.DoBuildman(options, args)
if __name__ == "__main__":
unittest.main()
| [
"builder.Builder",
"toolchain.Toolchains",
"control.DoBuildman",
"command.CommandResult",
"os.path.join",
"commit.Commit",
"os.getcwd",
"os.path.realpath",
"board.Board",
"os.path.isdir",
"tempfile.mkdtemp",
"os.mkdir",
"bsettings.Setup",
"unittest.main",
"board.Boards"
] | [((228, 254), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (244, 254), False, 'import os\n'), ((272, 307), 'os.path.join', 'os.path.join', (['our_path', '"""../patman"""'], {}), "(our_path, '../patman')\n", (284, 307), False, 'import os\n'), ((5915, 5930), 'unittest.main', 'unittest.main', ([], {}), '()\n', (5928, 5930), False, 'import unittest\n'), ((3081, 3095), 'board.Boards', 'board.Boards', ([], {}), '()\n', (3093, 3095), False, 'import board\n'), ((3253, 3270), 'bsettings.Setup', 'bsettings.Setup', ([], {}), '()\n', (3268, 3270), False, 'import bsettings\n'), ((3297, 3319), 'toolchain.Toolchains', 'toolchain.Toolchains', ([], {}), '()\n', (3317, 3319), False, 'import toolchain\n'), ((3619, 3642), 'command.CommandResult', 'command.CommandResult', ([], {}), '()\n', (3640, 3642), False, 'import command\n'), ((4492, 4510), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (4508, 4510), False, 'import tempfile\n'), ((4602, 4698), 'builder.Builder', 'builder.Builder', (['self.toolchains', 'output_dir', 'None', '(1)', '(2)'], {'checkout': '(False)', 'show_unknown': '(False)'}), '(self.toolchains, output_dir, None, 1, 2, checkout=False,\n show_unknown=False)\n', (4617, 4698), False, 'import builder\n'), ((5169, 5187), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (5185, 5187), False, 'import tempfile\n'), ((5309, 5320), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (5318, 5320), False, 'import os\n'), ((5849, 5882), 'control.DoBuildman', 'control.DoBuildman', (['options', 'args'], {}), '(options, args)\n', (5867, 5882), False, 'import control\n'), ((2761, 2790), 'commit.Commit', 'commit.Commit', (['commit_info[0]'], {}), '(commit_info[0])\n', (2774, 2790), False, 'import commit\n'), ((4526, 4551), 'os.path.isdir', 'os.path.isdir', (['output_dir'], {}), '(output_dir)\n', (4539, 4551), False, 'import os\n'), ((4565, 4585), 'os.mkdir', 'os.mkdir', (['output_dir'], {}), '(output_dir)\n', (4573, 4585), False, 'import 
os\n'), ((5203, 5226), 'os.path.isdir', 'os.path.isdir', (['base_dir'], {}), '(base_dir)\n', (5216, 5226), False, 'import os\n'), ((5240, 5258), 'os.mkdir', 'os.mkdir', (['base_dir'], {}), '(base_dir)\n', (5248, 5258), False, 'import os\n'), ((3156, 3173), 'board.Board', 'board.Board', (['*brd'], {}), '(*brd)\n', (3167, 3173), False, 'import board\n'), ((4215, 4240), 'os.path.isdir', 'os.path.isdir', (['target_dir'], {}), '(target_dir)\n', (4228, 4240), False, 'import os\n'), ((4258, 4278), 'os.mkdir', 'os.mkdir', (['target_dir'], {}), '(target_dir)\n', (4266, 4278), False, 'import os\n')] |
if True:
import numpy as np
d = 3
K = 50
N = 10 ** 6
a = np.zeros(3)
b = np.ones(3)
orders = np.array([K for i in range(d)])
coeffs = np.random.random([k + 2 for k in orders])
points = np.random.random((N, d)) # each line is a vector
points_c = points.T.copy() # each column is a vector
vals = np.zeros(N)
print(points.max().max())
print(points.min().min())
import time
from alternative_implementations import *
from eval_cubic_splines_cython import vec_eval_cubic_spline_3 as rr
vec_eval_cubic_spline_3(a, b, orders, coeffs, points, vals) # warmup
vec_eval_cubic_spline_3_inlined(a, b, orders, coeffs, points, vals) # warmup
vec_eval_cubic_spline_3_inlined_columns(
a, b, orders, coeffs, points_c, vals
) # warmup
vec_eval_cubic_spline_3_kernel(a, b, orders, coeffs, points, vals) # warmup
vec_eval_cubic_spline_3_inlined_lesswork(orders, coeffs, points, vals, Ad, dAd)
# rr(a,b,orders,coeffs,points,vals,Ad,dAd)
rr(a, b, orders, coeffs, points, vals)
t1 = time.time()
vec_eval_cubic_spline_3(a, b, orders, coeffs, points, vals)
t2 = time.time()
vec_eval_cubic_spline_3_inlined(a, b, orders, coeffs, points, vals)
t3 = time.time()
vec_eval_cubic_spline_3_inlined_columns(a, b, orders, coeffs, points_c, vals)
t4 = time.time()
vec_eval_cubic_spline_3_kernel(a, b, orders, coeffs, points, vals)
t5 = time.time()
vec_eval_cubic_spline_3_inlined_lesswork(orders, coeffs, points, vals, Ad, dAd)
t6 = time.time()
# rr(a,b,orders,coeffs,points,vals,Ad,dAd)
rr(a, b, orders, coeffs, points, vals)
t7 = time.time()
print("one function call per point: {}".format(t2 - t1))
print("inlined (points in rows): {}".format(t3 - t2))
print("inlined (points in columns): {}".format(t4 - t3))
print("kernel: {}".format(t5 - t4))
print("less work: {}".format(t6 - t5))
print("cython: {}".format(t7 - t6))
print(vals[:10, 0])
| [
"numpy.ones",
"numpy.random.random",
"eval_cubic_splines_cython.vec_eval_cubic_spline_3",
"numpy.zeros",
"time.time"
] | [((79, 90), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (87, 90), True, 'import numpy as np\n'), ((99, 109), 'numpy.ones', 'np.ones', (['(3)'], {}), '(3)\n', (106, 109), True, 'import numpy as np\n'), ((168, 211), 'numpy.random.random', 'np.random.random', (['[(k + 2) for k in orders]'], {}), '([(k + 2) for k in orders])\n', (184, 211), True, 'import numpy as np\n'), ((223, 247), 'numpy.random.random', 'np.random.random', (['(N, d)'], {}), '((N, d))\n', (239, 247), True, 'import numpy as np\n'), ((342, 353), 'numpy.zeros', 'np.zeros', (['N'], {}), '(N)\n', (350, 353), True, 'import numpy as np\n'), ((1030, 1068), 'eval_cubic_splines_cython.vec_eval_cubic_spline_3', 'rr', (['a', 'b', 'orders', 'coeffs', 'points', 'vals'], {}), '(a, b, orders, coeffs, points, vals)\n', (1032, 1068), True, 'from eval_cubic_splines_cython import vec_eval_cubic_spline_3 as rr\n'), ((1079, 1090), 'time.time', 'time.time', ([], {}), '()\n', (1088, 1090), False, 'import time\n'), ((1164, 1175), 'time.time', 'time.time', ([], {}), '()\n', (1173, 1175), False, 'import time\n'), ((1257, 1268), 'time.time', 'time.time', ([], {}), '()\n', (1266, 1268), False, 'import time\n'), ((1360, 1371), 'time.time', 'time.time', ([], {}), '()\n', (1369, 1371), False, 'import time\n'), ((1452, 1463), 'time.time', 'time.time', ([], {}), '()\n', (1461, 1463), False, 'import time\n'), ((1557, 1568), 'time.time', 'time.time', ([], {}), '()\n', (1566, 1568), False, 'import time\n'), ((1623, 1661), 'eval_cubic_splines_cython.vec_eval_cubic_spline_3', 'rr', (['a', 'b', 'orders', 'coeffs', 'points', 'vals'], {}), '(a, b, orders, coeffs, points, vals)\n', (1625, 1661), True, 'from eval_cubic_splines_cython import vec_eval_cubic_spline_3 as rr\n'), ((1671, 1682), 'time.time', 'time.time', ([], {}), '()\n', (1680, 1682), False, 'import time\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""move_safe.py: get all the places in a radius using google place api ."""
__author__ = "<NAME>, <NAME>"
import populartimes
import json
import math
import requests
import geocoder
apiKey = ""
# get your current position
def my_current_location():
g = geocoder.ip('me')
my_location = g.latlng
return my_location
# method 1
def get_location_bound(lat, lon, distance):
R = 6378.1 # Radius of the Earth
brng = 1.57 # Bearing is 90 degrees converted to radians.
d = distance # Distance in km
lat1 = math.radians(lat) # Current lat point converted to radians
lon1 = math.radians(lon) # Current long point converted to radians
lat2 = math.asin(math.sin(lat1) * math.cos(d / R) +
math.cos(lat1) * math.sin(d / R) * math.cos(brng))
lon2 = lon1 + math.atan2(math.sin(brng) * math.sin(d / R) * math.cos(lat1),
math.cos(d / R) - math.sin(lat1) * math.sin(lat2))
lat2 = math.degrees(lat2)
lon2 = math.degrees(lon2)
return lat2, lon2
def get_radius_locations(mylocation, dist):
lat = mylocation["lat"]
lon = mylocation["lon"]
lat2, lon2 = get_location_bound(lat, lon, dist)
print(lat, lon, lat2, lon2)
data = populartimes.get(apiKey, ["Restaurant"], (lat, lon), (lat2, lon2))
print(data)
d = json.dumps(data, indent=2)
print(d)
# method 2 -- depend on the nerby service by google
def get_nearby_id(location, rad, types):
lat = location["lat"]
lon = location["lon"]
response = requests.get('https://maps.googleapis.com/maps/api/place/nearbysearch/json?'
'location=' + str(lat) + ',' + str(lon) + '&radius=' + str(rad) + '&type=' + types +
'&key=' + apiKey)
res = response.json()
i = 0
for place in res["results"]:
get_location_by_id(place["place_id"])
i =+1
if i > 10:
break
def get_location_by_id(place_id):
place_details = populartimes.get_id(apiKey, place_id)
print(json.dumps(place_details, indent=2))
"""
:param types: such bus_station,restaurant,hospital(https://developers.google.com/places/web-service/supported_types)
:param radius: define the the radius of the search
:param my_location: represent my current position
"""
if __name__ == "__main__":
# my_location = {"lat": my_current_location()[0], "lon": my_current_location()[1]}
my_location = {"lat": 48.7606439, "lon": 1.2772357}
place_type = "supermarket"
radius = 50
get_nearby_id(my_location, radius, place_type)
get_radius_locations(my_location, 50000)
| [
"geocoder.ip",
"populartimes.get",
"populartimes.get_id",
"json.dumps",
"math.degrees",
"math.radians",
"math.cos",
"math.sin"
] | [((307, 324), 'geocoder.ip', 'geocoder.ip', (['"""me"""'], {}), "('me')\n", (318, 324), False, 'import geocoder\n'), ((581, 598), 'math.radians', 'math.radians', (['lat'], {}), '(lat)\n', (593, 598), False, 'import math\n'), ((652, 669), 'math.radians', 'math.radians', (['lon'], {}), '(lon)\n', (664, 669), False, 'import math\n'), ((1015, 1033), 'math.degrees', 'math.degrees', (['lat2'], {}), '(lat2)\n', (1027, 1033), False, 'import math\n'), ((1045, 1063), 'math.degrees', 'math.degrees', (['lon2'], {}), '(lon2)\n', (1057, 1063), False, 'import math\n'), ((1286, 1352), 'populartimes.get', 'populartimes.get', (['apiKey', "['Restaurant']", '(lat, lon)', '(lat2, lon2)'], {}), "(apiKey, ['Restaurant'], (lat, lon), (lat2, lon2))\n", (1302, 1352), False, 'import populartimes\n'), ((1377, 1403), 'json.dumps', 'json.dumps', (['data'], {'indent': '(2)'}), '(data, indent=2)\n', (1387, 1403), False, 'import json\n'), ((2038, 2075), 'populartimes.get_id', 'populartimes.get_id', (['apiKey', 'place_id'], {}), '(apiKey, place_id)\n', (2057, 2075), False, 'import populartimes\n'), ((2086, 2121), 'json.dumps', 'json.dumps', (['place_details'], {'indent': '(2)'}), '(place_details, indent=2)\n', (2096, 2121), False, 'import json\n'), ((735, 749), 'math.sin', 'math.sin', (['lat1'], {}), '(lat1)\n', (743, 749), False, 'import math\n'), ((752, 767), 'math.cos', 'math.cos', (['(d / R)'], {}), '(d / R)\n', (760, 767), False, 'import math\n'), ((826, 840), 'math.cos', 'math.cos', (['brng'], {}), '(brng)\n', (834, 840), False, 'import math\n'), ((907, 921), 'math.cos', 'math.cos', (['lat1'], {}), '(lat1)\n', (915, 921), False, 'import math\n'), ((952, 967), 'math.cos', 'math.cos', (['(d / R)'], {}), '(d / R)\n', (960, 967), False, 'import math\n'), ((791, 805), 'math.cos', 'math.cos', (['lat1'], {}), '(lat1)\n', (799, 805), False, 'import math\n'), ((808, 823), 'math.sin', 'math.sin', (['(d / R)'], {}), '(d / R)\n', (816, 823), False, 'import math\n'), ((872, 886), 'math.sin', 
'math.sin', (['brng'], {}), '(brng)\n', (880, 886), False, 'import math\n'), ((889, 904), 'math.sin', 'math.sin', (['(d / R)'], {}), '(d / R)\n', (897, 904), False, 'import math\n'), ((970, 984), 'math.sin', 'math.sin', (['lat1'], {}), '(lat1)\n', (978, 984), False, 'import math\n'), ((987, 1001), 'math.sin', 'math.sin', (['lat2'], {}), '(lat2)\n', (995, 1001), False, 'import math\n')] |
# Generated by Django 3.0.8 on 2020-07-31 06:20
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('ews', '0007_auto_20200729_1039'),
]
operations = [
migrations.AlterField(
model_name='featuredata',
name='station',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='station', to='ews.Station'),
),
]
| [
"django.db.models.ForeignKey"
] | [((371, 480), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""station"""', 'to': '"""ews.Station"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='station', to='ews.Station')\n", (388, 480), False, 'from django.db import migrations, models\n')] |
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
#===============================================================================
# Filename : check_ssh_file_existence
# Author : <NAME> <<EMAIL>>
# Description : Check on remote server if some files are present using SSH.
#-------------------------------------------------------------------------------
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#===============================================================================
__version__ = '0.2.4'
import hashlib
import logging as log
from pprint import pformat
import re
from datetime import datetime, timedelta
from monitoring.nagios.plugin import NagiosPluginSSH
logger = log.getLogger('plugin')
def format_time_from_arg(time_string):
"""
Format a time string from args and return a datetime.
:param time_string: String of the form HH:MM:SS.
:type time_string: str
:return: datetime
"""
time_from_arg = datetime.strptime(time_string, '%H:%M:%S')
current_time = datetime.today()
check_time = time_from_arg.replace(year=current_time.year, month=current_time.month, day=current_time.day)
return check_time
class PluginCheckFileExistence(NagiosPluginSSH):
"""
Plugin customization class.
"""
def initialize(self):
super(PluginCheckFileExistence, self).initialize()
# Use MD5 hash in pickle file name for the case you have multiple service check on the same host that use
# this plugin.
pickle_pattern = '%s_%s_%s' % (self.options.hostname, self.options.regexp.pattern, self.options.directory)
self.picklefile_pattern = hashlib.md5(pickle_pattern).hexdigest()
self.has_check_period = False
self.in_check_period = False
self.flags = {
'DoneForToday': False,
'NotYetPresent': None,
'Files': [],
}
def define_plugin_arguments(self):
super(PluginCheckFileExistence, self).define_plugin_arguments()
parser_file_group = self.parser.add_argument_group('Files', 'Arguments to list and filter files.')
parser_file_group.add_argument('-d',
dest='directory',
default='.',
help='Directory to look files in. Default to the current directory.')
parser_file_group.add_argument('-r',
dest='regexp',
default=re.compile(r'.*'),
type=re.compile,
help='Regexp pattern to filter files. Default to all \'.*\'.')
parser_file_group.add_argument('-n',
dest='count',
type=int,
default=1,
help='Number of file (at least) that must be found to consider it is valid. ' \
'Default to 1 occurence.')
parser_file_group.add_argument('--stime',
dest='stime',
type=format_time_from_arg,
help='Check start time. Check for files starting at the specified time.')
parser_file_group.add_argument('--etime',
dest='etime',
type=format_time_from_arg,
help='Check end time. Do not check for files above this time.')
def verify_plugin_arguments(self):
super(PluginCheckFileExistence, self).verify_plugin_arguments()
# Check time thresholds syntax
if self.options.stime and self.options.etime:
if self.options.stime >= self.options.etime:
self.unknown('Start time cannot be >= end time, check syntax !')
if datetime.today() > self.options.etime:
tomorrow = timedelta(days=1)
self.options.stime += tomorrow
self.options.etime += tomorrow
logger.debug('We must take care of the time period.')
logger.debug('\tStart time: %s' % self.options.stime)
logger.debug('\tEnd time: %s' % self.options.etime)
self.has_check_period = True
if (datetime.today() > self.options.stime) and (datetime.today() < self.options.etime):
logger.debug('We are in check period...')
self.in_check_period = True
else:
logger.debug('We are not in check period...')
elif (self.options.stime and not self.options.etime) or (not self.options.stime and self.options.etime):
self.unknown('Missing start/end time information, check syntax !')
def search_files(self, files):
found = []
regexp = self.options.regexp
for file in files:
if regexp.search(file):
logger.debug('\tFound file \'%s\'.' % file)
found.append(file)
return found
plugin = PluginCheckFileExistence(description='Check on remote server if some files are present using SSH.',
                                  version=__version__)
# Look for files on the remote server.
files = plugin.ssh.list_files(plugin.options.directory)
logger.debug('Retrieve files list:')
logger.debug(pformat(files))
# Search files using the regexp
found_files = plugin.search_files(files)
# Should we check if plugin must be executed ?
# status ends up as one of the plugin's status callables (plugin.ok /
# plugin.critical / ...) and is invoked with the message at the very end.
status = None
message = ''
if plugin.has_check_period:
    # Check period defined
    # Load previous state
    # The flags dict persists the daily state across plugin invocations; fall
    # back to the in-memory defaults when no saved state exists yet.
    try:
        flags = plugin.load_data()
    except IOError:
        flags = plugin.flags
    if plugin.in_check_period:
        if flags['DoneForToday']:
            # Files were already validated earlier today: report OK again.
            status = plugin.ok
            message = '%d files have already been checked today.\n'\
                      'The following files have been found:\n'\
                      '%s' % (len(flags['Files']), '\n'.join(flags['Files']))
        else:
            if found_files:
                if len(found_files) >= plugin.options.count:
                    # Enough matches: record success for the rest of the day.
                    flags['Files'] = found_files
                    flags['DoneForToday'] = True
                    flags['NotYetPresent'] = None
                    status = plugin.ok
                    message = '%d files with regexp \"%s\" have been found in \"%s\".\n'\
                              'The following files have been found:\n'\
                              '%s' % (len(found_files),
                                      plugin.options.regexp.pattern,
                                      plugin.options.directory,
                                      '\n'.join(found_files))
                else:
                    status = plugin.critical
                    message = 'Only %d files with regexp \"%s\" have been found in \"%s\".'\
                              'Should be at least %d.\n' % (len(found_files),
                                                            plugin.options.regexp.pattern,
                                                            plugin.options.directory,
                                                            plugin.options.count)
            else:
                # Nothing found yet, but the window is still open: stay OK and
                # remember that the files are pending.
                flags['NotYetPresent'] = True
                status = plugin.ok
                message = 'Files with regexp \"%s\" are not yet present in \"%s\".'\
                          'Verify in next check...' % (plugin.options.regexp.pattern, plugin.options.directory)
    else:
        if flags['NotYetPresent']:
            # The window closed while files were still pending: alert.
            status = plugin.critical
            message = 'Files have not been received today !'
        else:
            # Compute the start time
            # Outside the window with nothing pending: report when the next
            # window opens and reset the daily flag.
            start = int((plugin.options.stime - datetime.today()).total_seconds())
            hours, remainder = divmod(start, 3600)
            minutes, seconds = divmod(remainder, 60)
            flags['DoneForToday'] = False
            status = plugin.ok
            message = 'Nothing to do. Will start to do something in %s hours, %s mins and %s secs.' % (hours,
                                                                                                       minutes,
                                                                                                       seconds)
    logger.debug('Flags status:')
    logger.debug(pformat(flags, indent=4))
    # Persist the updated daily state for the next invocation.
    plugin.save_data(flags)
else:
    # No check period, run all the time
    if found_files:
        if len(found_files) >= plugin.options.count:
            status = plugin.ok
            message = '%d files with regexp \"%s\" have been found in \"%s\".\n'\
                      'The following files have been found:\n'\
                      '%s' % (len(found_files),
                              plugin.options.regexp.pattern,
                              plugin.options.directory,
                              '\n'.join(found_files))
        else:
            status = plugin.critical
            message = 'Only %d files with regexp \"%s\" have been found in \"%s\".'\
                      'Should be at least %d.\n' % (len(found_files),
                                                    plugin.options.regexp.pattern,
                                                    plugin.options.directory,
                                                    plugin.options.count)
    else:
        status = plugin.critical
        message = 'Cannot find files with regexp \"%s\" in \"%s\" !' % (plugin.options.regexp.pattern,
                                                                        plugin.options.directory)
# Return status to Nagios
# NOTE(review): the selected status callable presumably formats the message
# and exits with the matching Nagios return code — confirm in the base plugin.
status(message)
| [
"logging.getLogger",
"hashlib.md5",
"re.compile",
"datetime.datetime.strptime",
"pprint.pformat",
"datetime.datetime.today",
"datetime.timedelta"
] | [((1285, 1308), 'logging.getLogger', 'log.getLogger', (['"""plugin"""'], {}), "('plugin')\n", (1298, 1308), True, 'import logging as log\n'), ((1546, 1588), 'datetime.datetime.strptime', 'datetime.strptime', (['time_string', '"""%H:%M:%S"""'], {}), "(time_string, '%H:%M:%S')\n", (1563, 1588), False, 'from datetime import datetime, timedelta\n'), ((1608, 1624), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (1622, 1624), False, 'from datetime import datetime, timedelta\n'), ((6054, 6068), 'pprint.pformat', 'pformat', (['files'], {}), '(files)\n', (6061, 6068), False, 'from pprint import pformat\n'), ((9061, 9085), 'pprint.pformat', 'pformat', (['flags'], {'indent': '(4)'}), '(flags, indent=4)\n', (9068, 9085), False, 'from pprint import pformat\n'), ((2230, 2257), 'hashlib.md5', 'hashlib.md5', (['pickle_pattern'], {}), '(pickle_pattern)\n', (2241, 2257), False, 'import hashlib\n'), ((3108, 3124), 're.compile', 're.compile', (['""".*"""'], {}), "('.*')\n", (3118, 3124), False, 'import re\n'), ((4588, 4604), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (4602, 4604), False, 'from datetime import datetime, timedelta\n'), ((4654, 4671), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (4663, 4671), False, 'from datetime import datetime, timedelta\n'), ((5022, 5038), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (5036, 5038), False, 'from datetime import datetime, timedelta\n'), ((5066, 5082), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (5080, 5082), False, 'from datetime import datetime, timedelta\n'), ((8462, 8478), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (8476, 8478), False, 'from datetime import datetime, timedelta\n')] |
#!/usr/bin/python3
'''Adds a State object and one of its City object children to a database.
'''
import sys
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, relationship
from relationship_state import Base, State
from relationship_city import City
if __name__ == '__main__':
    # Usage: script <mysql user> <password> <database>.
    # Silently does nothing when fewer than three arguments are given.
    if len(sys.argv) >= 4:
        username, password, database = sys.argv[1], sys.argv[2], sys.argv[3]
        url = 'mysql://{}:{}@localhost:3306/{}'.format(username, password, database)
        engine = create_engine(url)
        Base.metadata.create_all(engine)
        session = sessionmaker(bind=engine)()
        # Create a State with one child City and persist both in one commit.
        california = State(name='California')
        california.cities.append(City(name='San Francisco'))
        session.add(california)
        session.commit()
| [
"sqlalchemy.orm.sessionmaker",
"sqlalchemy.create_engine",
"relationship_city.City",
"relationship_state.Base.metadata.create_all",
"relationship_state.State"
] | [((544, 571), 'sqlalchemy.create_engine', 'create_engine', (['DATABASE_URL'], {}), '(DATABASE_URL)\n', (557, 571), False, 'from sqlalchemy import create_engine\n'), ((580, 612), 'relationship_state.Base.metadata.create_all', 'Base.metadata.create_all', (['engine'], {}), '(engine)\n', (604, 612), False, 'from relationship_state import Base, State\n'), ((679, 703), 'relationship_state.State', 'State', ([], {'name': '"""California"""'}), "(name='California')\n", (684, 703), False, 'from relationship_state import Base, State\n'), ((723, 749), 'relationship_city.City', 'City', ([], {'name': '"""San Francisco"""'}), "(name='San Francisco')\n", (727, 749), False, 'from relationship_city import City\n'), ((631, 656), 'sqlalchemy.orm.sessionmaker', 'sessionmaker', ([], {'bind': 'engine'}), '(bind=engine)\n', (643, 656), False, 'from sqlalchemy.orm import sessionmaker, relationship\n')] |
# Copyright 2015 Isotoma Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from touchdown.tests.aws import StubberTestCase
from touchdown.tests.stubs.aws import LoadBalancerStubber, Stubber
class TestCreateLoadBalancer(StubberTestCase):
    def test_create_load_balancer(self):
        """Creating a missing load balancer issues the create call once."""
        goal = self.create_goal("apply")
        service = goal.get_service(
            self.aws.add_load_balancer(name="test-load_balancer", listeners=[]),
            "apply",
        )
        stub = self.fixtures.enter_context(LoadBalancerStubber(service))
        stub.add_describe_load_balancers_empty()
        stub.add_create_load_balancer()
        # After creation, touchdown re-describes the balancer (and its
        # attributes) three times while converging.
        for _ in range(3):
            stub.add_describe_load_balancers_one()
            stub.add_describe_load_balancer_attributes()
        goal.execute()

    def test_create_load_balancer_idempotent(self):
        """An already-existing load balancer produces no plan and no changes."""
        goal = self.create_goal("apply")
        service = goal.get_service(
            self.aws.add_load_balancer(name="test-load_balancer", listeners=[]),
            "apply",
        )
        stub = self.fixtures.enter_context(LoadBalancerStubber(service))
        stub.add_describe_load_balancers_one()
        stub.add_describe_load_balancer_attributes()
        self.assertEqual(len(list(goal.plan())), 0)
        self.assertEqual(len(goal.get_changes(stub.resource)), 0)
class TestDestroyLoadBalancer(StubberTestCase):
    def test_destroy_load_balancer(self):
        """Destroying an existing load balancer deletes it and waits for its ENI to vanish."""
        goal = self.create_goal("destroy")
        service = goal.get_service(
            self.aws.add_load_balancer(name="test-load_balancer"), "destroy"
        )
        stub = self.fixtures.enter_context(LoadBalancerStubber(service))
        stub.add_describe_load_balancers_one()
        stub.add_describe_load_balancer_attributes()
        stub.add_delete_load_balancer()
        # Deletion waits until no network interface tagged with the ELB's
        # description remains.
        eni_stub = self.fixtures.enter_context(Stubber(stub.service.ec2_client))
        eni_stub.add_response(
            "describe_network_interfaces",
            service_response={},
            expected_params={
                "Filters": [
                    {"Name": "description", "Values": ["ELB test-load_balancer"]}
                ]
            },
        )
        goal.execute()

    def test_destroy_load_balancer_idempotent(self):
        """Destroying an absent load balancer produces no plan and no changes."""
        goal = self.create_goal("destroy")
        service = goal.get_service(
            self.aws.add_load_balancer(name="test-load_balancer"), "destroy"
        )
        stub = self.fixtures.enter_context(LoadBalancerStubber(service))
        stub.add_describe_load_balancers_empty()
        self.assertEqual(len(list(goal.plan())), 0)
        self.assertEqual(len(goal.get_changes(stub.resource)), 0)
| [
"touchdown.tests.stubs.aws.Stubber"
] | [((2844, 2885), 'touchdown.tests.stubs.aws.Stubber', 'Stubber', (['load_balancer.service.ec2_client'], {}), '(load_balancer.service.ec2_client)\n', (2851, 2885), False, 'from touchdown.tests.stubs.aws import LoadBalancerStubber, Stubber\n')] |
from __future__ import print_function
import argparse
import os
import platform
import time
import cv2
import h5py
import torch.optim as optim
from torch.utils.data import DataLoader
from datasets import DATASET_NAMES, TrainDataset, TestDataset, dataset_info
from losses import *
from model import DexiNed
from model_learnable_sigmoid import DexiNed_learnable_sigmoid
from utils import (save_image_batch_to_disk, visualize_result, visualize_result_ml_hypersim)
# True when running on Linux; used to pick per-platform dataset paths.
# (A comparison already yields a bool — no ternary needed.)
IS_LINUX = platform.system() == "Linux"
# Restrict CUDA to the GPU with device index 1.
os.environ["CUDA_VISIBLE_DEVICES"] = "1"
def parse_args():
    """Parse command line arguments.

    NOTE(review): parser.parse_args() is called twice *while* arguments are
    still being declared, so that --choose_test_data / --choose_train_data can
    drive the defaults of the remaining options.  Statement order is therefore
    significant, and passing any later flag on the command line will also be
    re-parsed by these early calls.
    """
    parser = argparse.ArgumentParser(description='DexiNed trainer.')
    parser.add_argument('--choose_test_data', type=int, default=-1,
                        help='Already set the dataset for testing choice: 0 - 11')
    parser.add_argument('--choose_train_data', type=int, default=-1,
                        help='Already set the dataset for testing choice: 0 - 11')
    # Early parse: resolve the test dataset name and its per-dataset defaults.
    TEST_DATA = DATASET_NAMES[parser.parse_args().choose_test_data]  # max 11
    test_inf = dataset_info(TEST_DATA, is_linux=IS_LINUX)
    test_dir = test_inf['data_dir']
    is_testing = False  # current test _bdcnlossNew256-sd7-1.10.4p5
    # test related
    # NOTE(review): type=bool on argparse options treats any non-empty string
    # as True ('--is_testing False' still yields True) — confirm intended.
    parser.add_argument('--is_testing', type=bool, default=is_testing, help='Script in testing mode.')
    parser.add_argument('--use_dataset', default=True, type=bool, help='test: dataset=True; single image=FALSE')
    parser.add_argument('--test_data', type=str, choices=DATASET_NAMES, default=TEST_DATA, help='Name of the dataset.')
    parser.add_argument('--test_list', type=str, default=test_inf['test_list'], help='Dataset sample indices list.')
    parser.add_argument('--test_img_height', type=int, default=test_inf['img_height'], help='Image height for testing.')
    parser.add_argument('--test_img_width', type=int, default=test_inf['img_width'], help='Image width for testing.')
    # Training settings
    # Second early parse: resolve the training dataset and its defaults.
    TRAIN_DATA = DATASET_NAMES[parser.parse_args().choose_train_data]  # BIPED=0
    train_inf = dataset_info(TRAIN_DATA, is_linux=IS_LINUX)
    train_dir = train_inf['data_dir']
    # training data
    parser.add_argument('--input_dir', type=str, default=train_dir,
                        help='the path to the directory with the input data.')
    parser.add_argument('--input_val_dir', type=str, default=test_dir,
                        help='the path to the directory with the input data for validation.')
    parser.add_argument('--output_dir', type=str, default='checkpoints', help='the path to output the results.')
    parser.add_argument('--train_data', type=str, choices=DATASET_NAMES, default=TRAIN_DATA,
                        help='Name of the dataset.')
    parser.add_argument('--train_list', type=str, default=train_inf['train_list'], help='Dataset sample indices list.')
    parser.add_argument('--img_width', type=int, default=train_inf['img_width'],
                        help='Image width for training.')  # BIPED 400 BSDS 352/320 MDBD 480
    parser.add_argument('--img_height', type=int, default=train_inf['img_height'],
                        help='Image height for training.')  # BIPED 480 BSDS 352/320
    parser.add_argument('--mean_pixel_values', default=[103.939, 116.779, 123.68, 137.86],
                        type=float)  # [103.939,116.779,123.68] [104.00699, 116.66877, 122.67892]
    # training detail
    parser.add_argument('--data_augmentation', type=bool, default=True,
                        help='(BOOL) whether or not to use data augmentation.')
    parser.add_argument('--double_img', type=bool, default=False,
                        help='True: use same 2 imgs changing channels')  # Just for test
    parser.add_argument('--channel_swap', default=[2, 1, 0], type=int)
    parser.add_argument('--crop_img', default=True, type=bool,
                        help='If true crop training images, else resize images to match image width and height.')
    parser.add_argument('--resume', type=bool, default=True, help='use previous trained data')  # Just for test
    parser.add_argument('--checkpoint_data', type=str, default='14/14_model.pth',
                        help='Checkpoint path from which to restore model weights from.')
    parser.add_argument('--res_dir', type=str, default='result', help='Result directory')
    parser.add_argument('--log_interval_vis', type=int, default=50,
                        help='The number of batches to wait before printing test predictions.')
    parser.add_argument('--two_type', type=bool, default=False, help='Result directory')
    parser.add_argument('--epochs', type=int, default=34, metavar='N', help='Number of training epochs (default: 25).')
    parser.add_argument('--lr', default=1e-4, type=float, help='Initial learning rate.')
    parser.add_argument('--wd', type=float, default=0., metavar='WD', help='weight decay (default: 1e-4) in F1=0')
    # parser.add_argument('--lr_stepsize', default=1e4, type=int, help='Learning rate step size.')
    parser.add_argument('--batch_size', type=int, default=8, metavar='B', help='the mini-batch size (default: 8)')
    parser.add_argument('--workers', default=1, type=int, help='The number of workers for the dataloaders.')
    parser.add_argument('--tensorboard', type=bool, default=True, help='Use Tensorboard for logging.'),
    parser.add_argument('--use_learnable_sigmoid', type=bool, default=False, help='Use learnable sigmoid function.')
    args_out = parser.parse_args()
    return args_out
def train_one_epoch(epoch, dataloader, model, criterion, optimizer, device,
                    log_interval_vis, tb_writer, args_in=None):
    """Train *model* for one epoch and return the mean batch loss.

    Every ``log_interval_vis`` batches a side-by-side visualization of the
    input, ground truth and per-scale predictions is written to
    ``<output_dir>/current_res/results.png``.
    """
    imgs_res_folder = os.path.join(args_in.output_dir, 'current_res')
    os.makedirs(imgs_res_folder, exist_ok=True)
    # Put model in training mode
    model.train()
    # l_weight = [0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 1.1] # for bdcn ori loss
    # before [0.6,0.6,1.1,1.1,0.4,0.4,1.3] [0.4,0.4,1.1,1.1,0.6,0.6,1.3],[0.4,0.4,1.1,1.1,0.8,0.8,1.3]
    # Per-output-scale loss weights (one per element of the model's prediction list).
    l_weight = [0.7, 0.7, 1.1, 1.1, 0.3, 0.3, 1.3]  # for bdcn loss theory 3 before the last 1.3 0.6-0..5
    # l_weight = [[0.05, 2.], [0.05, 2.], [0.05, 2.],
    #             [0.1, 1.], [0.1, 1.], [0.1, 1.],
    #             [0.01, 4.]] # for cats loss
    loss_avg = []
    for batch_id, sample_batched in enumerate(dataloader):
        images = sample_batched['images'].to(device)  # BxCxHxW
        labels = sample_batched['labels'].to(device)  # BxCxHxW
        preds_list = model(images)
        # loss = sum([criterion(preds, labels, l_w, device) for preds, l_w in zip(preds_list, l_weight)]) # cats_loss
        # Weighted sum of the per-scale losses, normalized by batch size.
        loss = sum([criterion(preds, labels, l_w) / args_in.batch_size for preds, l_w in zip(preds_list, l_weight)])
        # bdcn_loss
        # loss = sum([criterion(preds, labels) for preds in preds_list]) #HED loss, rcf_loss
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        loss_avg.append(loss.item())
        # One early TensorBoard data point during the very first epoch.
        if epoch == 0 and (batch_id == 100 and tb_writer is not None):
            tmp_loss = np.array(loss_avg).mean()
            tb_writer.add_scalar('loss', tmp_loss, epoch)
        if batch_id % 5 == 0:
            print(time.ctime(), 'Epoch: {0} Sample {1}/{2} Loss: {3}'
                  .format(epoch, batch_id, len(dataloader), loss.item()))
        if batch_id % log_interval_vis == 0:
            # Visualize the sample at batch index 2 — assumes batch_size > 2
            # (default batch_size is 8); TODO confirm for small batches.
            scene_name = None
            if args_in.train_data == 'ML-Hypersim':
                scene_name = sample_batched['scene_name'][2]
            res_data = []
            img = images.cpu().numpy()
            res_data.append(img[2])
            ed_gt = labels.cpu().numpy()
            res_data.append(ed_gt[2])
            # tmp_pred = tmp_preds[2,...]
            for i in range(len(preds_list)):
                tmp = preds_list[i]
                tmp = tmp[2]
                # print(tmp.shape)
                tmp = torch.sigmoid(tmp)
                if args_in.train_data != 'ML-Hypersim':
                    tmp = tmp.unsqueeze(dim=0)
                tmp = tmp.cpu().detach().numpy()
                res_data.append(tmp)
            if args_in.train_data == 'ML-Hypersim':
                vis_imgs = visualize_result_ml_hypersim(res_data, scene_name, arg=args_in)
            else:
                vis_imgs = visualize_result(res_data, arg=args_in)
            del tmp, res_data
            # Shrink the montage to 80% and stamp a progress caption on it.
            vis_imgs = cv2.resize(vis_imgs, (int(vis_imgs.shape[1] * 0.8), int(vis_imgs.shape[0] * 0.8)))
            img_test = 'Epoch: {0} Sample {1}/{2} Loss: {3} File_name: {4}'.format(epoch, batch_id, len(dataloader),
                                                                                  loss.item(), scene_name)
            BLACK = (0, 0, 255)
            font = cv2.FONT_HERSHEY_SIMPLEX
            font_size = 1.1
            font_color = BLACK
            font_thickness = 2
            x, y = 30, 30
            vis_imgs = cv2.putText(vis_imgs,
                                   img_test,
                                   (x, y),
                                   font, font_size, font_color, font_thickness, cv2.LINE_AA)
            cv2.imwrite(os.path.join(imgs_res_folder, 'results.png'), vis_imgs)
    loss_avg = np.array(loss_avg).mean()
    return loss_avg
return loss_avg
def validate_one_epoch(epoch, dataloader, model, device, output_dir, arg=None):
    """Run inference over *dataloader* and dump edge maps to *output_dir*.

    Note: despite its name, this performs testing-style inference rather than
    loss-based validation.
    """
    print(epoch)
    model.eval()
    with torch.no_grad():
        for sample in dataloader:
            inputs = sample['images'].to(device)
            scenes = sample['scene_name']
            cams = sample['cam_name']
            ids = sample['img_id']
            # Output file name: <scene>.<camera>.<frame id>.png
            names = [f'{scenes[i]}.{cams[i]}.{ids[i]}.png' for i in range(len(scenes))]
            shape = [sample['image_shape'][0].cpu().numpy()[0],
                     sample['image_shape'][1].cpu().numpy()[0]]
            outputs = model(inputs)
            # Only the final (fused) prediction is written to disk.
            save_image_batch_to_disk(outputs[-1], output_dir, names,
                                     img_shape=shape, arg=arg)
def model_test(checkpoint_path, dataloader, model, device, output_dir, args_in):
    """Restore weights from *checkpoint_path*, run inference over *dataloader*
    and save every predicted edge map under *output_dir*.

    Raises FileNotFoundError when the checkpoint file does not exist.
    Also prints per-dataset timing statistics.
    """
    if not os.path.isfile(checkpoint_path):
        raise FileNotFoundError(
            f"Checkpoint filte note found: {checkpoint_path}")
    print(f"Restoring weights from: {checkpoint_path}")
    model.load_state_dict(torch.load(checkpoint_path,
                                     map_location=device))
    # Put model in evaluation mode
    model.eval()
    with torch.no_grad():
        total_duration = []
        for batch_id, sample_batched in enumerate(dataloader):
            images = sample_batched['images'].to(device)
            if not (args_in.test_data == "CLASSIC" or args_in.test_data == 'cabin'):
                _ = sample_batched['labels'].to(device)  # get the labels
            if args_in.test_data == 'ML-Hypersim':
                # ML-Hypersim samples are named <scene>_<camera>_<frame id>.png.
                file_names = [sample_batched['scene_name'][0] + '_' + sample_batched['cam_name'][0] + '_' +
                              sample_batched['img_id'][0] + '.png']
                image_shape = sample_batched['image_shape']
            else:
                file_names = sample_batched['file_names']
                image_shape = sample_batched['image_shape']
            print(f"input tensor shape: {images.shape}")
            # images = images[:, [2, 1, 0], :, :]
            # Time only the forward pass, not the disk writes.
            start_time = time.time()
            preds = model(images)
            tmp_duration = time.time() - start_time
            total_duration.append(tmp_duration)
            save_image_batch_to_disk(preds,
                                     output_dir,
                                     file_names,
                                     image_shape,
                                     arg=args_in)
            torch.cuda.empty_cache()
    total_duration = np.array(total_duration)
    print("******** Testing finished in", args_in.test_data, "dataset. *****")
    print("Average time per image: %f.4" % total_duration.mean(), "seconds")
    print("Time spend in the Dataset: %f.4" % total_duration.sum(), "seconds")
def verify_single(checkpoint_path, image_in, model, device, output_dir, args_in):
    """Restore weights and run inference on one preloaded image dict.

    *image_in* must provide 'data' (input tensor) and 'filename' keys.
    Raises FileNotFoundError when the checkpoint file does not exist.
    """
    if not os.path.isfile(checkpoint_path):
        raise FileNotFoundError(
            f"Checkpoint filte note found: {checkpoint_path}")
    print(f"Restoring weights from: {checkpoint_path}")
    state = torch.load(checkpoint_path, map_location=device)
    model.load_state_dict(state)
    model.eval()
    with torch.no_grad():
        tensor = image_in['data'].to(device)
        name = image_in['filename']
        shape = image_in['data'].shape
        print(f"input tensor shape: {tensor.shape}")
        # Time only the forward pass.
        begin = time.time()
        preds = model(tensor)
        duration = time.time() - begin
        save_image_batch_to_disk(preds, output_dir, name, shape, arg=args_in)
        torch.cuda.empty_cache()
    print("******** Testing finished in", args_in.test_data, "dataset. *****")
    print("Time spend: %f.4" % duration, "seconds")
def verifyPich(checkpoint_path, dataloader, model, device, output_dir, args_in):
    # a test model plus the interganged channels
    """Restore weights and run inference twice per batch: once on the original
    images and once with the channel order reversed, then save both prediction
    sets merged (is_inchannel=True).

    Raises FileNotFoundError when the checkpoint file does not exist.
    """
    if not os.path.isfile(checkpoint_path):
        raise FileNotFoundError(f"Checkpoint filte note found: {checkpoint_path}")
    print(f"Restoring weights from: {checkpoint_path}")
    model.load_state_dict(torch.load(checkpoint_path, map_location=device))
    # Put model in evaluation mode
    model.eval()
    with torch.no_grad():
        total_duration = []
        for batch_id, sample_batched in enumerate(dataloader):
            images = sample_batched['images'].to(device)
            if not args_in.test_data == "CLASSIC":
                _ = sample_batched['labels'].to(device)  # get the labels
            file_names = sample_batched['file_names']
            image_shape = sample_batched['image_shape']
            print(f"input tensor shape: {images.shape}")
            start_time = time.time()
            # images2 = images[:, [1, 0, 2], :, :] #GBR
            # Second pass with the channel axis reversed.
            images2 = images[:, [2, 1, 0], :, :]  # RGB
            preds = model(images)
            preds2 = model(images2)
            tmp_duration = time.time() - start_time
            total_duration.append(tmp_duration)
            save_image_batch_to_disk([preds, preds2],
                                     output_dir,
                                     file_names,
                                     image_shape,
                                     arg=args_in, is_inchannel=True)
            torch.cuda.empty_cache()
    total_duration = np.array(total_duration)
    print("******** Testing finished in", args_in.test_data, "dataset. *****")
    print("Average time per image: %f.4" % total_duration.mean(), "seconds")
    print("Time spend in the Dataset: %f.4" % total_duration.sum(), "seconds")
def main(args_in):
    """Main function.

    Dispatches between testing (args_in.is_testing) and training.  Testing
    either runs over the validation dataloader or over one hard-coded HDF5
    image; training loops over epochs, saving a checkpoint and visual results
    after each one.
    """
    print(f"Number of GPU's available: {torch.cuda.device_count()}")
    print(f"Pytorch version: {torch.__version__}")
    # Tensorboard summary writer
    tb_writer = None
    training_dir = os.path.join(args_in.output_dir, args_in.train_data)
    os.makedirs(training_dir, exist_ok=True)
    checkpoint_path = os.path.join(args_in.output_dir, args_in.train_data, args_in.checkpoint_data)
    if args_in.tensorboard and not args_in.is_testing:
        # from tensorboardX import SummaryWriter # previous torch version
        from torch.utils.tensorboard import SummaryWriter  # for torch 1.4 or greather
        tb_writer = SummaryWriter(log_dir=training_dir)
    # Get computing device
    device = torch.device('cpu' if torch.cuda.device_count() == 0 else 'cuda')
    # Instantiate model and move it to the computing device
    if args_in.use_learnable_sigmoid:
        model = DexiNed_learnable_sigmoid(args_in).to(device)
    else:
        model = DexiNed(args_in).to(device)
    # model = nn.DataParallel(model)
    ini_epoch = 0
    dataloader_train = None
    dataloader_val = None
    if not args_in.is_testing:
        if args_in.resume:
            # Resuming hard-codes the starting epoch to 17.
            ini_epoch = 17
            model.load_state_dict(torch.load(checkpoint_path, map_location=device))
        dataset_train = TrainDataset(args_in.input_dir,
                                     img_width=args_in.img_width,
                                     img_height=args_in.img_height,
                                     mean_bgr=args_in.mean_pixel_values[0:3] if len(
                                         args_in.mean_pixel_values) == 4 else args_in.mean_pixel_values,
                                     train_mode='train',
                                     arg=args_in
                                     )
        _ = dataset_train[0]  # get a data sample
        dataloader_train = DataLoader(dataset_train, batch_size=args_in.batch_size, shuffle=True,
                                      num_workers=args_in.workers)
    if args_in.use_dataset:
        dataset_val = TestDataset(args_in.input_val_dir,
                                  test_data=args_in.test_data,
                                  img_width=args_in.test_img_width,
                                  img_height=args_in.test_img_height,
                                  mean_bgr=args_in.mean_pixel_values[0:3] if len(
                                      args_in.mean_pixel_values) == 4 else args_in.mean_pixel_values,
                                  test_list=args_in.test_list,
                                  arg=args_in
                                  )
        _ = dataset_val[0]  # get a data sample
        dataloader_val = DataLoader(dataset_val, batch_size=1, shuffle=False, num_workers=args_in.workers)
    # Testing
    if args_in.is_testing:
        output_dir = os.path.join(args_in.res_dir, args_in.train_data + "2" + args_in.test_data)
        print(f"output_dir: {output_dir}")
        if args_in.use_dataset:
            if args_in.double_img:
                # predict twice an image changing channels, then mix those results
                verifyPich(checkpoint_path, dataloader_val, model, device, output_dir, args_in)
            else:
                model_test(checkpoint_path, dataloader_val, model, device, output_dir, args_in)
        else:
            # Single-image mode reads one hard-coded HDF5 depth image.
            img_data = h5py.File('/home/ubuntu/DexiNed/data/archive/Classic/frame.0000.depth_meters.hdf5', 'r')
            img_data = img_data['dataset'][:]
            img_test = {
                'data': torch.tensor(img_data),
                'filename': 'frame.0000.depth_meters'}
            verify_single(checkpoint_path, img_test, model, device, output_dir, args_in)
        return
    if args_in.two_type:
        criterion = multi_class
    else:
        criterion = bdcn_loss2
    # criterion = bdcn_loss_liu
    # criterion = mse_loss
    optimizer = optim.Adam(model.parameters(), lr=args_in.lr, weight_decay=args_in.wd)
    # lr_schd = lr_scheduler.StepLR(optimizer, step_size=args_in.lr_stepsize, gamma=args_in.lr_gamma)
    # Main training loop
    seed = 1021
    for epoch in range(ini_epoch, args_in.epochs):
        # Re-seed all RNGs every 7 epochs.
        if epoch % 7 == 0:
            seed = seed + 1000
            np.random.seed(seed)
            torch.manual_seed(seed)
            torch.cuda.manual_seed(seed)
            print("------ Random seed applied-------------")
        # Create output directories
        output_dir_epoch = os.path.join(args_in.output_dir, args_in.train_data, str(epoch))
        img_test_dir = os.path.join(output_dir_epoch, args_in.test_data + '_res')
        os.makedirs(output_dir_epoch, exist_ok=True)
        os.makedirs(img_test_dir, exist_ok=True)
        avg_loss = train_one_epoch(epoch, dataloader_train, model, criterion, optimizer, device,
                                   args_in.log_interval_vis,
                                   tb_writer, args_in=args_in)
        validate_one_epoch(epoch, dataloader_val, model, device, img_test_dir, arg=args_in)
        # Save model after end of every epoch
        if args_in.use_learnable_sigmoid:
            torch.save(model.module.state_dict() if hasattr(model, "module") else model.state_dict(),
                       os.path.join(output_dir_epoch, 'learnable_sigmoid_{0}_model.pth'.format(epoch)))
        else:
            torch.save(model.module.state_dict() if hasattr(model, "module") else model.state_dict(),
                       os.path.join(output_dir_epoch, '{0}_model.pth'.format(epoch)))
        if tb_writer is not None:
            tb_writer.add_scalar('loss', avg_loss, epoch + 1)
if __name__ == '__main__':
    # Parse CLI options, then run training or testing accordingly.
    main(parse_args())
| [
"model.DexiNed",
"torch.utils.tensorboard.SummaryWriter",
"time.ctime",
"utils.visualize_result",
"os.makedirs",
"argparse.ArgumentParser",
"model_learnable_sigmoid.DexiNed_learnable_sigmoid",
"os.path.join",
"cv2.putText",
"os.path.isfile",
"h5py.File",
"platform.system",
"utils.save_image_... | [((639, 694), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""DexiNed trainer."""'}), "(description='DexiNed trainer.')\n", (662, 694), False, 'import argparse\n'), ((1092, 1134), 'datasets.dataset_info', 'dataset_info', (['TEST_DATA'], {'is_linux': 'IS_LINUX'}), '(TEST_DATA, is_linux=IS_LINUX)\n', (1104, 1134), False, 'from datasets import DATASET_NAMES, TrainDataset, TestDataset, dataset_info\n'), ((2073, 2116), 'datasets.dataset_info', 'dataset_info', (['TRAIN_DATA'], {'is_linux': 'IS_LINUX'}), '(TRAIN_DATA, is_linux=IS_LINUX)\n', (2085, 2116), False, 'from datasets import DATASET_NAMES, TrainDataset, TestDataset, dataset_info\n'), ((5692, 5739), 'os.path.join', 'os.path.join', (['args_in.output_dir', '"""current_res"""'], {}), "(args_in.output_dir, 'current_res')\n", (5704, 5739), False, 'import os\n'), ((5744, 5787), 'os.makedirs', 'os.makedirs', (['imgs_res_folder'], {'exist_ok': '(True)'}), '(imgs_res_folder, exist_ok=True)\n', (5755, 5787), False, 'import os\n'), ((15870, 15922), 'os.path.join', 'os.path.join', (['args_in.output_dir', 'args_in.train_data'], {}), '(args_in.output_dir, args_in.train_data)\n', (15882, 15922), False, 'import os\n'), ((15927, 15967), 'os.makedirs', 'os.makedirs', (['training_dir'], {'exist_ok': '(True)'}), '(training_dir, exist_ok=True)\n', (15938, 15967), False, 'import os\n'), ((15990, 16067), 'os.path.join', 'os.path.join', (['args_in.output_dir', 'args_in.train_data', 'args_in.checkpoint_data'], {}), '(args_in.output_dir, args_in.train_data, args_in.checkpoint_data)\n', (16002, 16067), False, 'import os\n'), ((485, 502), 'platform.system', 'platform.system', ([], {}), '()\n', (500, 502), False, 'import platform\n'), ((10662, 10693), 'os.path.isfile', 'os.path.isfile', (['checkpoint_path'], {}), '(checkpoint_path)\n', (10676, 10693), False, 'import os\n'), ((12704, 12735), 'os.path.isfile', 'os.path.isfile', (['checkpoint_path'], {}), '(checkpoint_path)\n', (12718, 
12735), False, 'import os\n'), ((13334, 13345), 'time.time', 'time.time', ([], {}), '()\n', (13343, 13345), False, 'import time\n'), ((13428, 13514), 'utils.save_image_batch_to_disk', 'save_image_batch_to_disk', (['preds', 'output_dir', 'file_names', 'image_shape'], {'arg': 'args_in'}), '(preds, output_dir, file_names, image_shape, arg=\n args_in)\n', (13452, 13514), False, 'from utils import save_image_batch_to_disk, visualize_result, visualize_result_ml_hypersim\n'), ((13950, 13981), 'os.path.isfile', 'os.path.isfile', (['checkpoint_path'], {}), '(checkpoint_path)\n', (13964, 13981), False, 'import os\n'), ((16305, 16340), 'torch.utils.tensorboard.SummaryWriter', 'SummaryWriter', ([], {'log_dir': 'training_dir'}), '(log_dir=training_dir)\n', (16318, 16340), False, 'from torch.utils.tensorboard import SummaryWriter\n'), ((17544, 17647), 'torch.utils.data.DataLoader', 'DataLoader', (['dataset_train'], {'batch_size': 'args_in.batch_size', 'shuffle': '(True)', 'num_workers': 'args_in.workers'}), '(dataset_train, batch_size=args_in.batch_size, shuffle=True,\n num_workers=args_in.workers)\n', (17554, 17647), False, 'from torch.utils.data import DataLoader\n'), ((18370, 18456), 'torch.utils.data.DataLoader', 'DataLoader', (['dataset_val'], {'batch_size': '(1)', 'shuffle': '(False)', 'num_workers': 'args_in.workers'}), '(dataset_val, batch_size=1, shuffle=False, num_workers=args_in.\n workers)\n', (18380, 18456), False, 'from torch.utils.data import DataLoader\n'), ((18515, 18590), 'os.path.join', 'os.path.join', (['args_in.res_dir', "(args_in.train_data + '2' + args_in.test_data)"], {}), "(args_in.res_dir, args_in.train_data + '2' + args_in.test_data)\n", (18527, 18590), False, 'import os\n'), ((20228, 20286), 'os.path.join', 'os.path.join', (['output_dir_epoch', "(args_in.test_data + '_res')"], {}), "(output_dir_epoch, args_in.test_data + '_res')\n", (20240, 20286), False, 'import os\n'), ((20295, 20339), 'os.makedirs', 'os.makedirs', (['output_dir_epoch'], 
{'exist_ok': '(True)'}), '(output_dir_epoch, exist_ok=True)\n', (20306, 20339), False, 'import os\n'), ((20348, 20388), 'os.makedirs', 'os.makedirs', (['img_test_dir'], {'exist_ok': '(True)'}), '(img_test_dir, exist_ok=True)\n', (20359, 20388), False, 'import os\n'), ((8930, 9031), 'cv2.putText', 'cv2.putText', (['vis_imgs', 'img_test', '(x, y)', 'font', 'font_size', 'font_color', 'font_thickness', 'cv2.LINE_AA'], {}), '(vis_imgs, img_test, (x, y), font, font_size, font_color,\n font_thickness, cv2.LINE_AA)\n', (8941, 9031), False, 'import cv2\n'), ((10476, 10572), 'utils.save_image_batch_to_disk', 'save_image_batch_to_disk', (['preds[-1]', 'output_dir', 'file_names'], {'img_shape': 'image_shape', 'arg': 'arg'}), '(preds[-1], output_dir, file_names, img_shape=\n image_shape, arg=arg)\n', (10500, 10572), False, 'from utils import save_image_batch_to_disk, visualize_result, visualize_result_ml_hypersim\n'), ((11902, 11913), 'time.time', 'time.time', ([], {}), '()\n', (11911, 11913), False, 'import time\n'), ((12060, 12146), 'utils.save_image_batch_to_disk', 'save_image_batch_to_disk', (['preds', 'output_dir', 'file_names', 'image_shape'], {'arg': 'args_in'}), '(preds, output_dir, file_names, image_shape, arg=\n args_in)\n', (12084, 12146), False, 'from utils import save_image_batch_to_disk, visualize_result, visualize_result_ml_hypersim\n'), ((13395, 13406), 'time.time', 'time.time', ([], {}), '()\n', (13404, 13406), False, 'import time\n'), ((14743, 14754), 'time.time', 'time.time', ([], {}), '()\n', (14752, 14754), False, 'import time\n'), ((15050, 15164), 'utils.save_image_batch_to_disk', 'save_image_batch_to_disk', (['[preds, preds2]', 'output_dir', 'file_names', 'image_shape'], {'arg': 'args_in', 'is_inchannel': '(True)'}), '([preds, preds2], output_dir, file_names,\n image_shape, arg=args_in, is_inchannel=True)\n', (15074, 15164), False, 'from utils import save_image_batch_to_disk, visualize_result, visualize_result_ml_hypersim\n'), ((19031, 19128), 
'h5py.File', 'h5py.File', (['"""/home/ubuntu/DexiNed/data/archive/Classic/frame.0000.depth_meters.hdf5"""', '"""r"""'], {}), "(\n '/home/ubuntu/DexiNed/data/archive/Classic/frame.0000.depth_meters.hdf5',\n 'r')\n", (19040, 19128), False, 'import h5py\n'), ((7208, 7220), 'time.ctime', 'time.ctime', ([], {}), '()\n', (7218, 7220), False, 'import time\n'), ((8203, 8266), 'utils.visualize_result_ml_hypersim', 'visualize_result_ml_hypersim', (['res_data', 'scene_name'], {'arg': 'args_in'}), '(res_data, scene_name, arg=args_in)\n', (8231, 8266), False, 'from utils import save_image_batch_to_disk, visualize_result, visualize_result_ml_hypersim\n'), ((8312, 8351), 'utils.visualize_result', 'visualize_result', (['res_data'], {'arg': 'args_in'}), '(res_data, arg=args_in)\n', (8328, 8351), False, 'from utils import save_image_batch_to_disk, visualize_result, visualize_result_ml_hypersim\n'), ((9157, 9201), 'os.path.join', 'os.path.join', (['imgs_res_folder', '"""results.png"""'], {}), "(imgs_res_folder, 'results.png')\n", (9169, 9201), False, 'import os\n'), ((11975, 11986), 'time.time', 'time.time', ([], {}), '()\n', (11984, 11986), False, 'import time\n'), ((14965, 14976), 'time.time', 'time.time', ([], {}), '()\n', (14974, 14976), False, 'import time\n'), ((16563, 16597), 'model_learnable_sigmoid.DexiNed_learnable_sigmoid', 'DexiNed_learnable_sigmoid', (['args_in'], {}), '(args_in)\n', (16588, 16597), False, 'from model_learnable_sigmoid import DexiNed_learnable_sigmoid\n'), ((16635, 16651), 'model.DexiNed', 'DexiNed', (['args_in'], {}), '(args_in)\n', (16642, 16651), False, 'from model import DexiNed\n')] |
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import patches
def get_ellipse_params(rho, M):
    """
    Return ellipse parameters (width, height, angle) for the level set
    described by rho and M, excluding the center point.

    Parameters
    ----------
    rho : float
        Positive scaling applied to M.
    M : ndarray, shape (2, 2)
        Symmetric positive-definite matrix describing the ellipse.

    Returns
    -------
    width, height : float
        Full axis lengths (2 / sqrt(eigenvalue)); width corresponds to
        the smaller eigenvalue and is therefore the major axis.
    angle : float
        Rotation of the major axis, in degrees.

    Raises
    ------
    ValueError
        If the parameters do not represent an ellipse (non-positive
        eigenvalues).
    """
    # Eigenvalue decomposition to get the axes; eigh returns the
    # eigenvalues of a symmetric matrix in ascending order.
    w, v = np.linalg.eigh(M / rho)
    # BUG FIX: the original wrapped the sqrt calls in a bare `except`
    # that printed a message and then fell through to `return
    # width,height,angle` with the names undefined (NameError). Validate
    # the eigenvalues up front and fail loudly instead.
    if w[0] <= 0 or w[1] <= 0:
        raise ValueError("paramters do not represent an ellipse.")
    # Let the smaller eigenvalue define the width (major axis * 2!).
    width = 2 / float(np.sqrt(w[0]))
    height = 2 / float(np.sqrt(w[1]))
    # The angle of the ellipse is defined by the eigenvector assigned to
    # the smallest eigenvalue, because that vector spans the major axis.
    angle = np.rad2deg(np.arctan2(v[:, 0][1], v[:, 0][0]))
    return width, height, angle
def get_ellipse_patch(px, py, rho, M, alpha_val=1, linec="red", facec="none", linest="solid"):
    """
    Build a matplotlib Ellipse patch centered at (px, py) whose shape is
    derived from rho and M via get_ellipse_params.
    """
    width, height, angle = get_ellipse_params(rho, M)
    return patches.Ellipse(
        (px, py),
        width,
        height,
        angle,
        alpha=alpha_val,
        ec=linec,
        facecolor=facec,
        linestyle=linest,
    )
def plot_ellipse(px, py, rho, M, save_to=None, show=True):
    """
    Plot a single ellipse patch centered at (px, py), optionally saving
    the figure to *save_to* and/or showing it interactively.
    """
    patch = get_ellipse_patch(px, py, rho, M)
    fig, ax = plt.subplots()
    ax.add_patch(patch)
    # Size the axes so the full ellipse fits regardless of orientation.
    extent = np.max([patch.width, patch.height])
    ax.set_xlim(px - extent / 2, px + extent / 2)
    ax.set_ylim(py - extent / 2, py + extent / 2)
    ax.grid(True)
    if save_to is not None:
        plt.savefig(save_to)
    if show:
        plt.show()
| [
"matplotlib.pyplot.savefig",
"numpy.sqrt",
"numpy.max",
"numpy.arctan2",
"numpy.linalg.eigh",
"matplotlib.patches.Ellipse",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show"
] | [((232, 255), 'numpy.linalg.eigh', 'np.linalg.eigh', (['(M / rho)'], {}), '(M / rho)\n', (246, 255), True, 'import numpy as np\n'), ((925, 1026), 'matplotlib.patches.Ellipse', 'patches.Ellipse', (['(px, py)', 'w', 'h', 'a'], {'alpha': 'alpha_val', 'ec': 'linec', 'facecolor': 'facec', 'linestyle': 'linest'}), '((px, py), w, h, a, alpha=alpha_val, ec=linec, facecolor=\n facec, linestyle=linest)\n', (940, 1026), False, 'from matplotlib import patches\n'), ((1132, 1146), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (1144, 1146), True, 'import matplotlib.pyplot as plt\n'), ((1173, 1200), 'numpy.max', 'np.max', (['[p.width, p.height]'], {}), '([p.width, p.height])\n', (1179, 1200), True, 'import numpy as np\n'), ((1321, 1341), 'matplotlib.pyplot.savefig', 'plt.savefig', (['save_to'], {}), '(save_to)\n', (1332, 1341), True, 'import matplotlib.pyplot as plt\n'), ((1363, 1373), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1371, 1373), True, 'import matplotlib.pyplot as plt\n'), ((610, 644), 'numpy.arctan2', 'np.arctan2', (['v[:, 0][1]', 'v[:, 0][0]'], {}), '(v[:, 0][1], v[:, 0][0])\n', (620, 644), True, 'import numpy as np\n'), ((358, 371), 'numpy.sqrt', 'np.sqrt', (['w[0]'], {}), '(w[0])\n', (365, 371), True, 'import numpy as np\n'), ((398, 411), 'numpy.sqrt', 'np.sqrt', (['w[1]'], {}), '(w[1])\n', (405, 411), True, 'import numpy as np\n')] |
import cv2
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
from google.colab.patches import cv2_imshow
from tensorflow.keras.applications import *
from tensorflow.keras.layers import *
from tensorflow.keras.models import *
from math import *
import glob
import os
import time
from tensorflow.keras import regularizers
from pathfile import *
def iou(box_a, box_b):
    """
    Intersection-over-union of two axis-aligned boxes given as
    [x, y, w, h]. Returns 0 when the boxes do not overlap or the union
    has zero area.
    """
    # Overlap rectangle along both axes.
    top = max(box_a[1], box_b[1])
    bottom = min(box_a[1] + box_a[3], box_b[1] + box_b[3])
    left = max(box_a[0], box_b[0])
    right = min(box_a[0] + box_a[2], box_b[0] + box_b[2])
    if top > bottom or left > right:
        return 0  # disjoint boxes: no intersection
    intersection = (right - left) * (bottom - top)
    union = box_a[2] * box_a[3] + box_b[2] * box_b[3] - intersection
    if union == 0:
        return 0  # guard against zero-area degenerate boxes
    return intersection / union
def get_proposals(path):
    """
    Run OpenCV structured-edge detection + EdgeBoxes on the image at
    *path* (resized to 800x800) and return up to 64 candidate boxes.
    """
    image = cv2.imread(path)
    image = cv2.resize(image, (800, 800), cv2.INTER_AREA)
    image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
    # MODEL_YML is the pre-trained structured-edge model (from pathfile).
    detector = cv2.ximgproc.createStructuredEdgeDetection(MODEL_YML)
    edges = detector.detectEdges(np.float32(image) / 255.0)
    orientation = detector.computeOrientation(edges)
    edges = detector.edgesNms(edges, orientation)
    box_generator = cv2.ximgproc.createEdgeBoxes()
    box_generator.setMaxBoxes(64)
    return box_generator.getBoundingBoxes(edges, orientation)
def get_feature_map(path, model):
    """
    Load the image at *path* at 800x800, apply VGG16 preprocessing and
    return *model*'s feature map for it (batch dimension stripped).
    """
    image = tf.keras.preprocessing.image.load_img(path=path, target_size=(800, 800))
    image = tf.keras.preprocessing.image.img_to_array(image)
    image = tf.keras.applications.vgg16.preprocess_input(image)
    batch = np.reshape(image, (1, 800, 800, 3))
    return model.predict(batch)[0]
def get_vgg():
    """
    Build a truncated, frozen VGG16 feature extractor: only layers whose
    output spatial size is at least 50 are kept, so the result produces
    a mid-level feature map for 800x800 inputs.
    """
    backbone = VGG16(include_top=False, input_shape=(800, 800, 3))
    inp = Input((800, 800, 3))
    x = inp
    for layer in backbone.layers:
        if layer.__class__.__name__ == 'InputLayer':
            continue
        if layer.output_shape[1] >= 50:
            x = layer(x)
    model = Model(inp, x)
    model.trainable = False  # feature extractor only; weights stay fixed
    return model
def get_resized_boxes(path, original_boxes):
    """
    Rescale ground-truth boxes from the original size of the image at
    *path* to the fixed 800x800 network input size.

    Parameters
    ----------
    path : str
        Path of the image the boxes refer to; only its size is used.
    original_boxes : iterable of [x, y, w, h]
        Boxes in original-image pixel coordinates.

    Returns
    -------
    list of [x, y, w, h]
        Boxes in 800x800 pixel coordinates (int-truncated).
    """
    image = cv2.imread(path)
    # Width is the number of columns, height the number of rows.
    width, height = len(image[0]), len(image)
    # NOTE: the original code also resized the image here, but the
    # resized result was never used; the wasted resize has been removed.
    x_scale = 800.0 / width
    y_scale = 800.0 / height
    gt_boxes = []
    for gt in original_boxes:
        x, y, w, h = list(map(int, gt))
        gt_boxes.append([int(x * x_scale), int(y * y_scale),
                         int(w * x_scale), int(h * y_scale)])
    return gt_boxes
def get_fastrcnn():
    """
    Build the Fast R-CNN head: pooled 14x14x512 RoI features fed through
    a dense stack with two outputs — a 4-way box regression and an
    11-class softmax.
    """
    roi_input = Input((14, 14, 512))
    y = MaxPool2D(2)(roi_input)
    y = Flatten()(y)
    y = Dense(1024)(y)
    y = Dropout(0.25)(y)
    y = Dense(1024)(y)
    y = Dropout(0.25)(y)
    y = Dense(512)(y)
    y = Dropout(0.25)(y)
    y = Dense(1024, name='logits')(y)
    # L2 activity regularization on the regression output only.
    reg = Dense(4, activity_regularizer=regularizers.l2(1e-1), name='regression_layer')(y)
    cls = Dense(11, name='class_layer')(y)
    cls = Softmax()(cls)
    return Model(inputs=roi_input, outputs=[reg, cls])
"tensorflow.keras.preprocessing.image.load_img",
"numpy.reshape",
"cv2.ximgproc.createStructuredEdgeDetection",
"tensorflow.keras.applications.vgg16.preprocess_input",
"cv2.cvtColor",
"tensorflow.keras.preprocessing.image.img_to_array",
"cv2.ximgproc.createEdgeBoxes",
"cv2.resize",
"cv2.imread",
"... | [((882, 898), 'cv2.imread', 'cv2.imread', (['path'], {}), '(path)\n', (892, 898), False, 'import cv2\n'), ((907, 949), 'cv2.resize', 'cv2.resize', (['im', '(800, 800)', 'cv2.INTER_AREA'], {}), '(im, (800, 800), cv2.INTER_AREA)\n', (917, 949), False, 'import cv2\n'), ((955, 990), 'cv2.cvtColor', 'cv2.cvtColor', (['im', 'cv2.COLOR_BGR2RGB'], {}), '(im, cv2.COLOR_BGR2RGB)\n', (967, 990), False, 'import cv2\n'), ((1012, 1065), 'cv2.ximgproc.createStructuredEdgeDetection', 'cv2.ximgproc.createStructuredEdgeDetection', (['MODEL_YML'], {}), '(MODEL_YML)\n', (1054, 1065), False, 'import cv2\n'), ((1255, 1285), 'cv2.ximgproc.createEdgeBoxes', 'cv2.ximgproc.createEdgeBoxes', ([], {}), '()\n', (1283, 1285), False, 'import cv2\n'), ((1434, 1506), 'tensorflow.keras.preprocessing.image.load_img', 'tf.keras.preprocessing.image.load_img', ([], {'path': 'path', 'target_size': '(800, 800)'}), '(path=path, target_size=(800, 800))\n', (1471, 1506), True, 'import tensorflow as tf\n'), ((1515, 1560), 'tensorflow.keras.preprocessing.image.img_to_array', 'tf.keras.preprocessing.image.img_to_array', (['im'], {}), '(im)\n', (1556, 1560), True, 'import tensorflow as tf\n'), ((1569, 1617), 'tensorflow.keras.applications.vgg16.preprocess_input', 'tf.keras.applications.vgg16.preprocess_input', (['im'], {}), '(im)\n', (1613, 1617), True, 'import tensorflow as tf\n'), ((1626, 1658), 'numpy.reshape', 'np.reshape', (['im', '(1, 800, 800, 3)'], {}), '(im, (1, 800, 800, 3))\n', (1636, 1658), True, 'import numpy as np\n'), ((2101, 2117), 'cv2.imread', 'cv2.imread', (['path'], {}), '(path)\n', (2111, 2117), False, 'import cv2\n'), ((2186, 2244), 'cv2.resize', 'cv2.resize', (['temp', '(800, 800)'], {'interpolation': 'cv2.INTER_AREA'}), '(temp, (800, 800), interpolation=cv2.INTER_AREA)\n', (2196, 2244), False, 'import cv2\n'), ((1106, 1120), 'numpy.float32', 'np.float32', (['im'], {}), '(im)\n', (1116, 1120), True, 'import numpy as np\n'), ((2834, 2854), 'tensorflow.keras.regularizers.l2', 
'regularizers.l2', (['(0.1)'], {}), '(0.1)\n', (2849, 2854), False, 'from tensorflow.keras import regularizers\n')] |
import random
import collections
import math
import os
import zipfile
import time
import re
import numpy as np
import tensorflow as tf
from matplotlib import pylab
from six.moves import range
from six.moves.urllib.request import urlretrieve
dataset_link = 'http://mattmahoney.net/dc/'
zip_file = 'text8.zip'
def data_download(zip_file):
    """Download *zip_file* from the dataset mirror unless it is already
    present on disk."""
    if os.path.exists(zip_file):
        return None  # nothing to do, file already downloaded
    zip_file, _ = urlretrieve(dataset_link + zip_file, zip_file)
    print('File download successfully!')
    return None
extracted_folder = 'dataset'
if not os.path.isdir(extracted_folder):
with zipfile.ZipFile(zip_file) as zf:
zf.extractall(extracted_folder)
with open('dataset/text8') as ft_:
full_text = ft_.read()
def text_processing(ft8_text):
    """
    Lower-case *ft8_text*, replace punctuation with named tokens such as
    ' <period> ', and return the whitespace-split token list.
    """
    # Replacement order mirrors the original implementation.
    replacements = [
        ('.', ' <period> '),
        (',', ' <comma> '),
        ('"', ' <quotation> '),
        (';', ' <semicolon> '),
        ('!', ' <exclamation> '),
        ('?', ' <question> '),
        ('(', ' <paren_l> '),
        (')', ' <paren_r> '),
        ('--', ' <hyphen> '),
        (':', ' <colon> '),
    ]
    text = ft8_text.lower()
    for old, new in replacements:
        text = text.replace(old, new)
    return text.split()
ft_token = text_processing(full_text)
word_cnt = collections.Counter(ft_token)
shortlisted_words = [w for w in ft_token if word_cnt[w] > 7]
print(shortlisted_words[:15])
print('Total number of shortlisted words: ', len(shortlisted_words))
print('Unique number of shortlisted words: ', len(set(shortlisted_words)))
def dict_creation(shortlisted_words):
    """
    Map each distinct word to a unique integer id; more frequent words
    receive smaller ids. The frequency itself is discarded.

    Returns
    -------
    (dictionary_, rev_dictionary_)
        word -> id and id -> word mappings.
    """
    counts = collections.Counter(shortlisted_words)
    vocabulary = sorted(counts, key=counts.get, reverse=True)
    rev_dictionary_ = dict(enumerate(vocabulary))
    dictionary_ = {word: idx for idx, word in rev_dictionary_.items()}
    return dictionary_, rev_dictionary_
dictionary_, rev_dictionary_ = dict_creation(shortlisted_words)
words_cnt = [dictionary_[word] for word in shortlisted_words] | [
"os.path.exists",
"zipfile.ZipFile",
"collections.Counter",
"os.path.isdir",
"six.moves.urllib.request.urlretrieve"
] | [((1494, 1523), 'collections.Counter', 'collections.Counter', (['ft_token'], {}), '(ft_token)\n', (1513, 1523), False, 'import collections\n'), ((609, 640), 'os.path.isdir', 'os.path.isdir', (['extracted_folder'], {}), '(extracted_folder)\n', (622, 640), False, 'import os\n'), ((1887, 1925), 'collections.Counter', 'collections.Counter', (['shortlisted_words'], {}), '(shortlisted_words)\n', (1906, 1925), False, 'import collections\n'), ((390, 414), 'os.path.exists', 'os.path.exists', (['zip_file'], {}), '(zip_file)\n', (404, 414), False, 'import os\n'), ((438, 484), 'six.moves.urllib.request.urlretrieve', 'urlretrieve', (['(dataset_link + zip_file)', 'zip_file'], {}), '(dataset_link + zip_file, zip_file)\n', (449, 484), False, 'from six.moves.urllib.request import urlretrieve\n'), ((651, 676), 'zipfile.ZipFile', 'zipfile.ZipFile', (['zip_file'], {}), '(zip_file)\n', (666, 676), False, 'import zipfile\n')] |
# Generated by Django 2.0.3 on 2020-01-31 07:59
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: resets Flight.stripImage's
    # upload_to to the default '' (media root), altering only metadata
    # of the ImageField — no data migration is performed.

    dependencies = [
        ('app1', '0003_auto_20200129_0337'),
    ]

    operations = [
        migrations.AlterField(
            model_name='flight',
            name='stripImage',
            field=models.ImageField(upload_to=''),
        ),
    ]
| [
"django.db.models.ImageField"
] | [((337, 368), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': '""""""'}), "(upload_to='')\n", (354, 368), False, 'from django.db import migrations, models\n')] |
import os , time
from client.client import client
from client.text_color import color
def main():
    """Run a chkrootkit scan, then prompt the user to repeat or quit."""
    client().system("clear")
    client().system(f"figlet Tool Kit Searcher")
    client().puts(f"Welcome to Tool Kit Search Tool\n\ngithub.com/Altify-Development")
    client().puts(f"{color.GREEN}[✔] Starting ...{color.RESET_ALL}")
    time.sleep(2.5)
    client().system(f"chkrootkit")
    returnED = client().read(f"{color.GREEN} Do you want to make a new search?(Y/N) {color.RESET_ALL}").lower()
    if returnED == "y":
        client().system("python3 tools/toolkit_search.py")
    elif returnED == "n":
        client().puts(f"""
 View My Other Projects On Github:
 {color.YELLOW}https://github.com/Altify-Development\n\nhttps://github.com/Altify-Developing{color.RESET_ALL}
 goodbye 👍👍
 """)
    else:
        client().puts("There is no such option")
        # BUG FIX: the original called ``.lower`` without parentheses, so
        # returnED was a bound method and never equaled any string; it
        # then compared against upper-case "Y"/"N" even though the input
        # was (supposedly) lower-cased. Both branches were unreachable.
        returnED = client().read(f"{color.GREEN} Do you want to make a new search?(Y/N) {color.RESET_ALL}").lower()
        if returnED == "y":
            client().system("python3 tools/toolkit_search.py")
        elif returnED == "n":
            client().puts("We are waiting for you again")
        else:
            client().puts("There is no such option")
if __name__ == '__main__' and os.name == 'posix':
    # Entry point: the tool shells out to POSIX-only binaries (figlet,
    # chkrootkit), so it only runs on POSIX systems.
    if os.getuid() == 0:
        main()
    else:
        # NOTE(review): the failed check here is root privileges
        # (getuid != 0), but the message text talks about Debian — the
        # wording and the condition do not match; confirm intent.
        client().puts(f"""
{color.GREEN}
█░█ ▄▀█ █▀▀ █▄▀ █ █▄░█ █▀▀ ▀█▀ █▀█ █▀█ █░░ █▄▀ █ ▀█▀ █▀ Developed by Altify
█▀█ █▀█ █▄▄ █░█ █ █░▀█ █▄█ ░█░ █▄█ █▄█ █▄▄ █░█ █ ░█░ ▄█
{color.RESET_ALL}
{color.RED}[!] You can run this script only on debian system.{color.RESET_ALL}
 View My Other Projects On Github:
 {color.YELLOW}https://github.com/Altify-Development\n\nhttps://github.com/Altify-Developing{color.RESET_ALL}
 goodbye 👍👍
 """)
| [
"os.getuid",
"client.client.client",
"time.sleep"
] | [((354, 369), 'time.sleep', 'time.sleep', (['(2.5)'], {}), '(2.5)\n', (364, 369), False, 'import os, time\n'), ((1294, 1305), 'os.getuid', 'os.getuid', ([], {}), '()\n', (1303, 1305), False, 'import os, time\n'), ((112, 120), 'client.client.client', 'client', ([], {}), '()\n', (118, 120), False, 'from client.client import client\n'), ((146, 154), 'client.client.client', 'client', ([], {}), '()\n', (152, 154), False, 'from client.client import client\n'), ((196, 204), 'client.client.client', 'client', ([], {}), '()\n', (202, 204), False, 'from client.client import client\n'), ((285, 293), 'client.client.client', 'client', ([], {}), '()\n', (291, 293), False, 'from client.client import client\n'), ((374, 382), 'client.client.client', 'client', ([], {}), '()\n', (380, 382), False, 'from client.client import client\n'), ((1337, 1345), 'client.client.client', 'client', ([], {}), '()\n', (1343, 1345), False, 'from client.client import client\n'), ((551, 559), 'client.client.client', 'client', ([], {}), '()\n', (557, 559), False, 'from client.client import client\n'), ((1036, 1044), 'client.client.client', 'client', ([], {}), '()\n', (1042, 1044), False, 'from client.client import client\n'), ((421, 429), 'client.client.client', 'client', ([], {}), '()\n', (427, 429), False, 'from client.client import client\n'), ((638, 646), 'client.client.client', 'client', ([], {}), '()\n', (644, 646), False, 'from client.client import client\n'), ((846, 854), 'client.client.client', 'client', ([], {}), '()\n', (852, 854), False, 'from client.client import client\n'), ((1123, 1131), 'client.client.client', 'client', ([], {}), '()\n', (1129, 1131), False, 'from client.client import client\n'), ((1189, 1197), 'client.client.client', 'client', ([], {}), '()\n', (1195, 1197), False, 'from client.client import client\n'), ((908, 916), 'client.client.client', 'client', ([], {}), '()\n', (914, 916), False, 'from client.client import client\n')] |
import FWCore.ParameterSet.Config as cms
# CMSSW EventSetup source that provides CTPPS Roman Pot alignment
# corrections read from XML files; all file lists default to empty here
# and are expected to be filled by the configuration that imports this.
ctppsRPAlignmentCorrectionsDataESSourceXML = cms.ESSource("CTPPSRPAlignmentCorrectionsDataESSourceXML",
    verbosity = cms.untracked.uint32(0),  # 0 = silent
    MeasuredFiles = cms.vstring(),
    RealFiles = cms.vstring(),
    MisalignedFiles = cms.vstring()
)
| [
"FWCore.ParameterSet.Config.vstring",
"FWCore.ParameterSet.Config.untracked.uint32"
] | [((162, 185), 'FWCore.ParameterSet.Config.untracked.uint32', 'cms.untracked.uint32', (['(0)'], {}), '(0)\n', (182, 185), True, 'import FWCore.ParameterSet.Config as cms\n'), ((208, 221), 'FWCore.ParameterSet.Config.vstring', 'cms.vstring', ([], {}), '()\n', (219, 221), True, 'import FWCore.ParameterSet.Config as cms\n'), ((239, 252), 'FWCore.ParameterSet.Config.vstring', 'cms.vstring', ([], {}), '()\n', (250, 252), True, 'import FWCore.ParameterSet.Config as cms\n'), ((276, 289), 'FWCore.ParameterSet.Config.vstring', 'cms.vstring', ([], {}), '()\n', (287, 289), True, 'import FWCore.ParameterSet.Config as cms\n')] |
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
from config import gamma, lr
def flat_grad(grads):
    """Concatenate a sequence of gradient tensors into one flat 1-D tensor."""
    return torch.cat([g.view(-1) for g in grads])
def flat_hessian(hessians):
    """Concatenate Hessian(-vector-product) tensors into one flat 1-D tensor.

    ``.data`` detaches the result from the autograd graph, matching the
    original implementation.
    """
    flat_parts = [h.contiguous().view(-1) for h in hessians]
    return torch.cat(flat_parts).data
def flat_params(model):
    """Concatenate all of *model*'s parameter values into one flat 1-D tensor."""
    return torch.cat([p.data.view(-1) for p in model.parameters()])
def update_model(model, new_params):
    """Copy the flat vector *new_params* back into *model*'s parameters,
    consuming it in parameter-iteration order."""
    offset = 0
    for param in model.parameters():
        numel = param.numel()
        chunk = new_params[offset: offset + numel].view(param.size())
        param.data.copy_(chunk)
        offset += numel
def kl_divergence(net, old_net, states):
    """Per-state KL divergence KL(old_policy || policy), shape (batch, 1).

    The old policy is detached so gradients only flow through *net*.
    """
    policy = net(states)
    old_policy = old_net(states).detach()
    per_action = old_policy * torch.log(old_policy / policy)
    return per_action.sum(1, keepdim=True)
def fisher_vector_product(net, states, p, cg_damp=0.1):
    """Return (F + cg_damp * I) @ p, where F is the Fisher information
    matrix of *net*'s policy, computed via double backprop on the KL.
    """
    kl = kl_divergence(net, net, states).mean()
    # create_graph=True keeps the graph so we can differentiate again
    # (higher-order derivative products).
    grads = torch.autograd.grad(kl, net.parameters(), create_graph=True)
    flat_kl_grad = flat_grad(grads)
    grad_dot_p = (flat_kl_grad * p.detach()).sum()
    hessian_p = torch.autograd.grad(grad_dot_p, net.parameters())
    flat_hessian_p = flat_hessian(hessian_p)
    return flat_hessian_p + cg_damp * p.detach()
def conjugate_gradient(net, states, loss_grad, n_step=10, residual_tol=1e-10):
    """Approximately solve F x = loss_grad with conjugate gradient,
    using Fisher-vector products instead of materializing F.
    """
    x = torch.zeros(loss_grad.size())
    residual = loss_grad.clone()
    direction = loss_grad.clone()
    rs_old = torch.dot(residual, residual)
    for _ in range(n_step):
        fvp = fisher_vector_product(net, states, direction)
        step_size = rs_old / torch.dot(direction, fvp)
        x += step_size * direction
        residual -= step_size * fvp
        rs_new = torch.dot(residual, residual)
        direction = residual + (rs_new / rs_old) * direction
        rs_old = rs_new
        if rs_old < residual_tol:
            break
    return x
class TNPG(nn.Module):
    """Truncated Natural Policy Gradient agent.

    The policy is a two-layer MLP; updates step along the natural
    gradient direction obtained via conjugate gradient on the Fisher
    information matrix.
    """

    def __init__(self, num_inputs, num_outputs):
        super(TNPG, self).__init__()
        self.t = 0
        self.num_inputs = num_inputs
        self.num_outputs = num_outputs

        self.fc_1 = nn.Linear(num_inputs, 128)
        self.fc_2 = nn.Linear(128, num_outputs)

        for m in self.modules():
            if isinstance(m, nn.Linear):
                # FIX: use the in-place xavier_uniform_; the underscore-less
                # variant is deprecated (same initialization behavior).
                nn.init.xavier_uniform_(m.weight)

    def forward(self, input):
        """Return action probabilities for a batch of states."""
        x = torch.tanh(self.fc_1(input))
        # FIX: pass dim explicitly — implicit-dimension softmax is
        # deprecated; dim=-1 matches the old default for the 1-D/2-D
        # inputs this network receives (the action axis).
        policy = F.softmax(self.fc_2(x), dim=-1)
        return policy

    @classmethod
    def train_model(cls, net, transitions):
        """One natural-gradient update of *net* from a batch of
        transitions; returns the negated objective for logging."""
        states, actions, rewards, masks = transitions.state, transitions.action, transitions.reward, transitions.mask

        states = torch.stack(states)
        actions = torch.stack(actions)
        rewards = torch.Tensor(rewards)
        masks = torch.Tensor(masks)

        # Discounted returns, accumulated backwards; masks reset the
        # running return at episode boundaries.
        returns = torch.zeros_like(rewards)
        running_return = 0
        for t in reversed(range(len(rewards))):
            running_return = rewards[t] + gamma * running_return * masks[t]
            returns[t] = running_return

        policies = net(states)
        policies = policies.view(-1, net.num_outputs)
        policy_actions = (policies * actions.detach()).sum(dim=1)

        loss = (policy_actions * returns).mean()

        loss_grad = torch.autograd.grad(loss, net.parameters())
        loss_grad = flat_grad(loss_grad)
        # Natural gradient direction: F^-1 g via conjugate gradient.
        step_dir = conjugate_gradient(net, states, loss_grad.data)

        params = flat_params(net)
        new_params = params + lr * step_dir
        update_model(net, new_params)
        return -loss

    def get_action(self, input):
        """Sample an action index from the current policy for one state."""
        policy = self.forward(input)
        policy = policy[0].data.numpy()

        action = np.random.choice(self.num_outputs, 1, p=policy)[0]
        return action
| [
"torch.log",
"numpy.random.choice",
"torch.stack",
"torch.Tensor",
"torch.nn.Linear",
"torch.nn.init.xavier_uniform",
"torch.zeros_like",
"torch.cat",
"torch.dot"
] | [((246, 269), 'torch.cat', 'torch.cat', (['grad_flatten'], {}), '(grad_flatten)\n', (255, 269), False, 'import torch\n'), ((667, 684), 'torch.cat', 'torch.cat', (['params'], {}), '(params)\n', (676, 684), False, 'import torch\n'), ((1921, 1936), 'torch.dot', 'torch.dot', (['r', 'r'], {}), '(r, r)\n', (1930, 1936), False, 'import torch\n'), ((464, 491), 'torch.cat', 'torch.cat', (['hessians_flatten'], {}), '(hessians_flatten)\n', (473, 491), False, 'import torch\n'), ((1157, 1187), 'torch.log', 'torch.log', (['(old_policy / policy)'], {}), '(old_policy / policy)\n', (1166, 1187), False, 'import torch\n'), ((2144, 2159), 'torch.dot', 'torch.dot', (['r', 'r'], {}), '(r, r)\n', (2153, 2159), False, 'import torch\n'), ((2545, 2571), 'torch.nn.Linear', 'nn.Linear', (['num_inputs', '(128)'], {}), '(num_inputs, 128)\n', (2554, 2571), True, 'import torch.nn as nn\n'), ((2592, 2619), 'torch.nn.Linear', 'nn.Linear', (['(128)', 'num_outputs'], {}), '(128, num_outputs)\n', (2601, 2619), True, 'import torch.nn as nn\n'), ((3078, 3097), 'torch.stack', 'torch.stack', (['states'], {}), '(states)\n', (3089, 3097), False, 'import torch\n'), ((3116, 3136), 'torch.stack', 'torch.stack', (['actions'], {}), '(actions)\n', (3127, 3136), False, 'import torch\n'), ((3155, 3176), 'torch.Tensor', 'torch.Tensor', (['rewards'], {}), '(rewards)\n', (3167, 3176), False, 'import torch\n'), ((3193, 3212), 'torch.Tensor', 'torch.Tensor', (['masks'], {}), '(masks)\n', (3205, 3212), False, 'import torch\n'), ((3232, 3257), 'torch.zeros_like', 'torch.zeros_like', (['rewards'], {}), '(rewards)\n', (3248, 3257), False, 'import torch\n'), ((2048, 2069), 'torch.dot', 'torch.dot', (['p', 'A_dot_p'], {}), '(p, A_dot_p)\n', (2057, 2069), False, 'import torch\n'), ((4095, 4142), 'numpy.random.choice', 'np.random.choice', (['self.num_outputs', '(1)'], {'p': 'policy'}), '(self.num_outputs, 1, p=policy)\n', (4111, 4142), True, 'import numpy as np\n'), ((2711, 2743), 'torch.nn.init.xavier_uniform', 
'nn.init.xavier_uniform', (['m.weight'], {}), '(m.weight)\n', (2733, 2743), True, 'import torch.nn as nn\n')] |
import json
import os
import sys
import jsonpatch
import unittest
import pytest
from deepdiff import DeepDiff
from mock import patch
from dump.helper import create_template_dict, sort_lists
from dump.plugins.port import Port
from dump.match_infra import MatchEngine, ConnectionPool
from swsscommon.swsscommon import SonicV2Connector
# Location for dedicated db's used for UT
module_tests_path = os.path.dirname(__file__)
dump_tests_path = os.path.join(module_tests_path, "../")
tests_path = os.path.join(dump_tests_path, "../")
dump_test_input = os.path.join(tests_path, "dump_input")
port_files_path = os.path.join(dump_test_input, "port")
# Define the mock files to read from
dedicated_dbs = {}
dedicated_dbs['CONFIG_DB'] = os.path.join(port_files_path, "config_db.json")
dedicated_dbs['APPL_DB'] = os.path.join(port_files_path, "appl_db.json")
dedicated_dbs['ASIC_DB'] = os.path.join(port_files_path, "asic_db.json")
dedicated_dbs['STATE_DB'] = os.path.join(port_files_path, "state_db.json")
def populate_mock(db, db_names):
    """Load the JSON mock fixture of every database in *db_names* into
    *db*, wiping any pre-existing default contents first."""
    for db_name in db_names:
        db.connect(db_name)
        # Delete any default data
        db.delete_all_by_pattern(db_name, "*")
        with open(dedicated_dbs[db_name]) as fixture_file:
            mock_entries = json.load(fixture_file)
        for key, fields in mock_entries.items():
            for field, value in fields.items():
                db.set(db_name, key, field, value)
@pytest.fixture(scope="class", autouse=True)
def match_engine():
    """Class-scoped pytest fixture yielding a MatchEngine wired to mock
    databases populated from the JSON fixtures declared above."""
    print("SETUP")
    os.environ["VERBOSE"] = "1"
    # Monkey Patch the SonicV2Connector Object (import has the patching
    # side effect; the name itself is unused)
    from ...mock_tables import dbconnector
    db = SonicV2Connector()
    # Populate the db with mock data
    db_names = list(dedicated_dbs.keys())
    try:
        populate_mock(db, db_names)
    except Exception as e:
        assert False, "Mock initialization failed: " + str(e)
    # Initialize connection pool
    conn_pool = ConnectionPool()
    DEF_NS = ''  # Default Namespace
    conn_pool.cache = {DEF_NS: {'conn': db,
                                'connected_to': set(db_names)}}
    # Initialize match_engine
    match_engine = MatchEngine(conn_pool)
    yield match_engine
    # Teardown runs after the last test of the class
    print("TEARDOWN")
    os.environ["VERBOSE"] = "0"
@pytest.mark.usefixtures("match_engine")
class TestPortModule:
    """End-to-end tests of the Port dump plugin: each scenario checks
    which keys the plugin reports as found / not-found across
    CONFIG_DB, APPL_DB, ASIC_DB and STATE_DB for a given interface.

    NOTE(review): the expected oids and key names must match the JSON
    fixtures under dump_input/port exactly.
    """

    def test_working_state(self, match_engine):
        """
        Scenario: When the config is properly applied and propagated
        (keys present in all four databases).
        """
        params = {Port.ARG_NAME: "Ethernet176", "namespace": ""}
        m_port = Port(match_engine)
        returned = m_port.execute(params)
        expect = create_template_dict(dbs=["CONFIG_DB", "APPL_DB", "ASIC_DB", "STATE_DB"])
        expect["CONFIG_DB"]["keys"].append("PORT|Ethernet176")
        expect["APPL_DB"]["keys"].append("PORT_TABLE:Ethernet176")
        expect["STATE_DB"]["keys"].append("PORT_TABLE|Ethernet176")
        expect["ASIC_DB"]["keys"].append("ASIC_STATE:SAI_OBJECT_TYPE_PORT:oid:0x100000000036a")
        expect["ASIC_DB"]["keys"].append("ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF:oid:0xd000000000a4d")
        ddiff = DeepDiff(sort_lists(returned), sort_lists(expect), ignore_order=True)
        assert not ddiff, ddiff

    def test_missing_asic_port(self, match_engine):
        """
        Scenario: When the config was applied and just the SAI_OBJECT_TYPE_PORT is missing
        """
        params = {Port.ARG_NAME: "Ethernet160", "namespace": ""}
        m_port = Port(match_engine)
        returned = m_port.execute(params)
        expect = create_template_dict(dbs=["CONFIG_DB", "APPL_DB", "ASIC_DB", "STATE_DB"])
        expect["CONFIG_DB"]["keys"].append("PORT|Ethernet160")
        expect["APPL_DB"]["keys"].append("PORT_TABLE:Ethernet160")
        expect["STATE_DB"]["keys"].append("PORT_TABLE|Ethernet160")
        expect["ASIC_DB"]["keys"].append("ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF:oid:0xd000000000a49")
        expect["ASIC_DB"]["tables_not_found"].append("ASIC_STATE:SAI_OBJECT_TYPE_PORT")
        ddiff = DeepDiff(sort_lists(returned), sort_lists(expect), ignore_order=True)
        assert not ddiff, ddiff

    def test_missing_asic_hostif(self, match_engine):
        """
        Scenario: When the config was applied and it did not propagate to ASIC DB
        (neither PORT nor HOSTIF objects were created).
        """
        params = {Port.ARG_NAME: "Ethernet164", "namespace": ""}
        m_port = Port(match_engine)
        returned = m_port.execute(params)
        expect = create_template_dict(dbs=["CONFIG_DB", "APPL_DB", "ASIC_DB", "STATE_DB"])
        expect["CONFIG_DB"]["keys"].append("PORT|Ethernet164")
        expect["APPL_DB"]["keys"].append("PORT_TABLE:Ethernet164")
        expect["STATE_DB"]["keys"].append("PORT_TABLE|Ethernet164")
        expect["ASIC_DB"]["tables_not_found"].append("ASIC_STATE:SAI_OBJECT_TYPE_PORT")
        expect["ASIC_DB"]["tables_not_found"].append("ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF")
        ddiff = DeepDiff(returned, expect, ignore_order=True)
        assert not ddiff, ddiff

    def test_missing_state_and_appl(self, match_engine):
        """
        Scenario: When the config was applied and it did not propagate to other db's
        (only CONFIG_DB has the entry).
        """
        params = {Port.ARG_NAME: "Ethernet156", "namespace": ""}
        m_port = Port(match_engine)
        returned = m_port.execute(params)
        expect = create_template_dict(dbs=["CONFIG_DB", "APPL_DB", "ASIC_DB", "STATE_DB"])
        expect["CONFIG_DB"]["keys"].append("PORT|Ethernet156")
        expect["APPL_DB"]["tables_not_found"].append("PORT_TABLE")
        expect["STATE_DB"]["tables_not_found"].append("PORT_TABLE")
        expect["ASIC_DB"]["tables_not_found"].append("ASIC_STATE:SAI_OBJECT_TYPE_PORT")
        expect["ASIC_DB"]["tables_not_found"].append("ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF")
        ddiff = DeepDiff(returned, expect, ignore_order=True)
        assert not ddiff, ddiff

    def test_no_port(self, match_engine):
        """
        Scenario: When no entry for the port is present in any of the db's
        """
        params = {Port.ARG_NAME: "Ethernet152", "namespace": ""}
        m_port = Port(match_engine)
        returned = m_port.execute(params)
        expect = create_template_dict(dbs=["CONFIG_DB", "APPL_DB", "ASIC_DB", "STATE_DB"])
        expect["CONFIG_DB"]["tables_not_found"].append("PORT")
        expect["APPL_DB"]["tables_not_found"].append("PORT_TABLE")
        expect["STATE_DB"]["tables_not_found"].append("PORT_TABLE")
        expect["ASIC_DB"]["tables_not_found"].append("ASIC_STATE:SAI_OBJECT_TYPE_PORT")
        expect["ASIC_DB"]["tables_not_found"].append("ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF")
        ddiff = DeepDiff(returned, expect, ignore_order=True)
        assert not ddiff, ddiff

    def test_all_args(self, match_engine):
        """
        Scenario: Verify Whether the get_all_args method is working as expected
        (it should list every port present in CONFIG_DB).
        """
        params = {}
        m_port = Port(match_engine)
        returned = m_port.get_all_args("")
        expect = ["Ethernet156", "Ethernet160", "Ethernet164", "Ethernet176"]
        ddiff = DeepDiff(expect, returned, ignore_order=True)
        assert not ddiff, ddiff
| [
"deepdiff.DeepDiff",
"dump.plugins.port.Port",
"dump.match_infra.ConnectionPool",
"os.path.join",
"dump.helper.create_template_dict",
"json.load",
"os.path.dirname",
"pytest.mark.usefixtures",
"dump.match_infra.MatchEngine",
"swsscommon.swsscommon.SonicV2Connector",
"pytest.fixture",
"dump.hel... | [((396, 421), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (411, 421), False, 'import os\n'), ((440, 478), 'os.path.join', 'os.path.join', (['module_tests_path', '"""../"""'], {}), "(module_tests_path, '../')\n", (452, 478), False, 'import os\n'), ((492, 528), 'os.path.join', 'os.path.join', (['dump_tests_path', '"""../"""'], {}), "(dump_tests_path, '../')\n", (504, 528), False, 'import os\n'), ((547, 585), 'os.path.join', 'os.path.join', (['tests_path', '"""dump_input"""'], {}), "(tests_path, 'dump_input')\n", (559, 585), False, 'import os\n'), ((604, 641), 'os.path.join', 'os.path.join', (['dump_test_input', '"""port"""'], {}), "(dump_test_input, 'port')\n", (616, 641), False, 'import os\n'), ((728, 775), 'os.path.join', 'os.path.join', (['port_files_path', '"""config_db.json"""'], {}), "(port_files_path, 'config_db.json')\n", (740, 775), False, 'import os\n'), ((803, 848), 'os.path.join', 'os.path.join', (['port_files_path', '"""appl_db.json"""'], {}), "(port_files_path, 'appl_db.json')\n", (815, 848), False, 'import os\n'), ((876, 921), 'os.path.join', 'os.path.join', (['port_files_path', '"""asic_db.json"""'], {}), "(port_files_path, 'asic_db.json')\n", (888, 921), False, 'import os\n'), ((950, 996), 'os.path.join', 'os.path.join', (['port_files_path', '"""state_db.json"""'], {}), "(port_files_path, 'state_db.json')\n", (962, 996), False, 'import os\n'), ((1395, 1438), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""class"""', 'autouse': '(True)'}), "(scope='class', autouse=True)\n", (1409, 1438), False, 'import pytest\n'), ((2209, 2248), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""match_engine"""'], {}), "('match_engine')\n", (2232, 2248), False, 'import pytest\n'), ((1611, 1629), 'swsscommon.swsscommon.SonicV2Connector', 'SonicV2Connector', ([], {}), '()\n', (1627, 1629), False, 'from swsscommon.swsscommon import SonicV2Connector\n'), ((1895, 1911), 'dump.match_infra.ConnectionPool', 
'ConnectionPool', ([], {}), '()\n', (1909, 1911), False, 'from dump.match_infra import MatchEngine, ConnectionPool\n'), ((2106, 2128), 'dump.match_infra.MatchEngine', 'MatchEngine', (['conn_pool'], {}), '(conn_pool)\n', (2117, 2128), False, 'from dump.match_infra import MatchEngine, ConnectionPool\n'), ((2494, 2512), 'dump.plugins.port.Port', 'Port', (['match_engine'], {}), '(match_engine)\n', (2498, 2512), False, 'from dump.plugins.port import Port\n'), ((2572, 2645), 'dump.helper.create_template_dict', 'create_template_dict', ([], {'dbs': "['CONFIG_DB', 'APPL_DB', 'ASIC_DB', 'STATE_DB']"}), "(dbs=['CONFIG_DB', 'APPL_DB', 'ASIC_DB', 'STATE_DB'])\n", (2592, 2645), False, 'from dump.helper import create_template_dict, sort_lists\n'), ((3406, 3424), 'dump.plugins.port.Port', 'Port', (['match_engine'], {}), '(match_engine)\n', (3410, 3424), False, 'from dump.plugins.port import Port\n'), ((3484, 3557), 'dump.helper.create_template_dict', 'create_template_dict', ([], {'dbs': "['CONFIG_DB', 'APPL_DB', 'ASIC_DB', 'STATE_DB']"}), "(dbs=['CONFIG_DB', 'APPL_DB', 'ASIC_DB', 'STATE_DB'])\n", (3504, 3557), False, 'from dump.helper import create_template_dict, sort_lists\n'), ((4303, 4321), 'dump.plugins.port.Port', 'Port', (['match_engine'], {}), '(match_engine)\n', (4307, 4321), False, 'from dump.plugins.port import Port\n'), ((4381, 4454), 'dump.helper.create_template_dict', 'create_template_dict', ([], {'dbs': "['CONFIG_DB', 'APPL_DB', 'ASIC_DB', 'STATE_DB']"}), "(dbs=['CONFIG_DB', 'APPL_DB', 'ASIC_DB', 'STATE_DB'])\n", (4401, 4454), False, 'from dump.helper import create_template_dict, sort_lists\n'), ((4847, 4892), 'deepdiff.DeepDiff', 'DeepDiff', (['returned', 'expect'], {'ignore_order': '(True)'}), '(returned, expect, ignore_order=True)\n', (4855, 4892), False, 'from deepdiff import DeepDiff\n'), ((5174, 5192), 'dump.plugins.port.Port', 'Port', (['match_engine'], {}), '(match_engine)\n', (5178, 5192), False, 'from dump.plugins.port import Port\n'), ((5252, 5325), 
'dump.helper.create_template_dict', 'create_template_dict', ([], {'dbs': "['CONFIG_DB', 'APPL_DB', 'ASIC_DB', 'STATE_DB']"}), "(dbs=['CONFIG_DB', 'APPL_DB', 'ASIC_DB', 'STATE_DB'])\n", (5272, 5325), False, 'from dump.helper import create_template_dict, sort_lists\n'), ((5718, 5763), 'deepdiff.DeepDiff', 'DeepDiff', (['returned', 'expect'], {'ignore_order': '(True)'}), '(returned, expect, ignore_order=True)\n', (5726, 5763), False, 'from deepdiff import DeepDiff\n'), ((6020, 6038), 'dump.plugins.port.Port', 'Port', (['match_engine'], {}), '(match_engine)\n', (6024, 6038), False, 'from dump.plugins.port import Port\n'), ((6098, 6171), 'dump.helper.create_template_dict', 'create_template_dict', ([], {'dbs': "['CONFIG_DB', 'APPL_DB', 'ASIC_DB', 'STATE_DB']"}), "(dbs=['CONFIG_DB', 'APPL_DB', 'ASIC_DB', 'STATE_DB'])\n", (6118, 6171), False, 'from dump.helper import create_template_dict, sort_lists\n'), ((6564, 6609), 'deepdiff.DeepDiff', 'DeepDiff', (['returned', 'expect'], {'ignore_order': '(True)'}), '(returned, expect, ignore_order=True)\n', (6572, 6609), False, 'from deepdiff import DeepDiff\n'), ((6827, 6845), 'dump.plugins.port.Port', 'Port', (['match_engine'], {}), '(match_engine)\n', (6831, 6845), False, 'from dump.plugins.port import Port\n'), ((6983, 7028), 'deepdiff.DeepDiff', 'DeepDiff', (['expect', 'returned'], {'ignore_order': '(True)'}), '(expect, returned, ignore_order=True)\n', (6991, 7028), False, 'from deepdiff import DeepDiff\n'), ((1242, 1254), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1251, 1254), False, 'import json\n'), ((3063, 3083), 'dump.helper.sort_lists', 'sort_lists', (['returned'], {}), '(returned)\n', (3073, 3083), False, 'from dump.helper import create_template_dict, sort_lists\n'), ((3085, 3103), 'dump.helper.sort_lists', 'sort_lists', (['expect'], {}), '(expect)\n', (3095, 3103), False, 'from dump.helper import create_template_dict, sort_lists\n'), ((3967, 3987), 'dump.helper.sort_lists', 'sort_lists', (['returned'], {}), 
'(returned)\n', (3977, 3987), False, 'from dump.helper import create_template_dict, sort_lists\n'), ((3989, 4007), 'dump.helper.sort_lists', 'sort_lists', (['expect'], {}), '(expect)\n', (3999, 4007), False, 'from dump.helper import create_template_dict, sort_lists\n')] |
from numpy.random import randint
def find_sum(a_list, target):
    """Return all pairs (a_list[i], a_list[j]) with i < j whose sum is target.

    Brute-force O(n^2) scan; pairs are reported in index order, duplicates
    included, matching the pair order of the naive nested loop.
    """
    matches = []
    size = len(a_list)
    for i in range(size):
        left = a_list[i]
        for j in range(i + 1, size):
            if left + a_list[j] == target:
                matches.append((left, a_list[j]))
    return matches
def fast_sum(a_list, target):
    """Return all pairs from a_list summing to target in a single O(n) pass.

    Each pair is emitted as (earlier_element, current_element), preserving
    the operand order the original implementation intended.
    """
    r = []
    seen = set()
    for x in a_list:
        y = target - x
        if y in seen:
            # Bug fix: the original called r.append(y, x), which raises
            # TypeError (list.append takes exactly one argument) as soon as
            # any matching pair is found. Append the tuple instead.
            r.append((y, x))
        seen.add(x)
    return r
if __name__ == '__main__':
    # Demo: 20 random values in [0, 100) and a random target in [0, 100),
    # then print the brute-force pair search result.
    values = randint(0, 100, 20)
    target = randint(100)
    print(f"values {values} target {target}")
    print(find_sum(values, target))
| [
"numpy.random.randint"
] | [((482, 501), 'numpy.random.randint', 'randint', (['(0)', '(100)', '(20)'], {}), '(0, 100, 20)\n', (489, 501), False, 'from numpy.random import randint\n'), ((515, 527), 'numpy.random.randint', 'randint', (['(100)'], {}), '(100)\n', (522, 527), False, 'from numpy.random import randint\n')] |
import asyncio
import random
import re
from abc import ABC
from typing import List, Iterable
from src.entity.proxy_entity import ProxyEntity
from src.enum.common import ProxyCoverEnum, ProxyTypeEnum
from src.log.logger import logger
from src.spider.abs_spider import AbsSpider
from bs4 import BeautifulSoup, Tag
from pyppeteer import launch
# Global registry filled by @spider_register: class name -> spider instance.
spider_collection = {}


def spider_register(cls):
    """Class decorator: instantiate *cls* once and register it by class name."""
    spider_collection[cls.__name__] = cls()
    logger.info(f'注册{cls.__name__}')
    return cls
@spider_register
class SpiderSpysOneIP(AbsSpider, ABC):
    """
    spys.one free-proxy spider.
    http://spys.one/proxys
    Drives a pyppeteer browser because the site renders its table with
    JavaScript; iterates a fixed list of country pages.
    """
    def __init__(self) -> None:
        super().__init__('spys.one IP代理爬虫')
    async def _scrape(self):
        # NOTE(review): addr_re / addr_port_re are built but never used below.
        addr_re = r'\d{2,3}\.\d{2,3}\.\d{2,3}\.\d{2,3}'
        addr_port_re = addr_re + r':\d{2,5}'
        countries = ['US', 'UK', 'DE', 'JP']
        browser = await launch(
            headless=False,
            handleSIGINT=False,
            handleSIGTERM=False,
            handleSIGHUP=False
        )
        page = await browser.newPage()
        # Dead branch as written: countries is always the hard-coded list
        # above, so this country-discovery path never runs.
        if countries is None:
            await page.goto(self.get_urls()[0], {
                "waitLoad": True,
                "waitNetworkIdle": True
            })
            await asyncio.sleep(random.uniform(2, 3))
            countries = [
                await page.evaluate('(ele) => ele.innerText', ele) for ele in await
                page.xpath('//a[@href]//*[@class="spy6"]//*[@class="spy4"]')
            ]
        res = []
        for country in countries:
            result = []
            logger.info(f"Extracting proxies from Spys {country}.")
            page_url = self.get_urls()[0]
            await page.goto(f'{page_url}{country}', {
                "waitLoad": True,
                "waitNetworkIdle": True
            })
            await asyncio.sleep(self.get_interval())
            # Each matching element's innerText is one tab-separated table row.
            table_rows = [
                await page.evaluate('(ele) => ele.innerText', ele)
                for ele in await page.xpath('//*[contains(@class,"spy1x")]')
            ]
            for row_data in table_rows:
                if "HTTP" in row_data or "SOCK" in row_data:
                    proxy_matches = row_data.split("\t")
                    protocol = proxy_matches[1]
                    # SOCKS5 proxies are skipped; only http/https are kept.
                    if "SOCKS5" in protocol:
                        continue
                    # Check HTTPS before HTTP: "HTTPS" also contains "HTTP".
                    if "HTTPS" in protocol:
                        protocol = 'https'
                    elif "HTTP" in protocol:
                        protocol = 'http'
                    else:
                        continue
                    ip_port = proxy_matches[0]
                    proxy_cover = proxy_matches[2]
                    region = proxy_matches[3]
                    result.append(ProxyEntity(f'{protocol}://{ip_port}',
                                               source=self._name,
                                               proxy_type=self._judge_proxy_type(protocol),
                                               proxy_cover=self._judge_proxy_cover(proxy_cover),
                                               region=region))
            res.extend(result)
            await asyncio.sleep(self.get_interval())
        # NOTE(review): `result` here is only the last country's batch, not
        # the grand total accumulated in `res`.
        print(f"Extracted {len(result)} total proxies from Spys One.")
        await browser.close()
        return res
    async def crawl(self):
        logger.info(f'{self._name}开始爬取...')
        logger.info(f'_scrape_free_proxy_list_net 开始运行...')
        result = await self._scrape()
        return result
    def get_urls(self) -> List[str]:
        return ['http://spys.one/free-proxy-list/']
    # Crawling too fast gets the client banned, hence the delay below.
    def get_interval(self) -> int:
        return 2
    def get_page_url(self, url, page) -> str:
        return url
    def get_encoding(self) -> str:
        return 'utf-8'
    @staticmethod
    def _judge_proxy_type(type_str: str):
        # Map a protocol string onto ProxyTypeEnum values.
        type_low = type_str.lower()
        if type_low == 'http':
            return ProxyTypeEnum.HTTP.value
        elif type_low == 'https':
            return ProxyTypeEnum.HTTPS.value
        else:
            return ProxyTypeEnum.UNKNOWN.value
    @staticmethod
    def _judge_proxy_cover(cover_str: str):
        # Site anonymity labels: 'HIA' maps to high cover, 'ANM' to normal.
        if cover_str == 'HIA':
            return ProxyCoverEnum.HIGH_COVER.value
        elif cover_str == 'ANM':
            return ProxyCoverEnum.NORMAL_COVER.value
        else:
            return ProxyCoverEnum.UNKNOWN.value
@spider_register
class SpiderFreeProxyListIP(AbsSpider, ABC):
    """
    free-proxy-list spider.
    https://free-proxy-list.net
    Drives a pyppeteer browser and pages through the table by clicking the
    "next" button until it disappears.
    """
    def __init__(self) -> None:
        super().__init__('free-proxy-list IP代理爬虫')
    async def _scrape(self):
        browser = await launch(
            headless=False,
            handleSIGINT=False,
            handleSIGTERM=False,
            handleSIGHUP=False
        )
        page = await browser.newPage()
        await page.goto(self.get_urls()[0], {
            "waitLoad": True,
            "waitNetworkIdle": True
        })
        res = []
        while True:
            result = []
            await asyncio.sleep(self.get_interval())
            # Lower-cased column headers become the dict keys below;
            # presumably the header cells' innerText ends with a tab, hence
            # the '\t'-suffixed lookup keys further down — TODO confirm.
            col_names = [
                await page.evaluate('(ele) => ele.innerText.toLowerCase()', ele)
                for ele in await page.xpath('//*[@id="proxylisttable"]/thead/*[@role="row"]//*[@aria-label]')
            ]
            for row in await page.xpath('//*[@id="proxylisttable"]/tbody/*[@role="row"]'):
                col_values = [
                    await page.evaluate('(ele) => ele.innerText', ele)
                    for ele in await row.xpath('./td')
                ]
                row_data = dict(zip(col_names, col_values))
                protocol = 'http'
                if row_data['https'] == 'yes':
                    protocol = 'https'
                ip = row_data['ip address\t'].replace("\t", '')
                port = row_data['port\t'].replace("\t", '')
                proxy_cover = row_data['anonymity\t'].replace("\t", '')
                region = row_data['code\t'].replace("\t", '')
                result.append(ProxyEntity(f'{protocol}://{ip}:{port}',
                                           source=self._name,
                                           proxy_type=self._judge_proxy_type(protocol),
                                           proxy_cover=self._judge_proxy_cover(proxy_cover),
                                           region=region))
            res.extend(result)
            next_button_ele = await page.xpath('//*[@class="fg-button ui-button ui-state-default next"]')
            if next_button_ele:
                await next_button_ele[0].click()
            else:
                # Last page reached: close the browser and return everything.
                # NOTE(review): logged at ERROR level for a normal event, and
                # len(result) is only the final page's batch, not the total.
                await browser.close()
                logger.error(f"Extracted {len(result)} total proxies from free-proxy-list.net")
                return res
    async def crawl(self):
        logger.info(f'{self._name}开始爬取...')
        logger.info(f'_scrape_free_proxy_list_net 开始运行...')
        result = await self._scrape()
        return result
    def get_urls(self) -> List[str]:
        return ['https://free-proxy-list.net']
    # Crawling too fast gets the client banned, hence the delay below.
    def get_interval(self) -> int:
        return 2
    def get_page_url(self, url, page) -> str:
        return url
    def get_encoding(self) -> str:
        return 'utf-8'
    @staticmethod
    def _judge_proxy_type(type_str: str):
        # Map a protocol string onto ProxyTypeEnum values.
        type_low = type_str.lower()
        if type_low == 'http':
            return ProxyTypeEnum.HTTP.value
        elif type_low == 'https':
            return ProxyTypeEnum.HTTPS.value
        else:
            return ProxyTypeEnum.UNKNOWN.value
    @staticmethod
    def _judge_proxy_cover(cover_str: str):
        # Site labels: 'elite proxy' -> high cover, 'anonymous' -> normal.
        if cover_str == 'elite proxy':
            return ProxyCoverEnum.HIGH_COVER.value
        elif cover_str == 'anonymous':
            return ProxyCoverEnum.NORMAL_COVER.value
        else:
            return ProxyCoverEnum.UNKNOWN.value
@spider_register
class Spider66Ip(AbsSpider):
    """Spider for 66ip.cn free proxies (slow refresh rate).

    http://www.66ip.cn/
    """

    def __init__(self) -> None:
        super().__init__('66IP代理爬虫')

    def do_crawl(self, resp) -> List[ProxyEntity]:
        """Parse one 66ip.cn listing page into ProxyEntity objects."""
        soup = BeautifulSoup(resp, 'lxml')
        table = soup.find('table', attrs={'width': '100%', 'bordercolor': '#6699ff'})
        proxies = []
        # The first <tr> is the column header, so slice it away.
        for row in table.find_all('tr')[1:]:
            cells = row.contents
            ip, port, region, cover = (c.text for c in cells[:4])
            proxies.append(ProxyEntity(f'http://{ip}:{port}',
                                       source=self._name,
                                       proxy_cover=self._judge_proxy_cover(cover),
                                       region=region))
        return proxies

    def get_urls(self) -> List[str]:
        return ['http://www.66ip.cn']

    def get_page_range(self) -> Iterable:
        # Listing pages 1..5.
        return range(1, 6)

    def get_page_url(self, url, page) -> str:
        return f'{url}/{page}.html'

    def get_encoding(self) -> str:
        # The site serves GB2312-encoded pages.
        return 'gb2312'

    @staticmethod
    def _judge_proxy_cover(cover_str: str):
        """'高匿代理' (high anonymity) maps to HIGH_COVER; anything else UNKNOWN."""
        return (ProxyCoverEnum.HIGH_COVER.value
                if cover_str == '高匿代理'
                else ProxyCoverEnum.UNKNOWN.value)
@spider_register
class SpiderQuanWangIp(AbsSpider):
    """
    全网IP代理爬虫 刷新速度:极快
    http://www.goubanjia.com/
    goubanjia.com spider (very fast refresh). The site obfuscates the port
    number; see _parse_ip_and_port for the decoding.
    """
    def __init__(self) -> None:
        super().__init__('全网IP代理爬虫')
    def do_crawl(self, resp) -> List[ProxyEntity]:
        # Table cells: td[0]=ip+encoded port, td[1]=anonymity, td[2]=protocol,
        # td[3]=region, td[4]=supplier.
        result = []
        soup = BeautifulSoup(resp, 'lxml')
        tr_list = soup.find('tbody').find_all('tr')
        for i, tr in enumerate(tr_list):
            tds = tr.find_all('td')
            id_and_port = tds[0]
            ip, port = self._parse_ip_and_port(id_and_port)
            proxy_cover = tds[1].text
            proxy_type = tds[2].text
            region = tds[3].contents[1].text
            supplier = tds[4].text
            result.append(ProxyEntity(f'{proxy_type.lower()}://{ip}:{port}',
                                       source=self._name,
                                       supplier=supplier,
                                       proxy_type=self._judge_proxy_type(proxy_type),
                                       proxy_cover=self._judge_proxy_cover(proxy_cover),
                                       region=region
                                       )
                          )
        return result
    def get_urls(self) -> List[str]:
        return ['http://www.goubanjia.com']
    def get_page_url(self, url, page) -> str:
        return url
    def _parse_ip_and_port(self, ip_td: Tag):
        # The IP text is split across <div>/<span> fragments (presumably an
        # anti-scraping measure): join every fragment except the last, which
        # carries the encoded port in its class attribute.
        res = []
        contents = ip_td.find_all(['div', 'span'])
        for content in contents:
            res.append(content.text)
        res.pop()
        ip = ''.join(res)
        port_tag = contents[-1]
        port_ori_str = port_tag.get('class')[1]
        # Decode the real port: each letter is one decimal digit
        # (ord(letter) - ord('A')); the assembled number divided by 8
        # gives the port.
        port = 0
        for c in port_ori_str:
            port *= 10
            port += (ord(c) - ord('A'))
        port /= 8
        port = int(port)
        return ip, str(port)
    def _judge_proxy_type(self, type_str: str):
        # Map a protocol string onto ProxyTypeEnum values.
        type_low = type_str.lower()
        if type_low == 'http':
            return ProxyTypeEnum.HTTP.value
        elif type_low == 'https':
            return ProxyTypeEnum.HTTPS.value
        else:
            return ProxyTypeEnum.UNKNOWN.value
    def _judge_proxy_cover(self, cover_str: str):
        # '透明' = transparent, '高匿' = high anonymity.
        if cover_str == '透明':
            return ProxyCoverEnum.TRANSPARENT.value
        elif cover_str == '高匿':
            return ProxyCoverEnum.HIGH_COVER.value
        else:
            return ProxyCoverEnum.UNKNOWN.value
@spider_register
class SpiderXiciIp(AbsSpider):
    """
    西刺代理爬虫 刷新速度:🐌慢
    基本上没几个代理个能用🆒
    https://www.xicidaili.com/
    xicidaili spider: slow refresh and few of its proxies actually work.
    """
    def __init__(self) -> None:
        super().__init__('西刺IP代理爬虫')
    def do_crawl(self, resp) -> List[ProxyEntity]:
        result = []
        soup = BeautifulSoup(resp, 'lxml')
        tab = soup.find('table', attrs={'id': 'ip_list'})
        if tab is None:
            # Layout changed or the request was blocked: nothing to parse.
            return []
        # Drop the header row and the trailing row.
        tr_list = tab.find_all('tr')[1: -1]
        for tr in tr_list:
            tds = tr.find_all('td')
            ip = tds[1].text
            port = tds[2].text
            proxy_cover = tds[4].text
            proxy_type = tds[5].text
            result.append(ProxyEntity(f'{proxy_type.lower()}://{ip}:{port}',
                                       source=self._name,
                                       proxy_cover=self._judge_proxy_cover(proxy_cover),
                                       proxy_type=self._judge_proxy_type(proxy_type),
                                       ))
        return result
    def get_urls(self) -> List[str]:
        return [
            'https://www.xicidaili.com/nn', # high-anonymity listing
            'https://www.xicidaili.com/nt'  # transparent listing
        ]
    def get_page_range(self) -> Iterable:
        return range(1, 3)
    @staticmethod
    def _judge_proxy_cover(cover_str: str):
        # '高匿' = high anonymity, '透明' = transparent.
        if cover_str == '高匿':
            return ProxyCoverEnum.HIGH_COVER.value
        if cover_str == '透明':
            return ProxyCoverEnum.TRANSPARENT.value
        else:
            return ProxyCoverEnum.UNKNOWN.value
    @staticmethod
    def _judge_proxy_type(type_str: str):
        # This site reports the protocol in upper case.
        if type_str == 'HTTPS':
            return ProxyTypeEnum.HTTPS.value
        if type_str == 'HTTP':
            return ProxyTypeEnum.HTTP.value
        else:
            return ProxyTypeEnum.UNKNOWN.value
@spider_register
class SpiderKuaiDaiLiIp(AbsSpider):
    """
    快代理IP 刷新速度: 极快
    https://www.kuaidaili.com/free
    kuaidaili spider (very fast refresh).
    """
    def __init__(self) -> None:
        super().__init__('快代理IP代理爬虫')
    def do_crawl(self, resp) -> List[ProxyEntity]:
        # Table cells: td[0]=ip, td[1]=port, td[2]=anonymity, td[3]=protocol,
        # td[4]=region.
        result = []
        soup = BeautifulSoup(resp, 'lxml')
        trs = soup.find('table').find('tbody').find_all('tr')
        for tr in trs:
            tds = tr.find_all('td')
            ip = tds[0].text
            port = tds[1].text
            proxy_cover = tds[2].text
            proxy_type = tds[3].text
            region = tds[4].text
            result.append(ProxyEntity(f'{proxy_type.lower()}://{ip}:{port}',
                                       source=self._name,
                                       proxy_type=self._judge_proxy_type(proxy_type),
                                       proxy_cover=self._judge_proxy_cover(proxy_cover),
                                       region=region))
        return result
    def get_urls(self) -> List[str]:
        return [
            'https://www.kuaidaili.com/free/inha',  # high-anonymity listing
            'https://www.kuaidaili.com/free/intr'  # transparent listing
        ]
    def get_page_range(self) -> Iterable:
        return range(1, 3)
    # Crawling too fast gets the client banned, hence the delay below.
    def get_interval(self) -> int:
        return 3
    def _judge_proxy_type(self, type_str: str):
        # Map a protocol string onto ProxyTypeEnum values.
        type_low = type_str.lower()
        if type_low == 'http':
            return ProxyTypeEnum.HTTP.value
        elif type_low == 'https':
            return ProxyTypeEnum.HTTPS.value
        else:
            return ProxyTypeEnum.UNKNOWN.value
    def _judge_proxy_cover(self, cover_str: str):
        # '透明' = transparent, '高匿名' = high anonymity.
        if cover_str == '透明':
            return ProxyCoverEnum.TRANSPARENT.value
        elif cover_str == '高匿名':
            return ProxyCoverEnum.HIGH_COVER.value
        else:
            return ProxyCoverEnum.UNKNOWN.value
@spider_register
class SpiderYunDaiLiIp(AbsSpider):
    """
    云代理IP 刷新速度: 快
    http://www.ip3366.net/free
    ip3366 "cloud proxy" spider (fast refresh).
    """
    def __init__(self) -> None:
        super().__init__('云代理IP爬虫')
    def do_crawl(self, resp) -> List[ProxyEntity]:
        # Table cells: td[0]=ip, td[1]=port, td[2]=anonymity, td[3]=protocol,
        # td[4]=region.
        result = []
        soup = BeautifulSoup(resp, 'lxml')
        trs = soup.find('table').find('tbody').find_all('tr')
        for tr in trs:
            tds = tr.find_all('td')
            ip = tds[0].text
            port = tds[1].text
            proxy_cover = tds[2].text
            proxy_type = tds[3].text
            region = tds[4].text
            result.append(ProxyEntity(f'{proxy_type.lower()}://{ip}:{port}',
                                       source=self._name,
                                       proxy_type=self._judge_proxy_type(proxy_type),
                                       proxy_cover=self._judge_proxy_cover(proxy_cover),
                                       region=region))
        return result
    def get_urls(self) -> List[str]:
        return [
            'http://www.ip3366.net/free/?stype=1',  # high-anonymity listing
            'http://www.ip3366.net/free/?stype=2'  # transparent or normal listing
        ]
    def get_page_range(self) -> Iterable:
        return range(1, 3)
    def get_page_url(self, url, page) -> str:
        # The base URLs already carry a query string, so append with '&'.
        return f'{url}&page={page}'
    def _judge_proxy_type(self, type_str: str):
        # Map a protocol string onto ProxyTypeEnum values.
        type_low = type_str.lower()
        if type_low == 'http':
            return ProxyTypeEnum.HTTP.value
        elif type_low == 'https':
            return ProxyTypeEnum.HTTPS.value
        else:
            return ProxyTypeEnum.UNKNOWN.value
    def _judge_proxy_cover(self, cover_str: str):
        # '透明代理IP' = transparent, '高匿代理IP' = high anonymity,
        # '普通代理IP' = normal anonymity.
        if cover_str == '透明代理IP':
            return ProxyCoverEnum.TRANSPARENT.value
        elif cover_str == '高匿代理IP':
            return ProxyCoverEnum.HIGH_COVER.value
        elif cover_str == '普通代理IP':
            return ProxyCoverEnum.NORMAL_COVER.value
        else:
            return ProxyCoverEnum.UNKNOWN.value
@spider_register
class SpiderIpHaiIp(AbsSpider):
    """
    IP海代理IP 刷新速度: 8分钟/1个
    有时会连不上
    http://www.iphai.com
    iphai spider: very slow refresh and the site is sometimes unreachable.
    """
    def __init__(self) -> None:
        super().__init__('IP海代理IP爬虫')
    def do_crawl(self, resp) -> List[ProxyEntity]:
        result = []
        soup = BeautifulSoup(resp, 'lxml')
        table = soup.find('table')
        if table is None:
            # Unexpected page layout (or an error page): nothing to parse.
            return []
        tbody = soup.find('tbody')
        if tbody is None:
            return []
        trs = tbody.find_all('tr')
        for i, tr in enumerate(trs):
            if i == 0:
                # Skip the header row.
                continue
            tds = tr.find_all('td')
            ip = tds[0].text
            port = tds[1].text
            proxy_cover = tds[2].text
            # An empty protocol cell defaults to plain http.
            proxy_type = tds[3].text if tds[3].text != '' else 'http'
            region = tds[4].text
            result.append(ProxyEntity(f'{proxy_type.lower()}://{ip}:{port}',
                                      source=self._name,
                                      proxy_type=self._judge_proxy_type(proxy_type),
                                      proxy_cover=self._judge_proxy_cover(proxy_cover),
                                      region=region))
        return result
    def get_urls(self) -> List[str]:
        return [
            'http://www.iphai.com/free/ng', # domestic, high anonymity
            'http://www.iphai.com/free/np', # domestic, normal
            'http://www.iphai.com/free/wg', # foreign, high anonymity
            'http://www.iphai.com/free/wp', # foreign, normal
        ]
    # Crawling too fast gets the client banned, hence the delay below.
    def get_interval(self) -> int:
        return 2
    def get_page_url(self, url, page) -> str:
        return url
    @staticmethod
    def _judge_proxy_type(type_str: str):
        # Map a protocol string onto ProxyTypeEnum values.
        type_low = type_str.lower()
        if type_low == 'http':
            return ProxyTypeEnum.HTTP.value
        elif type_low == 'https':
            return ProxyTypeEnum.HTTPS.value
        else:
            return ProxyTypeEnum.UNKNOWN.value
    @staticmethod
    def _judge_proxy_cover(cover_str: str):
        # '透明' = transparent, '高匿' = high anonymity, '普匿' = normal anonymity.
        if cover_str == '透明':
            return ProxyCoverEnum.TRANSPARENT.value
        elif cover_str == '高匿':
            return ProxyCoverEnum.HIGH_COVER.value
        elif cover_str == '普匿':
            return ProxyCoverEnum.NORMAL_COVER.value
        else:
            return ProxyCoverEnum.UNKNOWN.value
@spider_register
class SpiderMianFeiDaiLiIp(AbsSpider):
    """
    免费代理IP库
    http://ip.jiangxianli.com/
    jiangxianli free-proxy-pool spider.
    """
    def __init__(self) -> None:
        super().__init__('免费代理IP爬虫')
    def do_crawl(self, resp) -> List[ProxyEntity]:
        # Table cells: td[0]=ip, td[1]=port, td[2]=anonymity, td[3]=protocol,
        # td[4]=region, td[5]=supplier.
        result = []
        soup = BeautifulSoup(resp, 'lxml')
        table = soup.find('table')
        if table is None:
            # Unexpected page layout (or an error page): nothing to parse.
            return []
        tbody = soup.find('tbody')
        if tbody is None:
            return []
        trs = tbody.find_all('tr')
        for i, tr in enumerate(trs):
            if i == 0:
                # Skip the header row.
                continue
            tds = tr.find_all('td')
            # Bug fix: the original concatenated the bs4 ResultSet onto a str
            # ('... -- ' + tds), which raises TypeError on every data row.
            logger.info(f'免费代理IP爬虫 -- {tds}')
            ip = tds[0].text
            port = tds[1].text
            proxy_cover = tds[2].text
            # Bug fix: the empty-cell fallback tested tds[2] (anonymity)
            # instead of the protocol cell itself; mirror SpiderIpHaiIp and
            # test tds[3].
            proxy_type = tds[3].text if tds[3].text != '' else 'http'
            region = tds[4].text
            supplier = tds[5].text
            result.append(ProxyEntity(f'{proxy_type.lower()}://{ip}:{port}',
                                      source=self._name,
                                      supplier=supplier,
                                      proxy_type=self._judge_proxy_type(proxy_type),
                                      proxy_cover=self._judge_proxy_cover(proxy_cover),
                                      region=region))
        return result
    # Crawling too fast gets the client banned, hence the delay below.
    def get_interval(self) -> int:
        return 2
    def get_page_range(self) -> Iterable:
        return range(1, 4)
    def get_urls(self) -> List[str]:
        return ['http://ip.jiangxianli.com/?page={}']
    def get_page_url(self, url, page) -> str:
        return url.format(page)
    @staticmethod
    def _judge_proxy_type(type_str: str):
        # Map a protocol string onto ProxyTypeEnum values.
        type_low = type_str.lower()
        if type_low == 'http':
            return ProxyTypeEnum.HTTP.value
        elif type_low == 'https':
            return ProxyTypeEnum.HTTPS.value
        else:
            return ProxyTypeEnum.UNKNOWN.value
    @staticmethod
    def _judge_proxy_cover(cover_str: str):
        # '透明' = transparent, '高匿' = high anonymity, '普匿' = normal anonymity.
        if cover_str == '透明':
            return ProxyCoverEnum.TRANSPARENT.value
        elif cover_str == '高匿':
            return ProxyCoverEnum.HIGH_COVER.value
        elif cover_str == '普匿':
            return ProxyCoverEnum.NORMAL_COVER.value
        else:
            return ProxyCoverEnum.UNKNOWN.value
if __name__ == '__main__':
    # Manual smoke test: run a single spider's crawl() to completion and
    # print the collected proxies.
    # proxies = []
    # tasks = [SpiderXiciIp().crawl()]
    # loop = asyncio.new_event_loop()
    # asyncio.set_event_loop(loop)
    # results = loop.run_until_complete(asyncio.gather(*tasks))
    # loop.close()
    results = asyncio.run(SpiderSpysOneIP().crawl())
    print(results)
| [
"bs4.BeautifulSoup",
"random.uniform",
"pyppeteer.launch",
"src.log.logger.logger.info"
] | [((449, 481), 'src.log.logger.logger.info', 'logger.info', (['f"""注册{cls.__name__}"""'], {}), "(f'注册{cls.__name__}')\n", (460, 481), False, 'from src.log.logger import logger\n'), ((3352, 3387), 'src.log.logger.logger.info', 'logger.info', (['f"""{self._name}开始爬取..."""'], {}), "(f'{self._name}开始爬取...')\n", (3363, 3387), False, 'from src.log.logger import logger\n'), ((3396, 3447), 'src.log.logger.logger.info', 'logger.info', (['f"""_scrape_free_proxy_list_net 开始运行..."""'], {}), "(f'_scrape_free_proxy_list_net 开始运行...')\n", (3407, 3447), False, 'from src.log.logger import logger\n'), ((6819, 6854), 'src.log.logger.logger.info', 'logger.info', (['f"""{self._name}开始爬取..."""'], {}), "(f'{self._name}开始爬取...')\n", (6830, 6854), False, 'from src.log.logger import logger\n'), ((6863, 6914), 'src.log.logger.logger.info', 'logger.info', (['f"""_scrape_free_proxy_list_net 开始运行..."""'], {}), "(f'_scrape_free_proxy_list_net 开始运行...')\n", (6874, 6914), False, 'from src.log.logger import logger\n'), ((8136, 8163), 'bs4.BeautifulSoup', 'BeautifulSoup', (['resp', '"""lxml"""'], {}), "(resp, 'lxml')\n", (8149, 8163), False, 'from bs4 import BeautifulSoup, Tag\n'), ((9599, 9626), 'bs4.BeautifulSoup', 'BeautifulSoup', (['resp', '"""lxml"""'], {}), "(resp, 'lxml')\n", (9612, 9626), False, 'from bs4 import BeautifulSoup, Tag\n'), ((12037, 12064), 'bs4.BeautifulSoup', 'BeautifulSoup', (['resp', '"""lxml"""'], {}), "(resp, 'lxml')\n", (12050, 12064), False, 'from bs4 import BeautifulSoup, Tag\n'), ((13860, 13887), 'bs4.BeautifulSoup', 'BeautifulSoup', (['resp', '"""lxml"""'], {}), "(resp, 'lxml')\n", (13873, 13887), False, 'from bs4 import BeautifulSoup, Tag\n'), ((15807, 15834), 'bs4.BeautifulSoup', 'BeautifulSoup', (['resp', '"""lxml"""'], {}), "(resp, 'lxml')\n", (15820, 15834), False, 'from bs4 import BeautifulSoup, Tag\n'), ((17805, 17832), 'bs4.BeautifulSoup', 'BeautifulSoup', (['resp', '"""lxml"""'], {}), "(resp, 'lxml')\n", (17818, 17832), False, 'from bs4 import 
BeautifulSoup, Tag\n'), ((20105, 20132), 'bs4.BeautifulSoup', 'BeautifulSoup', (['resp', '"""lxml"""'], {}), "(resp, 'lxml')\n", (20118, 20132), False, 'from bs4 import BeautifulSoup, Tag\n'), ((888, 975), 'pyppeteer.launch', 'launch', ([], {'headless': '(False)', 'handleSIGINT': '(False)', 'handleSIGTERM': '(False)', 'handleSIGHUP': '(False)'}), '(headless=False, handleSIGINT=False, handleSIGTERM=False,\n handleSIGHUP=False)\n', (894, 975), False, 'from pyppeteer import launch\n'), ((1580, 1635), 'src.log.logger.logger.info', 'logger.info', (['f"""Extracting proxies from Spys {country}."""'], {}), "(f'Extracting proxies from Spys {country}.')\n", (1591, 1635), False, 'from src.log.logger import logger\n'), ((4664, 4751), 'pyppeteer.launch', 'launch', ([], {'headless': '(False)', 'handleSIGINT': '(False)', 'handleSIGTERM': '(False)', 'handleSIGHUP': '(False)'}), '(headless=False, handleSIGINT=False, handleSIGTERM=False,\n handleSIGHUP=False)\n', (4670, 4751), False, 'from pyppeteer import launch\n'), ((20467, 20500), 'src.log.logger.logger.info', 'logger.info', (["('免费代理IP爬虫 -- ' + tds)"], {}), "('免费代理IP爬虫 -- ' + tds)\n", (20478, 20500), False, 'from src.log.logger import logger\n'), ((1270, 1290), 'random.uniform', 'random.uniform', (['(2)', '(3)'], {}), '(2, 3)\n', (1284, 1290), False, 'import random\n')] |
import gooz_basic
import dev.gooz_thread
import os
# Hard-coded single-user credentials for the demo shell.
# NOTE(review): "<PASSWORD>" looks like a redaction placeholder; plaintext
# credentials in source should move to a proper secret store.
username = "Gorkem"
password = "<PASSWORD>"
login_flag = False
print("Welcome to GoozOS")
usr = input("Username: ")
if usr == username:
    paswd = input("Password: ")
    if paswd == password:
        login_flag = True
    else:
        print("Wrong Password")
else:
    print("User not found")
# Simple REPL: prompt "<user>@RPi_Pico:<cwd> >> ", run each command line
# through gooz_basic and record it in the command history.
while(login_flag):
    print(username+"@RPi_Pico:",end="")
    print(os.getcwd(),end=" ")
    msg = input(">> ")
    cmd_list = gooz_basic.command_analyzator(msg)
    gooz_basic.add_run_commands(cmd_list)
    gooz_basic.history.append(cmd_list)
    if cmd_list[0] == "shutdown":
        # "shutdown" resets the cwd, signals the worker thread to exit,
        # and leaves the REPL.
        os.chdir("/")
        print("System will be shutdown")
        dev.gooz_thread.exit_flag = 1
        break
| [
"gooz_basic.command_analyzator",
"gooz_basic.history.append",
"os.getcwd",
"os.chdir",
"gooz_basic.add_run_commands"
] | [((479, 513), 'gooz_basic.command_analyzator', 'gooz_basic.command_analyzator', (['msg'], {}), '(msg)\n', (508, 513), False, 'import gooz_basic\n'), ((518, 555), 'gooz_basic.add_run_commands', 'gooz_basic.add_run_commands', (['cmd_list'], {}), '(cmd_list)\n', (545, 555), False, 'import gooz_basic\n'), ((560, 595), 'gooz_basic.history.append', 'gooz_basic.history.append', (['cmd_list'], {}), '(cmd_list)\n', (585, 595), False, 'import gooz_basic\n'), ((420, 431), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (429, 431), False, 'import os\n'), ((639, 652), 'os.chdir', 'os.chdir', (['"""/"""'], {}), "('/')\n", (647, 652), False, 'import os\n')] |
## This File Contains all the different Neural Network Architectures used and the Loss function
import torch
import torch.nn as nn
import torch.nn.functional as functions
## Dense Network
class Dense(nn.Module):
    """Fully-connected policy/value network for a 6x7 board.

    forward() returns (P, v): P is a (batch, 7) softmax over the 7 columns
    and v is a (batch, 1) tanh-squashed position evaluation.
    """

    def __init__(self):
        super(Dense, self).__init__()
        # Submodule names are kept stable so saved state_dicts stay loadable.
        self.fc1 = nn.Linear(6 * 7, 32)  # board size hard-coded (6 rows x 7 cols)
        self.fc2 = nn.Linear(32, 16)
        self.probhead = nn.Linear(16, 7)
        self.valuehead = nn.Linear(16, 1)
        self.soft = nn.Softmax(dim=1)
        self.tanh = nn.Tanh()

    def forward(self, x):
        flat = x.view(-1, 6 * 7)
        hidden = functions.relu(self.fc2(functions.relu(self.fc1(flat))))
        # Policy head: per-column move probabilities.
        P = self.soft(self.probhead(hidden))
        # Value head: scalar evaluation in (-1, 1).
        v = self.tanh(self.valuehead(hidden))
        return P, v
## Convolutional Network
class Conv(nn.Module):
    """Convolutional policy/value network for a 6x7 board.

    forward() returns (P, v): P is a (batch, 7) softmax over the 7 columns
    and v is a (batch, 1) tanh-squashed position evaluation.
    """

    def __init__(self):
        super(Conv, self).__init__()
        # Submodule names are kept stable so saved state_dicts stay loadable.
        self.conv1 = nn.Conv2d(1, 8, 3, stride=1, padding=1)
        self.bn1 = nn.BatchNorm2d(8)
        self.fc1 = nn.Linear(336, 150)  # 336 = 6 * 7 * 8 flattened feature maps
        self.fc2 = nn.Linear(150, 60)
        self.probhead = nn.Linear(60, 7)
        self.valuehead = nn.Linear(60, 1)
        self.soft = nn.Softmax(dim=1)
        self.tanh = nn.Tanh()

    def forward(self, x):
        # Insert the single input channel expected by conv1.
        feat = functions.relu(self.bn1(self.conv1(x.view(-1, 1, 6, 7))))
        flat = feat.view(-1, 6 * 7 * 8)
        hidden = functions.relu(self.fc2(functions.relu(self.fc1(flat))))
        # Policy head: per-column move probabilities.
        P = self.soft(self.probhead(hidden))
        # Value head: scalar evaluation in (-1, 1).
        v = self.tanh(self.valuehead(hidden))
        return P, v
## Loss Function
class Alphaloss(nn.Module):
    """AlphaZero loss: mean over the batch of (z - v)^2 - pi . log(P).

    Notation follows the AlphaZero paper: z is the game outcome, v the
    predicted value, pi the MCTS visit distribution, P the predicted
    move probabilities.
    """
    def __init__(self):
        super(Alphaloss, self).__init__()
    def forward(self, z, v, pi, P):
        value_error = (z - v) **2
        # Per-sample cross entropy. Bug fix: the previous form
        # -torch.matmul(pi, torch.log(P).T) produced a (B, B) matrix for
        # batch size B > 1 (pairing every pi row with every P row), which
        # then broadcast against value_error and inflated the mean.
        policy_error = torch.sum(-pi * torch.log(P), 1)
        return (value_error.view(-1) + policy_error).mean()
"torch.nn.BatchNorm2d",
"torch.log",
"torch.nn.Tanh",
"torch.nn.Softmax",
"torch.nn.Conv2d",
"torch.nn.Linear"
] | [((294, 314), 'torch.nn.Linear', 'nn.Linear', (['(6 * 7)', '(32)'], {}), '(6 * 7, 32)\n', (303, 314), True, 'import torch.nn as nn\n'), ((356, 373), 'torch.nn.Linear', 'nn.Linear', (['(32)', '(16)'], {}), '(32, 16)\n', (365, 373), True, 'import torch.nn as nn\n'), ((397, 413), 'torch.nn.Linear', 'nn.Linear', (['(16)', '(7)'], {}), '(16, 7)\n', (406, 413), True, 'import torch.nn as nn\n'), ((438, 454), 'torch.nn.Linear', 'nn.Linear', (['(16)', '(1)'], {}), '(16, 1)\n', (447, 454), True, 'import torch.nn as nn\n'), ((474, 491), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(1)'}), '(dim=1)\n', (484, 491), True, 'import torch.nn as nn\n'), ((512, 521), 'torch.nn.Tanh', 'nn.Tanh', ([], {}), '()\n', (519, 521), True, 'import torch.nn as nn\n'), ((940, 979), 'torch.nn.Conv2d', 'nn.Conv2d', (['(1)', '(8)', '(3)'], {'stride': '(1)', 'padding': '(1)'}), '(1, 8, 3, stride=1, padding=1)\n', (949, 979), True, 'import torch.nn as nn\n'), ((995, 1012), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(8)'], {}), '(8)\n', (1009, 1012), True, 'import torch.nn as nn\n'), ((1032, 1051), 'torch.nn.Linear', 'nn.Linear', (['(336)', '(150)'], {}), '(336, 150)\n', (1041, 1051), True, 'import torch.nn as nn\n'), ((1070, 1088), 'torch.nn.Linear', 'nn.Linear', (['(150)', '(60)'], {}), '(150, 60)\n', (1079, 1088), True, 'import torch.nn as nn\n'), ((1112, 1128), 'torch.nn.Linear', 'nn.Linear', (['(60)', '(7)'], {}), '(60, 7)\n', (1121, 1128), True, 'import torch.nn as nn\n'), ((1153, 1169), 'torch.nn.Linear', 'nn.Linear', (['(60)', '(1)'], {}), '(60, 1)\n', (1162, 1169), True, 'import torch.nn as nn\n'), ((1189, 1206), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(1)'}), '(dim=1)\n', (1199, 1206), True, 'import torch.nn as nn\n'), ((1227, 1236), 'torch.nn.Tanh', 'nn.Tanh', ([], {}), '()\n', (1234, 1236), True, 'import torch.nn as nn\n'), ((1832, 1844), 'torch.log', 'torch.log', (['P'], {}), '(P)\n', (1841, 1844), False, 'import torch\n')] |
import findspark
findspark.init()
import time
from pyspark import SparkConf,SparkContext
from pyspark.streaming import StreamingContext
from pyspark.sql import Row,SQLContext
import sys
import requests
def aggregate_tweets_count(new_values, total_sum):
    """updateStateByKey reducer: add this batch's counts to the running total.

    total_sum is None on a key's first appearance; treat that as zero.
    """
    running = total_sum or 0
    return running + sum(new_values)
conf=SparkConf()
conf.setAppName("BigData")
sc=SparkContext.getOrCreate(conf=conf)
ssc=StreamingContext(sc,2)
ssc.checkpoint("checkpoint_BIGDATA")
dataStream=ssc.socketTextStream("localhost",9009)
#dataStream.pprint()
#tweet=dataStream.map(tmp)
# OR
tweet=dataStream.map(lambda w:(w.split(';')[0],1))
count=tweet.reduceByKey(lambda x,y:x+y)
count.pprint()
#TO maintain state
totalcount=tweet.updateStateByKey(aggregate_tweets_count)
totalcount.pprint()
ssc.start()
ssc.awaitTermination(14)
ssc.stop()
| [
"findspark.init",
"pyspark.streaming.StreamingContext",
"pyspark.SparkConf",
"pyspark.SparkContext.getOrCreate"
] | [((18, 34), 'findspark.init', 'findspark.init', ([], {}), '()\n', (32, 34), False, 'import findspark\n'), ((319, 330), 'pyspark.SparkConf', 'SparkConf', ([], {}), '()\n', (328, 330), False, 'from pyspark import SparkConf, SparkContext\n'), ((363, 398), 'pyspark.SparkContext.getOrCreate', 'SparkContext.getOrCreate', ([], {'conf': 'conf'}), '(conf=conf)\n', (387, 398), False, 'from pyspark import SparkConf, SparkContext\n'), ((406, 429), 'pyspark.streaming.StreamingContext', 'StreamingContext', (['sc', '(2)'], {}), '(sc, 2)\n', (422, 429), False, 'from pyspark.streaming import StreamingContext\n')] |
from flask import jsonify, Blueprint, request
from flask_restful import Resource, Api
from services.cost_prediction_model.cost_prediction_model import CostPredictionModel
from services.building_vol_calculator.building_vol_metres_calculator import BuildingVolMetresCalculator
from measurement.utils import guess
from services.error_handling.exceptions.cost_prediction_failed import CostPredictionFailed
class FitOutCostPrediction(Resource):
    """REST resource that predicts a fit-out cost from building parameters."""
    def post(self):
        # JSON body must provide: floorArea, averageFloorHeight,
        # isCatAIncluded, isCatBIncluded.
        try:
            costPredictionParameters = request.get_json()
            buildingVolumeCalculator = BuildingVolMetresCalculator(costPredictionParameters['floorArea'], costPredictionParameters['averageFloorHeight'])
            buildingVolumeValue = buildingVolumeCalculator.calculateVolume()
            # One-hot encode the fit-out category: Cat A only, Cat B only,
            # or both (all three stay 0 when neither is included).
            isCatAIncluded = 0
            isCatBIncluded = 0
            isCatAAndBIncluded = 0
            if costPredictionParameters['isCatAIncluded'] == True and costPredictionParameters['isCatBIncluded'] == True:
                isCatAAndBIncluded = 1
            elif costPredictionParameters['isCatAIncluded'] == True and costPredictionParameters['isCatBIncluded'] == False:
                isCatAIncluded = 1
            elif costPredictionParameters['isCatAIncluded'] == False and costPredictionParameters['isCatBIncluded'] == True:
                isCatBIncluded = 1
            # Feature vector order fed to the model: [volume, A, B, A+B].
            costPredictionParametersForModel = [
                buildingVolumeValue, isCatAIncluded, isCatBIncluded, isCatAAndBIncluded]
            costPredictionModel = CostPredictionModel()
            cost = costPredictionModel.predictCost(
                costPredictionParametersForModel)
        except ValueError as error:
            # Any payload/model failure is surfaced as a 500 CostPredictionFailed.
            raise CostPredictionFailed(error.args[0], response_status_code=500)
        except TypeError as error:
            raise CostPredictionFailed(error.args[0], response_status_code=500)
        except KeyError as error:
            raise CostPredictionFailed(error.args[0], response_status_code=500)
        # Respond with the predicted cost as JSON.
        return jsonify({'cost': cost})
# Flask wiring: expose the resource at /fitOutCostPrediction on this blueprint.
fitout_cost_prediction_api = Blueprint(
    'resources.fit_out_cost_prediction', __name__)
api = Api(fitout_cost_prediction_api)
api.add_resource(
    FitOutCostPrediction,
    '/fitOutCostPrediction',
    endpoint='fitOutCostPrediction'
)
| [
"services.cost_prediction_model.cost_prediction_model.CostPredictionModel",
"flask_restful.Api",
"flask.request.get_json",
"services.error_handling.exceptions.cost_prediction_failed.CostPredictionFailed",
"services.building_vol_calculator.building_vol_metres_calculator.BuildingVolMetresCalculator",
"flask... | [((2095, 2151), 'flask.Blueprint', 'Blueprint', (['"""resources.fit_out_cost_prediction"""', '__name__'], {}), "('resources.fit_out_cost_prediction', __name__)\n", (2104, 2151), False, 'from flask import jsonify, Blueprint, request\n'), ((2164, 2195), 'flask_restful.Api', 'Api', (['fitout_cost_prediction_api'], {}), '(fitout_cost_prediction_api)\n', (2167, 2195), False, 'from flask_restful import Resource, Api\n'), ((2040, 2063), 'flask.jsonify', 'jsonify', (["{'cost': cost}"], {}), "({'cost': cost})\n", (2047, 2063), False, 'from flask import jsonify, Blueprint, request\n'), ((517, 535), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (533, 535), False, 'from flask import jsonify, Blueprint, request\n'), ((576, 694), 'services.building_vol_calculator.building_vol_metres_calculator.BuildingVolMetresCalculator', 'BuildingVolMetresCalculator', (["costPredictionParameters['floorArea']", "costPredictionParameters['averageFloorHeight']"], {}), "(costPredictionParameters['floorArea'],\n costPredictionParameters['averageFloorHeight'])\n", (603, 694), False, 'from services.building_vol_calculator.building_vol_metres_calculator import BuildingVolMetresCalculator\n'), ((1522, 1543), 'services.cost_prediction_model.cost_prediction_model.CostPredictionModel', 'CostPredictionModel', ([], {}), '()\n', (1541, 1543), False, 'from services.cost_prediction_model.cost_prediction_model import CostPredictionModel\n'), ((1701, 1762), 'services.error_handling.exceptions.cost_prediction_failed.CostPredictionFailed', 'CostPredictionFailed', (['error.args[0]'], {'response_status_code': '(500)'}), '(error.args[0], response_status_code=500)\n', (1721, 1762), False, 'from services.error_handling.exceptions.cost_prediction_failed import CostPredictionFailed\n'), ((1817, 1878), 'services.error_handling.exceptions.cost_prediction_failed.CostPredictionFailed', 'CostPredictionFailed', (['error.args[0]'], {'response_status_code': '(500)'}), '(error.args[0], 
response_status_code=500)\n', (1837, 1878), False, 'from services.error_handling.exceptions.cost_prediction_failed import CostPredictionFailed\n'), ((1932, 1993), 'services.error_handling.exceptions.cost_prediction_failed.CostPredictionFailed', 'CostPredictionFailed', (['error.args[0]'], {'response_status_code': '(500)'}), '(error.args[0], response_status_code=500)\n', (1952, 1993), False, 'from services.error_handling.exceptions.cost_prediction_failed import CostPredictionFailed\n')] |
# Copyright (c) 2013 Shotgun Software Inc.
#
# CONFIDENTIAL AND PROPRIETARY
#
# This work is provided "AS IS" and subject to the Shotgun Pipeline Toolkit
# Source Code License included in this distribution package. See LICENSE.
# By accessing, using, copying or modifying this work you indicate your
# agreement to the Shotgun Pipeline Toolkit Source Code License. All rights
# not expressly granted therein are reserved by Shotgun Software Inc.
from ..errors import TankError
from . import constants
from . import util
from . import console_utils
from .action_base import Action
import os
class SwitchAppAction(Action):
    """
    Action that makes it easy to switch from one descriptor to another.

    Supports three target locations: the Toolkit App Store ("app_store"),
    a git repository (any target ending in ".git") and a raw path on disk
    ("dev" mode, used for local app development).
    """
    def __init__(self):
        # Register this action under the name "switch_app" in the
        # "Developer" category; it requires a full TK instance.
        Action.__init__(self,
                        "switch_app",
                        Action.TK_INSTANCE,
                        "Switches an app from one code location to another.",
                        "Developer")
    def run_interactive(self, log, args):
        """
        Execute the switch interactively.

        :param log: Logger used for all user-facing output.
        :param args: [env_name, engine_instance, app_instance, target] where
                     target is "app_store", a git URL/path ending in ".git",
                     or a local directory path (dev mode). With fewer than
                     four arguments, detailed usage help is printed instead.
        :raises TankError: if the environment, engine or app cannot be
                           resolved, or a dev path does not exist.
        """
        # Fewer than four arguments: print the full usage help and bail out.
        if len(args) < 4:
            log.info("This command allows you to easily switch an app between different "
                     "locations. A location defines where toolkit picks and synchrononizes "
                     "App versions. It can be either the Toolkit App Store, a version control "
                     "system such as Git or a location on disk.")
            log.info("")
            log.info("Switching an app to use a raw disk location")
            log.info("--------------------------------------------------")
            log.info("If you want to do app development, it is handy to be able to "
                     "take an app in your configuration and tell it to load from a "
                     "specific folder on disk. The workflow is that you typically would "
                     "start off with a git repository (forked off one of Shotgun's git "
                     "repositories if you are modifying one of the standard toolkit apps). "
                     "Then, clone this repo into your local dev area where you intend to "
                     "make the actual changes. Now use the switch command to tell toolkit "
                     "to use this version of the code.")
            log.info("")
            log.info("Note! We advise against using dev locations in your primary configuration "
                     "when you want to do development work, start by cloning your primary "
                     "pipeline configuration. You can do this by right clicking on it in Shotgun.")
            log.info("")
            log.info("> Syntax: switch_app environment engine app path")
            log.info("> Example: switch_app Asset tk-maya tk-multi-about /Users/foo/dev/tk-multi-about")
            log.info("")
            log.info("")
            log.info("Switching an app to track a git repository")
            log.info("--------------------------------------------------")
            log.info("If you are using custom made apps or have modified Shotgun's built in apps "
                     "by forking them from github ('https://github.com/shotgunsoftware'), and you "
                     "have finished customization, you usually want to switch the app so that it "
                     "tracks your git repository instead of the Toolkit App Store. Toolkit will "
                     "read the list of tags from the repository and identify version-like tags "
                     "(such as 'v0.1.2' or 'v1.2.3.4' and use these to determine which version "
                     "is the latest one. If you create a new tag in the repository and then run "
                     "the Toolkit update checker, it will detect that a more recent version is "
                     "available and prompt you if you want to upgrade.")
            log.info("")
            log.info("> Syntax: switch_app environment engine app git_repo")
            log.info("The git_repo part is a repository location that can be understood by git. "
                     "Examples include: ")
            log.info(" - /path/to/repo.git")
            log.info(" - user@remotehost:/path_to/repo.git")
            log.info(" - git://github.com/manneohrstrom/tk-hiero-publish.git")
            log.info(" - https://github.com/manneohrstrom/tk-hiero-publish.git")
            log.info("")
            log.info("")
            log.info("Switching an app to track the Toolkit App Store")
            log.info("--------------------------------------------------")
            log.info("If you have been doing development and want to switch back to the "
                     "official app store version of an app, you can use the following syntax:")
            log.info("")
            log.info("> Syntax: switch_app environment engine app app_store")
            log.info("> Example: switch_app Asset tk-maya tk-multi-about app_store")
            log.info("")
            log.info("")
            log.info("For a list of environments, engines and apps, run the app_info command.")
            log.info("")
            log.info("If you add a %s flag, the original, non-structure-preserving "
                     "yaml parser will be used. This parser was used by default in core v0.17.x "
                     "and below." % constants.LEGACY_YAML_PARSER_FLAG)
            log.info("")
            return
        # An optional legacy-parser flag may be mixed into args; strip it out.
        (use_legacy_parser, args) = util.should_use_legacy_yaml_parser(args)
        preserve_yaml = not use_legacy_parser
        # get parameters
        env_name = args[0]
        engine_instance_name = args[1]
        app_instance_name = args[2]
        path = None
        mode = None
        # The fourth argument decides the target mode: the literal string
        # "app_store", a git target (ends in ".git"), or a dev path.
        fourth_param = args[3]
        if fourth_param == "app_store":
            mode = "app_store"
        elif fourth_param.endswith(".git"):
            mode = "git"
            path = fourth_param
        else:
            mode = "dev"
            path = fourth_param
        # find descriptor
        try:
            env = self.tk.pipeline_configuration.get_environment(env_name, writable=True)
            env.set_yaml_preserve_mode(preserve_yaml)
        except Exception as e:
            raise TankError("Environment '%s' could not be loaded! Error reported: %s" % (env_name, e))
        # make sure the engine exists in the environment
        if engine_instance_name not in env.get_engines():
            raise TankError("Environment %s has no engine named %s!" % (env_name, engine_instance_name))
        # and the app
        apps_for_engine = env.get_apps(engine_instance_name)
        if app_instance_name not in apps_for_engine:
            raise TankError("Environment %s, engine %s has no app named '%s'! "
                            "Available app instances are: %s " % (env_name,
                                                                  engine_instance_name,
                                                                  app_instance_name,
                                                                  ", ".join(apps_for_engine) ))
        # get the descriptor
        descriptor = env.get_app_descriptor(engine_instance_name, app_instance_name)
        log.info("")
        # Build the replacement descriptor for the requested mode.
        if mode == "app_store":
            new_descriptor = self.tk.pipeline_configuration.get_latest_app_descriptor(
                {"type": "app_store", "name": descriptor.system_name}
            )
        elif mode == "dev":
            if not os.path.exists(path):
                raise TankError("Cannot find path '%s' on disk!" % path)
            # run descriptor factory method
            new_descriptor = self.tk.pipeline_configuration.get_app_descriptor(
                {"type": "dev", "path": path}
            )
        elif mode == "git":
            # run descriptor factory method
            new_descriptor = self.tk.pipeline_configuration.get_latest_app_descriptor(
                {"type": "git", "path": path}
            )
        else:
            raise TankError("Unknown mode!")
        # prompt user!
        log.info("")
        log.info("")
        log.info("Current version")
        log.info("------------------------------------")
        for (k,v) in descriptor.get_dict().items():
            log.info(" - %s: %s" % (k.capitalize(), v))
        log.info("")
        log.info("New version")
        log.info("------------------------------------")
        for (k,v) in new_descriptor.get_dict().items():
            log.info(" - %s: %s" % (k.capitalize(), v))
        log.info("")
        if not console_utils.ask_yn_question("Okay to switch?"):
            log.info("Switch aborted!")
            return
        # Make sure the target code is cached locally before switching.
        if not new_descriptor.exists_local():
            log.info("Downloading %s..." % new_descriptor)
            new_descriptor.download_local()
        # create required shotgun fields
        new_descriptor.ensure_shotgun_fields_exist(self.tk)
        # run post install hook
        new_descriptor.run_post_install(self.tk)
        # ensure that all required frameworks have been installed
        # find the file where our item is being installed
        (_, yml_file) = env.find_location_for_app(engine_instance_name, app_instance_name)
        console_utils.ensure_frameworks_installed(log, self.tk, yml_file, new_descriptor, env, suppress_prompts=False)
        # find the name of the engine
        engine_system_name = env.get_engine_descriptor(engine_instance_name).system_name
        # now get data for all new settings values in the config
        params = console_utils.get_configuration(log, self.tk, new_descriptor, descriptor, False, engine_system_name)
        # next step is to add the new configuration values to the environment
        env.update_app_settings(engine_instance_name,
                                app_instance_name,
                                params,
                                new_descriptor.get_dict())
        log.info("Switch complete!")
| [
"os.path.exists"
] | [((7518, 7538), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (7532, 7538), False, 'import os\n')] |
from .hunterdouglasplatinum import HunterDouglasPlatinumHub
import argparse
import sys
import asyncio
def err(msg):
    """Write *msg* followed by a newline to standard error."""
    print(msg, file=sys.stderr)
async def main():
    """Command-line entry point: drive a Hunter Douglas Platinum hub.

    Connects to the hub at the given IP and either moves a named shade to
    a level or runs a named scene, printing errors to stderr otherwise.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("ip", help="ip address of the hub")
    parser.add_argument("-d", "--shade", help="name of the shade")
    parser.add_argument("-n", "--scene", help="name of the scene")
    parser.add_argument("-l", "--level", help="level of the shade <up/down/percentage>")
    args = parser.parse_args()

    # Guard clause: without a hub address there is nothing to do.
    if not args.ip:
        err('IP address of hub not specified')
        return

    hub = await HunterDouglasPlatinumHub.create(args.ip)
    if args.shade:
        shade = hub.get_shade(name=args.shade)
        if not shade:
            err("Shade not found")
        elif args.level:
            await shade.set_level(args.level)
        else:
            err("need to specify level using --level")
    elif args.scene:
        scene = hub.get_scene(name=args.scene)
        if scene:
            await scene.run()
        else:
            err('Scene not found')
if __name__ == "__main__":
    asyncio.run(main())
| [
"sys.stderr.write",
"argparse.ArgumentParser"
] | [((121, 149), 'sys.stderr.write', 'sys.stderr.write', (["(msg + '\\n')"], {}), "(msg + '\\n')\n", (137, 149), False, 'import sys\n'), ((180, 205), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (203, 205), False, 'import argparse\n')] |
import os
import launch
import launch_ros.actions
from ament_index_python.packages import get_package_share_directory
def generate_launch_description():
    """Assemble the ROS 2 launch description for the graph-based SLAM node."""
    # Default parameter file shipped inside the package's share directory.
    default_param_file = os.path.join(
        get_package_share_directory('graph_based_slam'),
        'param',
        'graphbasedslam.yaml')
    # Launch-time configurable path to the parameter file.
    param_dir = launch.substitutions.LaunchConfiguration(
        'graphbasedslam_param_dir',
        default=default_param_file)
    declare_param_dir = launch.actions.DeclareLaunchArgument(
        'graphbasedslam_param_dir',
        default_value=param_dir,
        description='Full path to graphbasedslam parameter file to load')
    slam_node = launch_ros.actions.Node(
        package='graph_based_slam',
        executable='graph_based_slam_node',
        parameters=[param_dir],
        output='screen')
    return launch.LaunchDescription([declare_param_dir, slam_node])
"ament_index_python.packages.get_package_share_directory",
"launch.actions.DeclareLaunchArgument"
] | [((670, 849), 'launch.actions.DeclareLaunchArgument', 'launch.actions.DeclareLaunchArgument', (['"""graphbasedslam_param_dir"""'], {'default_value': 'graphbasedslam_param_dir', 'description': '"""Full path to graphbasedslam parameter file to load"""'}), "('graphbasedslam_param_dir',\n default_value=graphbasedslam_param_dir, description=\n 'Full path to graphbasedslam parameter file to load')\n", (706, 849), False, 'import launch\n'), ((308, 355), 'ament_index_python.packages.get_package_share_directory', 'get_package_share_directory', (['"""graph_based_slam"""'], {}), "('graph_based_slam')\n", (335, 355), False, 'from ament_index_python.packages import get_package_share_directory\n')] |
import pytest
import networkx as nx
from networkx.algorithms.community import (
greedy_modularity_communities,
modularity,
naive_greedy_modularity_communities,
)
@pytest.mark.parametrize(
    "func", (greedy_modularity_communities, naive_greedy_modularity_communities)
)
def test_modularity_communities(func):
    """Both greedy variants find the three known karate-club communities."""
    graph = nx.karate_club_graph()
    expected = {
        # John A.'s faction
        frozenset({8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33}),
        # members shared between the two factions
        frozenset({1, 2, 3, 7, 9, 12, 13, 17, 21}),
        # Mr. Hi's faction
        frozenset({0, 4, 5, 6, 10, 11, 16, 19}),
    }
    assert set(func(graph)) == expected
def test_modularity_communities_weighted():
    """Heavy edges under nodes 1 and 2 steer the greedy merge on a tree."""
    G = nx.balanced_tree(2, 3)
    for a, b in G.edges:
        # Weight 10 on the edges hanging off nodes 1 and 2, weight 1 elsewhere.
        heavy = a in {1, 2} and b != 0
        G[a][b]["weight"] = 10.0 if heavy else 1.0
    expected = [{0, 1, 3, 4, 7, 8, 9, 10}, {2, 5, 6, 11, 12, 13, 14}]
    assert greedy_modularity_communities(G, weight="weight") == expected
def test_resolution_parameter_impact():
    """Resolution gamma controls community granularity, identically for both variants."""
    G = nx.barbell_graph(5, 3)
    cases = [
        (1, [frozenset(range(5)), frozenset(range(8, 13)), frozenset(range(5, 8))]),
        (2.5, [{0, 1, 2, 3}, {9, 10, 11, 12}, {5, 6, 7}, {4}, {8}]),
        (0.3, [frozenset(range(8)), frozenset(range(8, 13))]),
    ]
    for gamma, expected in cases:
        assert greedy_modularity_communities(G, resolution=gamma) == expected
        assert naive_greedy_modularity_communities(G, resolution=gamma) == expected
| [
"networkx.algorithms.community.naive_greedy_modularity_communities",
"networkx.algorithms.community.greedy_modularity_communities",
"pytest.mark.parametrize",
"networkx.balanced_tree",
"networkx.karate_club_graph",
"networkx.barbell_graph"
] | [((178, 283), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""func"""', '(greedy_modularity_communities, naive_greedy_modularity_communities)'], {}), "('func', (greedy_modularity_communities,\n naive_greedy_modularity_communities))\n", (201, 283), False, 'import pytest\n'), ((333, 355), 'networkx.karate_club_graph', 'nx.karate_club_graph', ([], {}), '()\n', (353, 355), True, 'import networkx as nx\n'), ((703, 725), 'networkx.balanced_tree', 'nx.balanced_tree', (['(2)', '(3)'], {}), '(2, 3)\n', (719, 725), True, 'import networkx as nx\n'), ((1083, 1105), 'networkx.barbell_graph', 'nx.barbell_graph', (['(5)', '(3)'], {}), '(5, 3)\n', (1099, 1105), True, 'import networkx as nx\n'), ((971, 1020), 'networkx.algorithms.community.greedy_modularity_communities', 'greedy_modularity_communities', (['G'], {'weight': '"""weight"""'}), "(G, weight='weight')\n", (1000, 1020), False, 'from networkx.algorithms.community import greedy_modularity_communities, modularity, naive_greedy_modularity_communities\n'), ((1218, 1268), 'networkx.algorithms.community.greedy_modularity_communities', 'greedy_modularity_communities', (['G'], {'resolution': 'gamma'}), '(G, resolution=gamma)\n', (1247, 1268), False, 'from networkx.algorithms.community import greedy_modularity_communities, modularity, naive_greedy_modularity_communities\n'), ((1292, 1348), 'networkx.algorithms.community.naive_greedy_modularity_communities', 'naive_greedy_modularity_communities', (['G'], {'resolution': 'gamma'}), '(G, resolution=gamma)\n', (1327, 1348), False, 'from networkx.algorithms.community import greedy_modularity_communities, modularity, naive_greedy_modularity_communities\n'), ((1457, 1507), 'networkx.algorithms.community.greedy_modularity_communities', 'greedy_modularity_communities', (['G'], {'resolution': 'gamma'}), '(G, resolution=gamma)\n', (1486, 1507), False, 'from networkx.algorithms.community import greedy_modularity_communities, modularity, naive_greedy_modularity_communities\n'), 
((1531, 1587), 'networkx.algorithms.community.naive_greedy_modularity_communities', 'naive_greedy_modularity_communities', (['G'], {'resolution': 'gamma'}), '(G, resolution=gamma)\n', (1566, 1587), False, 'from networkx.algorithms.community import greedy_modularity_communities, modularity, naive_greedy_modularity_communities\n'), ((1690, 1740), 'networkx.algorithms.community.greedy_modularity_communities', 'greedy_modularity_communities', (['G'], {'resolution': 'gamma'}), '(G, resolution=gamma)\n', (1719, 1740), False, 'from networkx.algorithms.community import greedy_modularity_communities, modularity, naive_greedy_modularity_communities\n'), ((1764, 1820), 'networkx.algorithms.community.naive_greedy_modularity_communities', 'naive_greedy_modularity_communities', (['G'], {'resolution': 'gamma'}), '(G, resolution=gamma)\n', (1799, 1820), False, 'from networkx.algorithms.community import greedy_modularity_communities, modularity, naive_greedy_modularity_communities\n')] |
# tcp client can be used for services, send garbage data, fuzz...
import socket
# Target server for the demo request (name kept as-is for compatibility).
tarket_host = "www.google.com"
target_port = 80

# create socket object
## AF_INET selects the standard IPv4 address family (hostname or dotted quad)
## SOCK_STREAM makes this a plain TCP client.
client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
    # connect the client
    client.connect((tarket_host, target_port))
    # Minimal HTTP/1.1 request. Fix: HTTP requires CRLF ("\r\n") line
    # terminators; the request line previously ended with a bare "\n".
    data = "GET / HTTP/1.1\r\nHost: google.com\r\n\r\n"
    # send the request bytes
    client.send(data.encode())
    # receive up to 4096 bytes of the response
    response = client.recv(4096)
finally:
    # Fix: always close the socket so the file descriptor is not leaked.
    client.close()

if __name__ == "__main__":
    print(response)

'''
some assumptions are being made here.
1) our connection will always succeed.
2) the server is always expecting us to send data first.
3) the server will alwasy send us back data in a timely fashion.
'''
"socket.socket"
] | [((291, 340), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (304, 340), False, 'import socket\n')] |
import os
import re
import mne
import numpy as np
class SeegRecording():
    """In-memory SEEG recording: contact metadata plus a [channels x time] array.

    NOTE(review): __init__ calls self.set_bipolar() and get_data_bipolar()
    reads self.bipolar, but set_bipolar is not defined in this class as
    shown — presumably defined elsewhere (or missing); confirm.
    """
    def __init__(self, contacts, data, sampling_rate):
        '''
        contacts (list of tuples) is a list of all the contact labels and their
        corresponding number
        data (np.ndarray) is a numpy array of the seeg data [chans x time]
        sampling_rate (float) is the sampling rate in Hz
        '''
        # Sort the contacts first
        # (indices records the permutation so data rows can be reordered to match)
        contacts, indices = zip(*sorted((c, i)
                                        for i, c in enumerate(contacts)))
        # set the contacts
        self.contacts = contacts
        self.ncontacts = len(self.contacts)
        # e.g. ("A", 1) -> "A1"
        self.contact_names = [str(a) + str(b) for a, b in self.contacts]
        # can't do assertion here if we trim contacts with a list...
        assert data.ndim == 2
        # assert data.shape[0] == self.ncontacts
        # Reorder data rows to match the sorted contact order.
        self.data = data[indices, :]
        self.sampling_rate = sampling_rate
        nsamples = self.data.shape[1]
        # Time axis in seconds: 0 .. (nsamples-1)/sampling_rate.
        self.t = np.linspace(0, (nsamples - 1) *
                        (1. / self.sampling_rate), nsamples)
        # Map electrode name -> list of row indices of its contacts.
        self.electrodes = {}
        for i, (name, number) in enumerate(self.contacts):
            if name not in self.electrodes:
                self.electrodes[name] = []
            self.electrodes[name].append(i)
        self.set_bipolar()
    @classmethod
    def from_ades(cls, filename):
        """Build a recording from an ADES header file plus its sibling .dat payload.

        A "<filename>.bad" file, if present, lists channel names to exclude.
        """
        data_file = os.path.splitext(filename)[0] + ".dat"
        contact_names = []  # NOTE(review): populated nowhere; dead variable
        contacts = []
        sampling_rate = None
        nsamples = None
        seeg_idxs = []
        bad_channels = []
        bad_file = filename + ".bad"
        if os.path.isfile(bad_file):
            with open(bad_file, 'r') as fd:
                bad_channels = [ch.strip()
                                for ch in fd.readlines() if ch.strip() != ""]
        with open(filename, 'r') as fd:
            fd.readline() # ADES header file
            # Expect "samplingRate = <float>" then "numberOfSamples = <int>".
            kw, sampling_rate = [s.strip()
                                 for s in fd.readline().strip().split('=')]
            assert kw == 'samplingRate'
            sampling_rate = float(sampling_rate)
            kw, nsamples = [s.strip()
                            for s in fd.readline().strip().split('=')]
            assert kw == 'numberOfSamples'
            nsamples = int(nsamples)
            channel_idx = 0
            # Remaining lines look like "<label> = <type>"; keep SEEG channels
            # whose label parses as <letters><digits> and is not marked bad.
            for line in fd.readlines():
                if not line.strip():
                    continue
                parts = [p.strip() for p in line.strip().split('=')]
                if len(parts) > 1 and parts[1] == 'SEEG':
                    name, idx = re.match(
                        "([A-Za-z]+[']*)([0-9]+)", parts[0]).groups()
                    idx = int(idx)
                    if parts[0] not in bad_channels:
                        contacts.append((name, idx))
                        seeg_idxs.append(channel_idx)
                    channel_idx += 1
        # Raw payload is 32-bit floats, stored sample-major (time x channels).
        data = np.fromfile(data_file, dtype='f4')
        ncontacts = data.size // nsamples
        # If the header's sample count doesn't divide the payload, trust the
        # payload size instead and recompute nsamples.
        if data.size != nsamples * ncontacts:
            print("!! data.size != nsamples*ncontacts")
            print("!! %d != %d %d" % (data.size, nsamples, ncontacts))
            print("!! Ignoring nsamples")
            nsamples = int(data.size / ncontacts)
        # Transpose to [channels x time], then keep only good SEEG rows.
        data = data.reshape((nsamples, ncontacts)).T
        data = data[seeg_idxs, :]
        return cls(contacts, data, sampling_rate)
    @classmethod
    def from_fif(cls, filename, drop_channels=None, rename_channels=None):
        """Build a recording from an MNE .fif file (optionally renaming channels)."""
        raw = mne.io.read_raw_fif(filename)
        if rename_channels is not None:
            raw.rename_channels(rename_channels)
        return cls._from_mne_raw(raw, drop_channels)
    @classmethod
    def from_edf(cls, filename, drop_channels=None, rename_channels=None):
        """Build a recording from an EDF file (optionally renaming channels)."""
        raw = mne.io.read_raw_edf(filename, preload=True)
        if rename_channels is not None:
            raw.rename_channels(rename_channels)
        return cls._from_mne_raw(raw, drop_channels)
    @classmethod
    def _from_mne_raw(cls, raw, drop_channels=None):
        """Shared MNE path: keep channels named <letters><digits>, minus bads/drops."""
        contacts = []
        seeg_idxs = []
        if drop_channels is None:
            drop_channels = []
        for i, ch_name in enumerate(raw.ch_names):
            if ch_name in raw.info['bads'] or ch_name in drop_channels:
                continue
            match = re.match("^([A-Za-z]+[']*)([0-9]+)$", ch_name)
            if match is not None:
                name, idx = match.groups()
                contacts.append((name, int(idx)))
                seeg_idxs.append(i)
        return cls(contacts, raw.get_data()[seeg_idxs, :], raw.info['sfreq'])
    def get_data_bipolar(self):
        """Return the bipolar montage: one row per pair in self.bipolar.

        NOTE(review): assumes self.bipolar is a list of (label, i1, i2)
        tuples produced by set_bipolar — confirm.
        """
        data_bipolar = np.zeros((len(self.bipolar), len(self.t)))
        for i, (_, i1, i2) in enumerate(self.bipolar):
            data_bipolar[i, :] = self.data[i1, :] - self.data[i2, :]
        return data_bipolar
| [
"numpy.fromfile",
"mne.io.read_raw_fif",
"re.match",
"os.path.splitext",
"os.path.isfile",
"numpy.linspace",
"mne.io.read_raw_edf"
] | [((1067, 1136), 'numpy.linspace', 'np.linspace', (['(0)', '((nsamples - 1) * (1.0 / self.sampling_rate))', 'nsamples'], {}), '(0, (nsamples - 1) * (1.0 / self.sampling_rate), nsamples)\n', (1078, 1136), True, 'import numpy as np\n'), ((1725, 1749), 'os.path.isfile', 'os.path.isfile', (['bad_file'], {}), '(bad_file)\n', (1739, 1749), False, 'import os\n'), ((3022, 3056), 'numpy.fromfile', 'np.fromfile', (['data_file'], {'dtype': '"""f4"""'}), "(data_file, dtype='f4')\n", (3033, 3056), True, 'import numpy as np\n'), ((3611, 3640), 'mne.io.read_raw_fif', 'mne.io.read_raw_fif', (['filename'], {}), '(filename)\n', (3630, 3640), False, 'import mne\n'), ((3892, 3935), 'mne.io.read_raw_edf', 'mne.io.read_raw_edf', (['filename'], {'preload': '(True)'}), '(filename, preload=True)\n', (3911, 3935), False, 'import mne\n'), ((4432, 4478), 're.match', 're.match', (['"""^([A-Za-z]+[\']*)([0-9]+)$"""', 'ch_name'], {}), '("^([A-Za-z]+[\']*)([0-9]+)$", ch_name)\n', (4440, 4478), False, 'import re\n'), ((1485, 1511), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (1501, 1511), False, 'import os\n'), ((2697, 2742), 're.match', 're.match', (['"""([A-Za-z]+[\']*)([0-9]+)"""', 'parts[0]'], {}), '("([A-Za-z]+[\']*)([0-9]+)", parts[0])\n', (2705, 2742), False, 'import re\n')] |
# -*- coding: UTF-8 -*-
from pydub import AudioSegment
# Load the MP3, resample it to an 11025 Hz frame rate, and save it as WAV.
sound = AudioSegment.from_mp3('D:/360Downloads/caixi-from-net-common.mp3')
sound = sound.set_frame_rate(11025)
sound.export('D:/360Downloads/from-net-common-3.wav', format='wav')
| [
"pydub.AudioSegment.from_mp3"
] | [((65, 131), 'pydub.AudioSegment.from_mp3', 'AudioSegment.from_mp3', (['"""D:/360Downloads/caixi-from-net-common.mp3"""'], {}), "('D:/360Downloads/caixi-from-net-common.mp3')\n", (86, 131), False, 'from pydub import AudioSegment\n')] |
import json
import unittest
from utils.factory import create_app
from utils.config import Config
class BaseTestCase(unittest.TestCase):
    """Shared fixture: builds the Flask app and exposes a test client."""
    def setUp(self):
        """Create the application, push an app context, and store a test client."""
        application = create_app(Config())
        application.app_context().push()
        self.app = application.test_client()
    def tearDown(self):
        """Per-test teardown hook; logs completion."""
        print("teardown complete")
class TestHealthEndpoint(BaseTestCase):
    """Test Health Endpoint"""
    def setUp(self):
        super(TestHealthEndpoint, self).setUp()
    def test_returns_status_ok(self):
        """Should return status OK and HTTP 200 when hitting health endpoint"""
        response = self.app.get('/health')
        self.assertEqual(200, response.status_code)
        # Bug fix: the old assertTrue('"status":"OK"', r) could never fail —
        # assertTrue treats its second argument as the failure *message*, so
        # the non-empty literal was always truthy. Parse the JSON body and
        # assert on the actual status field instead.
        payload = json.loads(response.data)
        self.assertEqual('OK', payload.get('status'))
"utils.config.Config"
] | [((211, 219), 'utils.config.Config', 'Config', ([], {}), '()\n', (217, 219), False, 'from utils.config import Config\n')] |
#!/usr/bin/env python
from __future__ import division, unicode_literals
import argparse
import os
import time
import cv2
from net.wyun.blankanswer import image_utils
from net.wyun.blankanswer.loader import SimplePreprocessor
# Candidate (width, height) bucket sizes for image padding, as a JSON-ish string.
default_buckets = '[[240,100], [320,80], [400,80],[400,100], [480,80], [480,100], [560,80], [560,100], [640,80],[640,100],\
[720,80], [720,100], [720,120], [720, 200], [800,100],[800,320], [1000,200]]'
# Directory where uploaded files are written.
outdir = 'uploads'
# When True, per-request timing is printed.
debug = True
current_milli_time = lambda: int(round(time.time() * 1000))
# Default response headers for JSON replies.
headers = {'content-type': 'application/json'}
from flask import Flask
# Flask application instance for this module.
app = Flask(__name__)
# Shared image preprocessor constructed with (240, 95) — presumably the
# target width/height for uploaded answer images; confirm against
# SimplePreprocessor's constructor.
imageP = SimplePreprocessor.SimplePreprocessor(240, 95)
def get_model_api():
    """Return the per-request callable (model_api) used by the API layer.

    NOTE(review): argparse here parses the *process* command line
    (sys.argv), not the HTTP request — leftover translate.py plumbing.
    """
    # initialize config for translate
    parser = argparse.ArgumentParser(description='translate.py', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    opt = parser.parse_args()
    def model_api(request_id, filename, return_list):
        """
        Decide whether the uploaded image contains an answer and append a
        result dict to return_list.

        Args:
            request_id: unique id echoed back in the result under 'id'
            filename: path of the uploaded image (png)
            return_list: mutable list the result dict is appended to
        Result dict:
            has_answer: 1 = answer present, 0 = blank, 2 = processing error
        """
        # process input
        res = {}
        # request_id=str(uuid.uuid4())
        res['id'] = request_id
        start_t = current_milli_time()
        img_file_path = filename # in png format (NOTE(review): unused below)
        # preprocess image
        # os.system('echo '+ str(request_id)+'_preprocessed.png ' +'>uploads/test.txt');
        # SECURITY NOTE(review): filename is interpolated into a shell
        # command — a crafted filename could inject shell syntax. Prefer
        # plain Python file I/O or subprocess.run with a list argument.
        os.system('echo ' + filename + '>uploads/test.txt');
        print (filename)
        # src= 'uploads/test.txt'
        # src_dir='uploads'
        # print "src=", src
        # print "src_dir=", src_dir
        #all_scores, n_best_preds = translator.translate(src=opt.src, tgt=None, src_dir=opt.src_dir,
        #                            batch_size=opt.batch_size, attn_debug=opt.attn_debug)
        # hasAnswer: 1 means that there is answer in image; 0 means not
        hasAnswer = 0
        now_t = current_milli_time()
        # has_answer returns (isSuccess, hasAnswer, errmsg).
        (isSuccess, hasAnswer, errmsg) = has_answer(filename)
        if debug:
            print ("time spent ", now_t - start_t)
        # return the output for the api
        if isSuccess:
            has_an_answer = 0
            if hasAnswer:
                has_an_answer = 1
            res['status'] = "success"
            res['info'] = now_t - start_t
            res['has_answer'] = has_an_answer
        else:
            res['status'] = "error"
            res['info'] = errmsg
            res['has_answer'] = 2
        return_list.append(res)
        # return res
    return model_api
# Central moment (nu02) cutoff below which an image is considered blank.
Threshold = 5.0e-05
def has_answer(imagePath):
    '''
    Decide whether the uploaded answer image contains any writing.
    :param imagePath: path of the image file being uploaded from client
    :return: (success, has_answer, errmsg)
    '''
    img = cv2.imread(imagePath)
    h, w, _ = img.shape
    # Reject images whose dimensions fall outside the supported range.
    if not (15 <= h <= 1000 and 45 <= w <= 1000):
        return (False, False, "image size (widthxheight) should be (45, 45) to (1000, 1000)")
    resized = imageP.preprocess(img)
    gray = cv2.cvtColor(resized, cv2.COLOR_BGR2GRAY)
    # Otsu binarization, then blank out the border columns before inverting.
    _, binary = cv2.threshold(gray, 0, 255, cv2.THRESH_OTSU)
    white_left_right(binary)
    inverted = cv2.bitwise_not(binary)
    # calculate moments of binary image
    moments = cv2.moments(inverted)
    # An all-black mask (m00 == 0) means the binarization produced nothing usable.
    if moments["m00"] == 0:
        print("m00 is 0", imagePath)
        return (False, False, "m00 is 0, bad format image!")
    nu02 = moments["nu02"]
    print("nu02", nu02)
    return (True, nu02 >= Threshold, "")
def white_left_right(image):
    """Whiten (set to 255) the left 25 and right 25 pixel columns in place.

    Assumes a 240-column image array (columns 0-24 and 215-239 are cleared).
    """
    image[:, :25] = 255
    image[:, 215:240] = 255
def preprocess(l):
    """Crop, pad and downsample one uploaded image.

    :param l: tuple of (filename, postfix, output_filename,
              crop_blank_default_size, pad_size, buckets, downsample_ratio)
    :return: True if all steps completed, False on IOError.
    """
    filename, postfix, output_filename, crop_blank_default_size, pad_size, buckets, downsample_ratio = l
    postfix_length = len(postfix)  # NOTE(review): unused
    try:
        im1 = image_utils.crop_image(filename, output_filename, crop_blank_default_size)
        im2 = image_utils.pad_image(im1, output_filename, pad_size, buckets)
        # NOTE(review): the downsample status is ignored — True is returned
        # even if downsample_image reports failure; confirm intent.
        status = image_utils.downsample_image(im2, output_filename, downsample_ratio)
        im1.close()
        im2.close()
        return True
    except IOError:
        # NOTE(review): im1/im2 are not closed on this error path.
        app.logger.info("IOError in preprocesing")
        return False
def detokenizer(s):
    """Clean up a tokenized LaTeX string.

    Collapses \\left / \\right delimiter commands to bare delimiters,
    trims surrounding whitespace, and removes the single spaces the
    tokenizer inserted inside numeric tokens (e.g. "3 . 5" -> "3.5",
    "12 cm" -> "12cm").
    """
    # Replace each delimiter command with the plain delimiter it wraps.
    for command, delimiter in (
        (r"\left{", "{"),
        (r"\left\(", r"\("),
        (r"\left[", "["),
        (r"\right}", "}"),
        (r"\right\)", r"\)"),
        (r"\right]", "]"),
    ):
        s = s.replace(command, delimiter)
    s = s.strip()
    # Drop a space whose left neighbour is a digit or '.' and whose right
    # neighbour is alphanumeric or '.', i.e. a space splitting one token.
    pieces = []
    for i, ch in enumerate(s):
        if ch == " " and ("0" <= s[i - 1] <= "9" or s[i - 1] == "."):
            nxt = s[i + 1]
            if nxt.isalpha() or "0" <= nxt <= "9" or nxt == ".":
                continue
        pieces.append(ch)
    return "".join(pieces)
| [
"argparse.ArgumentParser",
"flask.Flask",
"cv2.threshold",
"os.system",
"time.time",
"net.wyun.blankanswer.loader.SimplePreprocessor.SimplePreprocessor",
"net.wyun.blankanswer.image_utils.crop_image",
"net.wyun.blankanswer.image_utils.pad_image",
"net.wyun.blankanswer.image_utils.downsample_image",
... | [((605, 620), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (610, 620), False, 'from flask import Flask\n'), ((630, 676), 'net.wyun.blankanswer.loader.SimplePreprocessor.SimplePreprocessor', 'SimplePreprocessor.SimplePreprocessor', (['(240)', '(95)'], {}), '(240, 95)\n', (667, 676), False, 'from net.wyun.blankanswer.loader import SimplePreprocessor\n'), ((794, 906), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""translate.py"""', 'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter'}), "(description='translate.py', formatter_class=\n argparse.ArgumentDefaultsHelpFormatter)\n", (817, 906), False, 'import argparse\n'), ((2873, 2894), 'cv2.imread', 'cv2.imread', (['imagePath'], {}), '(imagePath)\n', (2883, 2894), False, 'import cv2\n'), ((3150, 3189), 'cv2.cvtColor', 'cv2.cvtColor', (['image', 'cv2.COLOR_BGR2GRAY'], {}), '(image, cv2.COLOR_BGR2GRAY)\n', (3162, 3189), False, 'import cv2\n'), ((3205, 3255), 'cv2.threshold', 'cv2.threshold', (['gray_image', '(0)', '(255)', 'cv2.THRESH_OTSU'], {}), '(gray_image, 0, 255, cv2.THRESH_OTSU)\n', (3218, 3255), False, 'import cv2\n'), ((3296, 3315), 'cv2.bitwise_not', 'cv2.bitwise_not', (['th'], {}), '(th)\n', (3311, 3315), False, 'import cv2\n'), ((3365, 3386), 'cv2.moments', 'cv2.moments', (['mask_inv'], {}), '(mask_inv)\n', (3376, 3386), False, 'import cv2\n'), ((1525, 1576), 'os.system', 'os.system', (["('echo ' + filename + '>uploads/test.txt')"], {}), "('echo ' + filename + '>uploads/test.txt')\n", (1534, 1576), False, 'import os\n'), ((4118, 4192), 'net.wyun.blankanswer.image_utils.crop_image', 'image_utils.crop_image', (['filename', 'output_filename', 'crop_blank_default_size'], {}), '(filename, output_filename, crop_blank_default_size)\n', (4140, 4192), False, 'from net.wyun.blankanswer import image_utils\n'), ((4207, 4269), 'net.wyun.blankanswer.image_utils.pad_image', 'image_utils.pad_image', (['im1', 'output_filename', 'pad_size', 'buckets'], {}), '(im1, 
output_filename, pad_size, buckets)\n', (4228, 4269), False, 'from net.wyun.blankanswer import image_utils\n'), ((4288, 4356), 'net.wyun.blankanswer.image_utils.downsample_image', 'image_utils.downsample_image', (['im2', 'output_filename', 'downsample_ratio'], {}), '(im2, output_filename, downsample_ratio)\n', (4316, 4356), False, 'from net.wyun.blankanswer import image_utils\n'), ((504, 515), 'time.time', 'time.time', ([], {}), '()\n', (513, 515), False, 'import time\n')] |
"""
An experiment trying to bug out the geometry shader sprite culling.
If the culling algorithm is wrong sprites can disappear before they
leave the screen.
Simply run the program and move draw the sprites around using the mouse.
"""
from arcade.sprite import Sprite
import PIL
import arcade
class GeoCullingTest(arcade.Window):
    """Window that stresses sprite culling: drag the mouse to pan the view."""
    def __init__(self):
        super().__init__(800, 400, "Cull test", resizable=True)
        # Cache the current 2D projection so mouse drags can offset it.
        self.proj = self.ctx.projection_2d
        # A very wide, very thin texture — an extreme sprite shape for culling.
        self.texture = arcade.Texture("weird_texture", image=PIL.Image.new("RGBA", (2048, 2), (255, 255, 255, 255)))
        self.spritelist = arcade.SpriteList()
        self.spritelist.append(Sprite(":resources:images/tiles/boxCrate_double.png", center_x=400, center_y=300, scale=6))
        # Fan the thin sprite out at ten rotations around the same center.
        for i in range(0, 360, 36):
            self.spritelist.append(
                arcade.Sprite(texture=self.texture, center_x=400, center_y=300, angle=i)
            )
        self.spritelist.append(Sprite(":resources:images/items/gold_1.png", center_x=400, center_y=300))
    def on_draw(self):
        self.clear()
        # Re-apply the (possibly dragged) projection before drawing.
        self.ctx.projection_2d = self.proj
        self.spritelist.draw()
    def on_resize(self, width, height):
        super().on_resize(width, height)
        # Resizing resets the context projection; re-capture it.
        self.proj = self.ctx.projection_2d
    def on_mouse_drag(self, x: float, y: float, dx: float, dy: float, buttons: int, modifiers: int):
        # Pan: shift left/right bounds by -dx and bottom/top bounds by -dy.
        self.proj = (
            self.proj[0] - dx,
            self.proj[1] - dx,
            self.proj[2] - dy,
            self.proj[3] - dy,
        )
# Create the window and enter the arcade event loop.
window = GeoCullingTest()
arcade.run()
| [
"PIL.Image.new",
"arcade.sprite.Sprite",
"arcade.run",
"arcade.SpriteList",
"arcade.Sprite"
] | [((1564, 1576), 'arcade.run', 'arcade.run', ([], {}), '()\n', (1574, 1576), False, 'import arcade\n'), ((610, 629), 'arcade.SpriteList', 'arcade.SpriteList', ([], {}), '()\n', (627, 629), False, 'import arcade\n'), ((661, 755), 'arcade.sprite.Sprite', 'Sprite', (['""":resources:images/tiles/boxCrate_double.png"""'], {'center_x': '(400)', 'center_y': '(300)', 'scale': '(6)'}), "(':resources:images/tiles/boxCrate_double.png', center_x=400,\n center_y=300, scale=6)\n", (667, 755), False, 'from arcade.sprite import Sprite\n'), ((960, 1032), 'arcade.sprite.Sprite', 'Sprite', (['""":resources:images/items/gold_1.png"""'], {'center_x': '(400)', 'center_y': '(300)'}), "(':resources:images/items/gold_1.png', center_x=400, center_y=300)\n", (966, 1032), False, 'from arcade.sprite import Sprite\n'), ((527, 581), 'PIL.Image.new', 'PIL.Image.new', (['"""RGBA"""', '(2048, 2)', '(255, 255, 255, 255)'], {}), "('RGBA', (2048, 2), (255, 255, 255, 255))\n", (540, 581), False, 'import PIL\n'), ((841, 913), 'arcade.Sprite', 'arcade.Sprite', ([], {'texture': 'self.texture', 'center_x': '(400)', 'center_y': '(300)', 'angle': 'i'}), '(texture=self.texture, center_x=400, center_y=300, angle=i)\n', (854, 913), False, 'import arcade\n')] |
from hivemind import app
from flask import flash, redirect, render_template, request, url_for
from mcipc.query import Client as QClient
@app.route('/', methods=['GET', 'POST'])
def index():
    """Render the server status page on GET; echo the posted form on POST."""
    if request.method == 'GET':
        # Query the Minecraft server for its current full status.
        with QClient("diseased.horse", 25565) as q:
            stats = q.full_stats
        return render_template(
            "index.html",
            status="Unknown",
            online_players=stats["num_players"],
            total_players=stats["max_players"],
            # Bug fix: the original read status["hostname"], but `status` is
            # not defined here (NameError) — the MOTD lives in the query stats.
            motd=stats["hostname"],
            players=["OO"],
        )
    else:
        return str(request.form)
| [
"flask.render_template",
"mcipc.query.Client",
"hivemind.app.route"
] | [((139, 178), 'hivemind.app.route', 'app.route', (['"""/"""'], {'methods': "['GET', 'POST']"}), "('/', methods=['GET', 'POST'])\n", (148, 178), False, 'from hivemind import app\n'), ((342, 513), 'flask.render_template', 'render_template', (['"""index.html"""'], {'status': '"""Unknown"""', 'online_players': "stats['num_players']", 'total_players': "stats['max_players']", 'motd': "status['hostname']", 'players': "['OO']"}), "('index.html', status='Unknown', online_players=stats[\n 'num_players'], total_players=stats['max_players'], motd=status[\n 'hostname'], players=['OO'])\n", (357, 513), False, 'from flask import flash, redirect, render_template, request, url_for\n'), ((246, 278), 'mcipc.query.Client', 'QClient', (['"""diseased.horse"""', '(25565)'], {}), "('diseased.horse', 25565)\n", (253, 278), True, 'from mcipc.query import Client as QClient\n')] |
# Generated by Django 3.0.5 on 2020-04-19 18:06
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated schema migration (Django 3.0.5) — alters ForeignKey
    # options on the Play and Round models.

    dependencies = [
        # Depends on whichever user model the project has configured,
        # plus the previous basta migration.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('basta', '0004_session_slug'),
    ]

    operations = [
        # Play.cur_round: CASCADE on delete, reverse accessor 'play_set'.
        migrations.AlterField(
            model_name='play',
            name='cur_round',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='play_set', to='basta.Round'),
        ),
        # Play.user: CASCADE on delete, reverse accessor 'play_set'.
        migrations.AlterField(
            model_name='play',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='play_set', to=settings.AUTH_USER_MODEL),
        ),
        # Round.session: CASCADE on delete, reverse accessor 'round_set'.
        migrations.AlterField(
            model_name='round',
            name='session',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='round_set', to='basta.Session', verbose_name='Session'),
        ),
    ]
| [
"django.db.migrations.swappable_dependency",
"django.db.models.ForeignKey"
] | [((227, 284), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (258, 284), False, 'from django.db import migrations, models\n'), ((462, 572), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""play_set"""', 'to': '"""basta.Round"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='play_set', to='basta.Round')\n", (479, 572), False, 'from django.db import migrations, models\n'), ((685, 806), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""play_set"""', 'to': 'settings.AUTH_USER_MODEL'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='play_set', to=settings.AUTH_USER_MODEL)\n", (702, 806), False, 'from django.db import migrations, models\n'), ((923, 1060), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""round_set"""', 'to': '"""basta.Session"""', 'verbose_name': '"""Session"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='round_set', to='basta.Session', verbose_name='Session')\n", (940, 1060), False, 'from django.db import migrations, models\n')] |
'''
Copyright 2015 University of Auckland
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import sys
from PySide2 import QtCore
import os.path
# Application version, exposed both as a list of components and as the
# dotted "major.minor.patch" string.
VERSION_MAJOR = 0
VERSION_MINOR = 2
VERSION_PATCH = 0
VERSION_LIST = [VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH]
VERSION_STRING = ".".join(str(part) for part in VERSION_LIST)

APPLICATION_NAME = 'Neon'
ORGANISATION_NAME = 'OpenCMISS'
ORGANISATION_DOMAIN = 'opencmiss.org'

# Bundled binaries/data live four directories above this module.
_PACKAGE_DIR = os.path.dirname(__file__)
EXTERNAL_BINARIES_DIR = os.path.realpath(os.path.join(_PACKAGE_DIR, '..', '..', '..', '..', 'bin'))
EXTERNAL_DATA_DIR = os.path.realpath(os.path.join(_PACKAGE_DIR, '..', '..', '..', '..', 'data'))

# True when running under Python 3.
PYTHON3 = sys.version_info > (3, 0)
def setApplicationSettings(app):
    """Apply Neon's identity (organisation, name, version) to the given Qt
    application and make QSettings default to INI-format storage."""
    app.setOrganizationDomain(ORGANISATION_DOMAIN)
    app.setOrganizationName(ORGANISATION_NAME)
    app.setApplicationName(APPLICATION_NAME)
    app.setApplicationVersion(VERSION_STRING)
    QtCore.QSettings.setDefaultFormat(QtCore.QSettings.IniFormat)
| [
"PySide2.QtCore.QSettings.setDefaultFormat"
] | [((1488, 1549), 'PySide2.QtCore.QSettings.setDefaultFormat', 'QtCore.QSettings.setDefaultFormat', (['QtCore.QSettings.IniFormat'], {}), '(QtCore.QSettings.IniFormat)\n', (1521, 1549), False, 'from PySide2 import QtCore\n')] |
#!/usr/bin/env python
import selenium, time, os, platform
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from getpass import getpass
import info
# Credentials come from info.py; prompt interactively for anything missing.
username = info.username
pw = info.password
if username == "":
    username = input("Enter your BU username:")
if pw == "":
    pw = getpass("Enter your password:")

# Pick the right chromedriver binary for the host operating system.
# (On any other OS `web` stays undefined and the script fails below.)
osys = platform.system()
if osys == 'Windows':
    web = webdriver.Chrome('chromedriver.exe')
elif osys == 'Darwin':
    web = webdriver.Chrome()

start = time.time()
web.get('https://www.bu.edu/shs/getting-started/using-patient-connect/')
wait = WebDriverWait(web, 10)
wait.until(EC.element_to_be_clickable((By.XPATH, '//*[@id="post-7513"]/p[2]/a'))).click()
# The patient portal opens in a new tab.
web.switch_to.window(web.window_handles[1])
# Log-in screen.
username_input = wait.until(EC.presence_of_element_located((By.XPATH, '//*[@id="j_username"]')))
username_input.send_keys(username)
pw_input = web.find_element_by_xpath('//*[@id="j_password"]')
pw_input.send_keys(pw)
login_button = web.find_element_by_xpath('/html/body/div[1]/div/form/button')
login_button.click()
print("Logging in...")
# Open the survey.
survey_button = wait.until(
    EC.element_to_be_clickable((By.LINK_TEXT, 'Complete Survey'))
)
survey_button.click()
time.sleep(0.5)
continue_button = wait.until(
    EC.presence_of_element_located((By.XPATH, '//*[@id="mainbody"]/div[2]/div[1]/div/div[2]/a'))
)
continue_button.click()
# Answer all eight survey questions (form divs 2-9) with the first option.
# The original repeated this click line eight times verbatim.
time.sleep(0.5)
for question in range(2, 10):
    web.find_element_by_xpath(
        '//*[@id="mainbody"]/main/form/div[%d]/fieldset/div/div[1]/div' % question
    ).click()
print('Survey Complete')
# Submit the survey.
web.find_element_by_xpath('//*[@id="mainbody"]/footer/div/div[2]/input').click()
print('Survey Submitted')
print('This took: %.2f seconds' % (time.time() - start))
| [
"selenium.webdriver.support.ui.WebDriverWait",
"selenium.webdriver.Chrome",
"getpass.getpass",
"time.sleep",
"platform.system",
"selenium.webdriver.support.expected_conditions.presence_of_element_located",
"time.time",
"selenium.webdriver.support.expected_conditions.element_to_be_clickable"
] | [((557, 574), 'platform.system', 'platform.system', ([], {}), '()\n', (572, 574), False, 'import selenium, time, os, platform\n'), ((848, 859), 'time.time', 'time.time', ([], {}), '()\n', (857, 859), False, 'import selenium, time, os, platform\n'), ((941, 963), 'selenium.webdriver.support.ui.WebDriverWait', 'WebDriverWait', (['web', '(10)'], {}), '(web, 10)\n', (954, 963), False, 'from selenium.webdriver.support.ui import WebDriverWait\n'), ((1657, 1672), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (1667, 1672), False, 'import selenium, time, os, platform\n'), ((1862, 1877), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (1872, 1877), False, 'import selenium, time, os, platform\n'), ((476, 507), 'getpass.getpass', 'getpass', (['"""Enter your password:"""'], {}), "('Enter your password:')\n", (483, 507), False, 'from getpass import getpass\n'), ((711, 747), 'selenium.webdriver.Chrome', 'webdriver.Chrome', (['"""chromedriver.exe"""'], {}), "('chromedriver.exe')\n", (727, 747), False, 'from selenium import webdriver\n'), ((1157, 1224), 'selenium.webdriver.support.expected_conditions.presence_of_element_located', 'EC.presence_of_element_located', (['(By.XPATH, \'//*[@id="j_username"]\')'], {}), '((By.XPATH, \'//*[@id="j_username"]\'))\n', (1187, 1224), True, 'from selenium.webdriver.support import expected_conditions as EC\n'), ((1538, 1599), 'selenium.webdriver.support.expected_conditions.element_to_be_clickable', 'EC.element_to_be_clickable', (["(By.LINK_TEXT, 'Complete Survey')"], {}), "((By.LINK_TEXT, 'Complete Survey'))\n", (1564, 1599), True, 'from selenium.webdriver.support import expected_conditions as EC\n'), ((1715, 1811), 'selenium.webdriver.support.expected_conditions.presence_of_element_located', 'EC.presence_of_element_located', (['(By.XPATH, \'//*[@id="mainbody"]/div[2]/div[1]/div/div[2]/a\')'], {}), '((By.XPATH,\n \'//*[@id="mainbody"]/div[2]/div[1]/div/div[2]/a\'))\n', (1745, 1811), True, 'from selenium.webdriver.support import 
expected_conditions as EC\n'), ((787, 805), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {}), '()\n', (803, 805), False, 'from selenium import webdriver\n'), ((975, 1044), 'selenium.webdriver.support.expected_conditions.element_to_be_clickable', 'EC.element_to_be_clickable', (['(By.XPATH, \'//*[@id="post-7513"]/p[2]/a\')'], {}), '((By.XPATH, \'//*[@id="post-7513"]/p[2]/a\'))\n', (1001, 1044), True, 'from selenium.webdriver.support import expected_conditions as EC\n'), ((2845, 2856), 'time.time', 'time.time', ([], {}), '()\n', (2854, 2856), False, 'import selenium, time, os, platform\n')] |
# -*- coding: utf-8 -*-
"""
Created on Wed Nov 16 15:31:16 2016
@author: shaw
"""
import xlrd
import matplotlib.pyplot as plt
from matplotlib.ticker import MultipleLocator, FormatStrFormatter
import numpy as np
from pylab import *
from itertools import product
from matplotlib.colors import LogNorm
#import gc
def config():
    """Return the voxel-grid geometry and the known tag positions.

    The monitored area is 230 x 200 split into 2 x 2 voxels. Returns
    (j, num_l, num_w, m, n, cte, x1, y1) where j is the total voxel count,
    num_l/num_w the grid cell counts along each dimension, m/n the voxel
    step sizes, cte half a voxel step (used as the centre offset in
    center_voxel), and x1/y1 map a tag (EPC) index to its x/y coordinate.
    """
    # gc.collect()
    m = 2
    n = 2
    length = 230
    wide = 200
    cte = m / 2
    num_l = length / m
    num_w = wide / n
    square = length * wide
    pixel = m * n
    j = int(square / pixel)
    # Hard-coded reference-tag coordinates, keyed by tag index.
    x1 = {1: 10, 2: 30, 3: 55, 4: 75, 5: 145, 6: 165, 7: 100, 8: 120, 9: 10, 10: 30,
          11: 55, 12: 75, 13: 145, 14: 165, 15: 100, 16: 120, 17: 10, 18: 30, 19: 55, 20: 75,
          21: 145, 22: 165, 23: 100, 24: 120, 25: 10, 26: 30, 27: 55, 28: 75, 29: 145, 30: 165,
          31: 100, 32: 120, 33: 10, 34: 30, 35: 55, 36: 75, 37: 145, 38: 165, 39: 100, 40: 120,
          41: 100, 42: 120, 43: 10, 44: 90, 45: 55, 46: 75, 47: 145, 48: 165, 49: 100, 50: 120,
          51: 10, 52: 30, 53: 55, 54: 75, 55: 145, 56: 165, 57: 100, 58: 120, 59: 10, 60: 30,
          61: 55, 62: 75, 63: 145, 64: 165, 65: 100, 66: 120, 67: 10, 68: 30, 69: 55, 70: 75,
          71: 145, 72: 165, 73: 100, 74: 120, 75: 10, 76: 30, 77: 55, 78: 75, 79: 145, 80: 165,
          97: 190, 99: 210, 100: 190, 101: 210, 102: 190, 103: 210, 104: 190, 105: 210, 106: 190, 107: 210,
          108: 190, 109: 210, 110: 190, 112: 210, 113: 190, 114: 210, 115: 190, 116: 210, 117: 190, 118: 210}
    y1 = {1: 190, 2: 190, 3: 190, 4: 190, 5: 90, 6: 90, 7: 190, 8: 190, 9: 170, 10: 170,
          11: 170, 12: 170, 13: 70, 14: 70, 15: 170, 16: 170, 17: 150, 18: 150, 19: 150, 20: 150,
          21: 50, 22: 50, 23: 150, 24: 150, 25: 130, 26: 130, 27: 130, 28: 130, 29: 30, 30: 30,
          31: 130, 32: 130, 33: 110, 34: 110, 35: 110, 36: 110, 37: 10, 38: 10, 39: 110, 40: 110,
          41: 90, 42: 90, 43: 90, 44: 90, 45: 90, 46: 90, 47: 190, 48: 190, 49: 70, 50: 70,
          51: 70, 52: 70, 53: 70, 54: 70, 55: 170, 56: 170, 57: 50, 58: 50, 59: 50, 60: 50,
          61: 50, 62: 50, 63: 150, 64: 150, 65: 30, 66: 30, 67: 30, 68: 30, 69: 30, 70: 30,
          71: 130, 72: 130, 73: 10, 74: 10, 75: 10, 76: 10, 77: 10, 78: 10, 79: 110, 80: 110,
          97: 190, 99: 190, 100: 170, 101: 170, 102: 150, 103: 150, 104: 130, 105: 130, 106: 110, 107: 110,
          108: 90, 109: 90, 110: 70, 112: 70, 113: 50, 114: 50, 115: 30, 116: 30, 117: 10, 118: 10}
    return j, num_l, num_w, m, n, cte, x1, y1
def center_voxel(j, cte, num_l, num_w, m, n):
    """Build the array of voxel-centre coordinates.

    Walks the grid column by column — num_w centres per column, spaced m
    apart in y and n apart in x, each offset by cte — until j centres have
    been produced. Returns a (j, 2) numpy array of [x, y] centres.
    """
    centers = []
    col = 0
    while len(centers) < j and col < num_l:
        row = 0
        while len(centers) < j and row < num_w:
            centers.append([cte + n * col, cte + m * row])
            row += 1
        col += 1
    return np.array(centers)
def _record_sample(rssi1, rssi2, epc, x1, y1, v, j, p, y, a):
    """Record one matched reading.

    Appends |rssi1 - rssi2| to y, and appends the j-element voxel-selection
    row for tag `epc` to the flat list a (1 when the tag lies within
    distance p of a voxel centre, else 0).
    """
    y.append(abs(rssi1 - rssi2))
    for num in range(j):
        d = ((x1[epc] - v[num][0]) ** 2 + (y1[epc] - v[num][1]) ** 2) ** 0.5
        a.append(1 if d < p else 0)


def vector(path1, path2, x1, y1, v, j):
    """Compare two RFID reading logs and return a per-voxel intensity column.

    Both spreadsheets are expected to hold one reading per row starting at
    row 1, with the tag (EPC) index in column 0, the antenna index in
    column 1 and the RSSI in column 6. The two logs are walked in lockstep,
    pairing readings by (tag, antenna); when a reading exists in only one
    log, -80 dBm is substituted for the missing side. For every pair an
    RSSI difference is appended to y and a binary voxel row (tag within
    p = 10 of the voxel centre) is appended to the selection matrix w.

    Returns w.T . y as a (j, 1) array — the original's repeated inline
    append-and-distance blocks are factored into _record_sample; control
    flow and row-advancement order are unchanged.
    """
    sh1 = xlrd.open_workbook(path1).sheet_by_index(0)
    sh2 = xlrd.open_workbook(path2).sheet_by_index(0)
    r1 = 1
    r2 = 1
    y = []   # RSSI differences, one per matched reading
    a = []   # flattened binary selection matrix, j entries per reading
    p = 10   # tag-to-voxel distance threshold
    while (r1 + 1 <= sh1.nrows - 1) and (r2 + 1 <= sh2.nrows - 1):
        epc1 = int(sh1.cell_value(r1, 0))
        epc2 = int(sh2.cell_value(r2, 0))
        epc_next1 = int(sh1.cell_value(r1 + 1, 0))
        epc_next2 = int(sh2.cell_value(r2 + 1, 0))
        ante1 = int(sh1.cell_value(r1, 1))
        ante2 = int(sh2.cell_value(r2, 1))
        ante_next1 = int(sh1.cell_value(r1 + 1, 1))
        ante_next2 = int(sh2.cell_value(r2 + 1, 1))
        if epc1 == epc2 and epc1 == epc_next1 and epc2 == epc_next2:
            # Same tag on both sides, and the tag continues on both sides.
            if ante1 == ante2:
                if ante1 == ante_next1 and ante2 == ante_next2:
                    r1 += 1
                    r2 += 1
                elif ante1 == ante_next1 and ante2 != ante_next2:
                    # Sheet 1 repeats this antenna: read its NEXT row.
                    r1 += 1
                    _record_sample(float(sh1.cell_value(r1, 6)),
                                   float(sh2.cell_value(r2, 6)),
                                   epc1, x1, y1, v, j, p, y, a)
                elif ante1 != ante_next1 and ante2 == ante_next2:
                    # Sheet 2 repeats this antenna: read its NEXT row.
                    r2 += 1
                    _record_sample(float(sh1.cell_value(r1, 6)),
                                   float(sh2.cell_value(r2, 6)),
                                   epc1, x1, y1, v, j, p, y, a)
                elif ante1 != ante_next1 and ante2 != ante_next2:
                    _record_sample(float(sh1.cell_value(r1, 6)),
                                   float(sh2.cell_value(r2, 6)),
                                   epc1, x1, y1, v, j, p, y, a)
                    r1 += 1
                    r2 += 1
            elif ante1 > ante2:
                # Sheet 2 is behind on antennas; reading missing in sheet 2.
                if ante1 != ante_next1 and ante2 != ante_next2:
                    _record_sample(float(sh1.cell_value(r1, 6)), -80,
                                   epc1, x1, y1, v, j, p, y, a)
                    r2 += 1
                else:
                    r2 += 1
            elif ante2 > ante1:
                # Sheet 1 is behind on antennas; reading missing in sheet 1.
                if ante2 != ante_next2 and ante1 != ante_next1:
                    _record_sample(-80, float(sh2.cell_value(r2, 6)),
                                   epc1, x1, y1, v, j, p, y, a)
                    r1 += 1
                else:
                    r1 += 1
        elif epc1 == epc2 and (epc1 != epc_next1 or epc2 != epc_next2):
            # Same tag, but it ends on at least one side: record and advance both.
            if ante1 == ante2:
                _record_sample(float(sh1.cell_value(r1, 6)),
                               float(sh2.cell_value(r2, 6)),
                               epc1, x1, y1, v, j, p, y, a)
            elif ante1 > ante2:
                _record_sample(float(sh1.cell_value(r1, 6)), -80,
                               epc1, x1, y1, v, j, p, y, a)
            elif ante2 > ante1:
                _record_sample(-80, float(sh2.cell_value(r2, 6)),
                               epc1, x1, y1, v, j, p, y, a)
            r1 += 1
            r2 += 1
        elif epc1 > epc2:
            # Tag present only in sheet 2.
            epc_before1 = int(sh1.cell_value(r1 - 1, 0))
            if epc_before1 != epc2 and epc1 != epc_next2:
                _record_sample(-80, float(sh2.cell_value(r2, 6)),
                               epc2, x1, y1, v, j, p, y, a)
                r2 += 1
            else:
                r2 += 1
        elif epc2 > epc1:
            # Tag present only in sheet 1.
            epc_before2 = int(sh2.cell_value(r2 - 1, 0))
            if epc_before2 != epc1 and epc2 != epc_next1:
                _record_sample(float(sh1.cell_value(r1, 6)), -80,
                               epc1, x1, y1, v, j, p, y, a)
                r1 += 1
            else:
                r1 += 1
    count = len(y)  # one y entry (and one j-wide row of a) per recorded pair
    w = np.array(a).reshape(count, j)
    t = np.mat(y)
    h = np.dot(w.T, t.T)
    return h.getA()
#def vector_x(w, y,j):
# q = np.identity(j)
# t= np.matrix(y)
# print t
# l = 40
# x = (w.T * w + l * q.T * q).I * w.T * t.T
# count_length = length / n
# count_wide = wide / m
# x = t.reshape([count_wide, count_length])
# return x
def draw(v, x):
    """Scatter-plot the voxel centres `v` coloured by intensity `x`.

    Axes span the 230 x 200 monitored area with major ticks every 20 units.
    Returns 0 (kept for compatibility with the original).
    """
    plt.xlim(0, 230)
    plt.ylim(0, 200)
    ax = plt.gca()
    ax.xaxis.set_major_locator(MultipleLocator(20))
    ax.yaxis.set_major_locator(MultipleLocator(20))
    # `scatter` comes from the module's `from pylab import *`.
    scatter(v[:, 0], v[:, 1], c=x, edgecolor="none")
    return 0
def main():
    # Reference (origin) and displaced (stand1) RSSI capture spreadsheets.
    origin = 'C:/Users/shaw/Desktop/record/new/origin_2.4m.xlsx'
    stand1 = 'C:/Users/shaw/Desktop/record/new/stand1_0.6m.xlsx'
    path1 = origin
    path2 = stand1
    # Grid geometry + tag positions, voxel centres, then the per-voxel
    # intensity vector from the two logs, finally plotted as a scatter map.
    (j, num_l, num_w, m, n, cte, x1, y1) = config()
    v = center_voxel(j, cte, num_l, num_w, m, n)
    x = vector(path1, path2, x1, y1, v, j)
    # x = vector_x(w, y,j)
    # print x
    draw(v, x)
    # show()

if __name__ == '__main__':
    main()
"numpy.mat",
"matplotlib.ticker.MultipleLocator",
"matplotlib.pyplot.gca",
"xlrd.open_workbook",
"numpy.array",
"numpy.dot",
"matplotlib.pyplot.ylim",
"matplotlib.pyplot.xlim"
] | [((2998, 3009), 'numpy.array', 'np.array', (['t'], {}), '(t)\n', (3006, 3009), True, 'import numpy as np\n'), ((3082, 3107), 'xlrd.open_workbook', 'xlrd.open_workbook', (['path1'], {}), '(path1)\n', (3100, 3107), False, 'import xlrd\n'), ((3122, 3147), 'xlrd.open_workbook', 'xlrd.open_workbook', (['path2'], {}), '(path2)\n', (3140, 3147), False, 'import xlrd\n'), ((11222, 11233), 'numpy.array', 'np.array', (['a'], {}), '(a)\n', (11230, 11233), True, 'import numpy as np\n'), ((11369, 11378), 'numpy.mat', 'np.mat', (['y'], {}), '(y)\n', (11375, 11378), True, 'import numpy as np\n'), ((11525, 11541), 'numpy.dot', 'np.dot', (['w.T', 't.T'], {}), '(w.T, t.T)\n', (11531, 11541), True, 'import numpy as np\n'), ((12119, 12135), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(0)', '(230)'], {}), '(0, 230)\n', (12127, 12135), True, 'import matplotlib.pyplot as plt\n'), ((12140, 12156), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(0)', '(200)'], {}), '(0, 200)\n', (12148, 12156), True, 'import matplotlib.pyplot as plt\n'), ((12166, 12175), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (12173, 12175), True, 'import matplotlib.pyplot as plt\n'), ((12314, 12333), 'matplotlib.ticker.MultipleLocator', 'MultipleLocator', (['(20)'], {}), '(20)\n', (12329, 12333), False, 'from matplotlib.ticker import MultipleLocator, FormatStrFormatter\n'), ((12367, 12386), 'matplotlib.ticker.MultipleLocator', 'MultipleLocator', (['(20)'], {}), '(20)\n', (12382, 12386), False, 'from matplotlib.ticker import MultipleLocator, FormatStrFormatter\n')] |
from __future__ import print_function
import sys, random, json, os, tempfile
from collections import Counter
import numpy as np
INSIDE_BLENDER = True
try:
import bpy
from mathutils import Vector
except ImportError as e:
INSIDE_BLENDER = False
if INSIDE_BLENDER:
try:
import utils
except ImportError as e:
print("\nERROR")
print("Running render_images.py from Blender and cannot import utils.py.")
print("You may need to add a .pth file to the site-packages of Blender's")
print("bundled python with a command like this:\n")
print("echo $PWD >> $BLENDER/$VERSION/python/lib/python3.5/site-packages/clevr.pth")
print("\nWhere $BLENDER is the directory where Blender is installed, and")
print("$VERSION is your Blender version (such as 2.78).")
sys.exit(1)
def render_scene(args,
    output_image='render.png',
    output_scene='render_json',
    output_blendfile=None,
    objects=None,
    **kwargs
  ):
  """
  Render one scene containing `objects` and dump its ground-truth structure.

  Loads the base blendfile, configures the CYCLES renderer (optionally on
  GPU), jitters camera/lamps, computes the plane-aligned direction vectors,
  adds the objects, renders to `output_image`, and writes the scene struct
  (objects, directions, relationships, plus any extra kwargs) as JSON to
  `output_scene`. Optionally saves the .blend to `output_blendfile`.
  """
  # Fix: the original used a mutable default (objects=[]) — a shared list
  # reused across calls. None sentinel is backward-compatible.
  if objects is None:
    objects = []
  # Load the main blendfile
  bpy.ops.wm.open_mainfile(filepath=args.base_scene_blendfile)
  # Load materials
  utils.load_materials(args.material_dir)
  # Set render arguments so we can get pixel coordinates later.
  # We use functionality specific to the CYCLES renderer so BLENDER_RENDER
  # cannot be used.
  render_args = bpy.context.scene.render
  render_args.engine = "CYCLES"
  render_args.filepath = output_image
  render_args.resolution_x = args.width
  render_args.resolution_y = args.height
  render_args.resolution_percentage = 100
  render_args.tile_x = args.render_tile_size
  render_args.tile_y = args.render_tile_size
  if args.use_gpu == 1:
    # Blender changed the API for enabling CUDA at some point
    if bpy.app.version < (2, 78, 0):
      bpy.context.user_preferences.system.compute_device_type = 'CUDA'
      bpy.context.user_preferences.system.compute_device = 'CUDA_0'
    else:
      cycles_prefs = bpy.context.user_preferences.addons['cycles'].preferences
      cycles_prefs.compute_device_type = 'CUDA'
  # Some CYCLES-specific stuff
  bpy.data.worlds['World'].cycles.sample_as_light = True
  bpy.context.scene.cycles.blur_glossy = 2.0
  bpy.context.scene.cycles.samples = args.render_num_samples
  bpy.context.scene.cycles.transparent_min_bounces = args.render_min_bounces
  bpy.context.scene.cycles.transparent_max_bounces = args.render_max_bounces
  if args.use_gpu == 1:
    bpy.context.scene.cycles.device = 'GPU'
  # This will give ground-truth information about the scene and its objects
  scene_struct = {
      'image_filename': os.path.basename(output_image),
      'objects': [],
      'directions': {},
  }
  scene_struct.update(kwargs)
  if bpy.app.version < (2, 80, 0):
    bpy.ops.mesh.primitive_plane_add(radius=5)
  else:
    bpy.ops.mesh.primitive_plane_add(size=5)
  plane = bpy.context.object
  def rand(L):
    return 2.0 * L * (random.random() - 0.5)
  # Add random jitter to camera position
  if args.camera_jitter > 0:
    for i in range(3):
      bpy.data.objects['Camera'].location[i] += rand(args.camera_jitter)
  # Figure out the left, up, and behind directions along the plane and record
  # them in the scene structure
  camera = bpy.data.objects['Camera']
  plane_normal = plane.data.vertices[0].normal
  if bpy.app.version < (2, 80, 0):
    cam_behind = camera.matrix_world.to_quaternion() * Vector((0, 0, -1))
    cam_left = camera.matrix_world.to_quaternion() * Vector((-1, 0, 0))
    cam_up = camera.matrix_world.to_quaternion() * Vector((0, 1, 0))
  else:
    cam_behind = camera.matrix_world.to_quaternion() @ Vector((0, 0, -1))
    cam_left = camera.matrix_world.to_quaternion() @ Vector((-1, 0, 0))
    cam_up = camera.matrix_world.to_quaternion() @ Vector((0, 1, 0))
  plane_behind = (cam_behind - cam_behind.project(plane_normal)).normalized()
  plane_left = (cam_left - cam_left.project(plane_normal)).normalized()
  plane_up = cam_up.project(plane_normal).normalized()
  # Delete the plane; we only used it for normals anyway. The base scene file
  # contains the actual ground plane.
  utils.delete_object(plane)
  # Save all six axis-aligned directions in the scene struct
  scene_struct['directions']['behind'] = tuple(plane_behind)
  scene_struct['directions']['front'] = tuple(-plane_behind)
  scene_struct['directions']['left'] = tuple(plane_left)
  scene_struct['directions']['right'] = tuple(-plane_left)
  scene_struct['directions']['above'] = tuple(plane_up)
  scene_struct['directions']['below'] = tuple(-plane_up)
  # Add random jitter to lamp positions
  if args.key_light_jitter > 0:
    for i in range(3):
      bpy.data.objects['Lamp_Key'].location[i] += rand(args.key_light_jitter)
  if args.back_light_jitter > 0:
    for i in range(3):
      bpy.data.objects['Lamp_Back'].location[i] += rand(args.back_light_jitter)
  if args.fill_light_jitter > 0:
    for i in range(3):
      bpy.data.objects['Lamp_Fill'].location[i] += rand(args.fill_light_jitter)
  # Now make some random objects
  blender_objects = add_objects(args, scene_struct, camera, objects)
  # Render the scene and dump the scene data structure
  scene_struct['objects'] = objects
  scene_struct['relationships'] = compute_all_relationships(scene_struct)
  while True:
    try:
      bpy.ops.render.render(write_still=True)
      break
    except Exception as e:
      print(e)
  with open(output_scene, 'w') as f:
    json.dump(scene_struct, f, indent=2)
  if output_blendfile is not None:
    bpy.ops.wm.save_as_mainfile(filepath=output_blendfile)
def add_objects(args, scene_struct, camera, objects):
  """
  Instantiate every object description in `objects` in the current blender
  scene, recording on each one its camera-space pixel coordinates and a 2D
  bounding box. Returns the list of created blender objects.
  """
  blender_objects = []
  for obj in objects:
    # Place the mesh at the requested pose and give it its material.
    utils.add_object(args.shape_dir,
                     obj["shape"],
                     obj["size"],
                     obj["location"],
                     theta=obj["rotation"])
    bobj = bpy.context.object
    blender_objects.append(bobj)
    utils.add_material(obj["material"], Color=obj["color"])
    obj["pixel_coords"] = utils.get_camera_coords(camera, bobj.location)
    centre = np.array(bobj.location)
    half_dim = np.array(bobj.dimensions) / 2
    import mathutils
    # Project the eight corners of the axis-aligned bounding box into
    # camera space and keep the extreme pixel coordinates.
    projected = []
    for sx in (1, -1):
      for sy in (1, -1):
        for sz in (1, -1):
          corner = centre + half_dim * [sx, sy, sz]
          projected.append(
              utils.get_camera_coords(camera, mathutils.Vector(tuple(corner))))
    projected = np.array(projected)
    obj["bbox"] = (float(np.amin(projected[:, 0])),
                   float(np.amin(projected[:, 1])),
                   float(np.amax(projected[:, 0])),
                   float(np.amax(projected[:, 1])))
  return blender_objects
def compute_all_relationships(scene_struct, eps=0.2):
  """
  Compute pairwise spatial relationships between all objects in the scene.

  Returns a dict mapping each in-plane direction name to a list of lists of
  integers: output[rel][i] is the sorted list of object indices j that have
  relationship rel with object i (j in output['left'][i] means object j is
  left of object i). Object j counts as related when the displacement from
  i to j projected onto the direction vector exceeds eps.
  """
  objects = scene_struct['objects']
  all_relationships = {}
  for name, direction_vec in scene_struct['directions'].items():
    # Vertical directions are not meaningful for these scenes.
    if name in ('above', 'below'):
      continue
    per_object = []
    for i, obj1 in enumerate(objects):
      coords1 = obj1['location']
      related = [j for j, obj2 in enumerate(objects)
                 if obj1 != obj2
                 and sum((obj2['location'][k] - coords1[k]) * direction_vec[k]
                         for k in range(3)) > eps]
      per_object.append(sorted(related))
    all_relationships[name] = per_object
  return all_relationships
def check_visibility(blender_objects, min_pixels_per_object):
  """
  Check whether all objects in the scene have some minimum number of visible
  pixels; to accomplish this we assign random (but distinct) colors to all
  objects, and render using no lighting or shading or antialiasing; this
  ensures that each object is just a solid uniform color. We can then count
  the number of pixels of each color in the output image to check the
  visibility of each object.

  Returns True if all objects are visible and False otherwise.
  """
  fd, path = tempfile.mkstemp(suffix='.png')
  # Fix: mkstemp returns an OPEN OS-level file descriptor which the original
  # never closed (fd leak). We only need the path — Blender writes the file.
  os.close(fd)
  object_colors = render_shadeless(blender_objects, path=path)
  img = bpy.data.images.load(path)
  p = list(img.pixels)
  # Pixels come back as a flat RGBA float sequence; count each color.
  color_count = Counter((p[i], p[i+1], p[i+2], p[i+3])
                        for i in range(0, len(p), 4))
  os.remove(path)
  # Expect one color per object plus one background color.
  if len(color_count) != len(blender_objects) + 1:
    return False
  for _, count in color_count.most_common():
    if count < min_pixels_per_object:
      return False
  return True
def render_shadeless(blender_objects, path='flat.png'):
  """
  Render a version of the scene with shading disabled and unique materials
  assigned to all objects, and return a set of all colors that should be in the
  rendered image. The image itself is written to path. This is used to ensure
  that all objects will be visible in the final rendered scene.
  """
  render_args = bpy.context.scene.render
  # Cache the render args we are about to clobber
  old_filepath = render_args.filepath
  old_engine = render_args.engine
  old_use_antialiasing = render_args.use_antialiasing
  # Override some render settings to have flat shading
  render_args.filepath = path
  render_args.engine = 'BLENDER_RENDER'
  render_args.use_antialiasing = False
  # Move the lights and ground to layer 2 so they don't render
  utils.set_layer(bpy.data.objects['Lamp_Key'], 2)
  utils.set_layer(bpy.data.objects['Lamp_Fill'], 2)
  utils.set_layer(bpy.data.objects['Lamp_Back'], 2)
  utils.set_layer(bpy.data.objects['Ground'], 2)
  # Add random shadeless materials to all objects
  object_colors = set()
  old_materials = []
  for i, obj in enumerate(blender_objects):
    # Remember each object's real material so it can be restored below.
    old_materials.append(obj.data.materials[0])
    bpy.ops.material.new()
    mat = bpy.data.materials['Material']
    mat.name = 'Material_%d' % i
    # Rejection-sample a color until it is distinct from all previous ones.
    while True:
      r, g, b = [random.random() for _ in range(3)]
      if (r, g, b) not in object_colors: break
    object_colors.add((r, g, b))
    mat.diffuse_color = [r, g, b]
    mat.use_shadeless = True
    obj.data.materials[0] = mat
  # Render the scene
  bpy.ops.render.render(write_still=True)
  # Undo the above; first restore the materials to objects
  for mat, obj in zip(old_materials, blender_objects):
    obj.data.materials[0] = mat
  # Move the lights and ground back to layer 0
  utils.set_layer(bpy.data.objects['Lamp_Key'], 0)
  utils.set_layer(bpy.data.objects['Lamp_Fill'], 0)
  utils.set_layer(bpy.data.objects['Lamp_Back'], 0)
  utils.set_layer(bpy.data.objects['Ground'], 0)
  # Set the render settings back to what they were
  render_args.filepath = old_filepath
  render_args.engine = old_engine
  render_args.use_antialiasing = old_use_antialiasing
  return object_colors
| [
"utils.add_material",
"utils.get_camera_coords",
"numpy.array",
"sys.exit",
"bpy.data.images.load",
"utils.set_layer",
"os.remove",
"mathutils.Vector",
"utils.delete_object",
"bpy.ops.wm.open_mainfile",
"utils.load_materials",
"bpy.ops.mesh.primitive_plane_add",
"utils.add_object",
"bpy.op... | [((982, 1042), 'bpy.ops.wm.open_mainfile', 'bpy.ops.wm.open_mainfile', ([], {'filepath': 'args.base_scene_blendfile'}), '(filepath=args.base_scene_blendfile)\n', (1006, 1042), False, 'import bpy\n'), ((1065, 1104), 'utils.load_materials', 'utils.load_materials', (['args.material_dir'], {}), '(args.material_dir)\n', (1085, 1104), False, 'import utils\n'), ((4023, 4049), 'utils.delete_object', 'utils.delete_object', (['plane'], {}), '(plane)\n', (4042, 4049), False, 'import utils\n'), ((8613, 8644), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {'suffix': '""".png"""'}), "(suffix='.png')\n", (8629, 8644), False, 'import sys, random, json, os, tempfile\n'), ((8716, 8742), 'bpy.data.images.load', 'bpy.data.images.load', (['path'], {}), '(path)\n', (8736, 8742), False, 'import bpy\n'), ((8877, 8892), 'os.remove', 'os.remove', (['path'], {}), '(path)\n', (8886, 8892), False, 'import sys, random, json, os, tempfile\n'), ((9893, 9941), 'utils.set_layer', 'utils.set_layer', (["bpy.data.objects['Lamp_Key']", '(2)'], {}), "(bpy.data.objects['Lamp_Key'], 2)\n", (9908, 9941), False, 'import utils\n'), ((9944, 9993), 'utils.set_layer', 'utils.set_layer', (["bpy.data.objects['Lamp_Fill']", '(2)'], {}), "(bpy.data.objects['Lamp_Fill'], 2)\n", (9959, 9993), False, 'import utils\n'), ((9996, 10045), 'utils.set_layer', 'utils.set_layer', (["bpy.data.objects['Lamp_Back']", '(2)'], {}), "(bpy.data.objects['Lamp_Back'], 2)\n", (10011, 10045), False, 'import utils\n'), ((10048, 10094), 'utils.set_layer', 'utils.set_layer', (["bpy.data.objects['Ground']", '(2)'], {}), "(bpy.data.objects['Ground'], 2)\n", (10063, 10094), False, 'import utils\n'), ((10651, 10690), 'bpy.ops.render.render', 'bpy.ops.render.render', ([], {'write_still': '(True)'}), '(write_still=True)\n', (10672, 10690), False, 'import bpy\n'), ((10888, 10936), 'utils.set_layer', 'utils.set_layer', (["bpy.data.objects['Lamp_Key']", '(0)'], {}), "(bpy.data.objects['Lamp_Key'], 0)\n", (10903, 10936), False, 'import 
utils\n'), ((10939, 10988), 'utils.set_layer', 'utils.set_layer', (["bpy.data.objects['Lamp_Fill']", '(0)'], {}), "(bpy.data.objects['Lamp_Fill'], 0)\n", (10954, 10988), False, 'import utils\n'), ((10991, 11040), 'utils.set_layer', 'utils.set_layer', (["bpy.data.objects['Lamp_Back']", '(0)'], {}), "(bpy.data.objects['Lamp_Back'], 0)\n", (11006, 11040), False, 'import utils\n'), ((11043, 11089), 'utils.set_layer', 'utils.set_layer', (["bpy.data.objects['Ground']", '(0)'], {}), "(bpy.data.objects['Ground'], 0)\n", (11058, 11089), False, 'import utils\n'), ((2525, 2555), 'os.path.basename', 'os.path.basename', (['output_image'], {}), '(output_image)\n', (2541, 2555), False, 'import sys, random, json, os, tempfile\n'), ((2676, 2718), 'bpy.ops.mesh.primitive_plane_add', 'bpy.ops.mesh.primitive_plane_add', ([], {'radius': '(5)'}), '(radius=5)\n', (2708, 2718), False, 'import bpy\n'), ((2731, 2771), 'bpy.ops.mesh.primitive_plane_add', 'bpy.ops.mesh.primitive_plane_add', ([], {'size': '(5)'}), '(size=5)\n', (2763, 2771), False, 'import bpy\n'), ((5343, 5379), 'json.dump', 'json.dump', (['scene_struct', 'f'], {'indent': '(2)'}), '(scene_struct, f, indent=2)\n', (5352, 5379), False, 'import sys, random, json, os, tempfile\n'), ((5420, 5474), 'bpy.ops.wm.save_as_mainfile', 'bpy.ops.wm.save_as_mainfile', ([], {'filepath': 'output_blendfile'}), '(filepath=output_blendfile)\n', (5447, 5474), False, 'import bpy\n'), ((5683, 5786), 'utils.add_object', 'utils.add_object', (['args.shape_dir', "obj['shape']", "obj['size']", "obj['location']"], {'theta': "obj['rotation']"}), "(args.shape_dir, obj['shape'], obj['size'], obj['location'],\n theta=obj['rotation'])\n", (5699, 5786), False, 'import utils\n'), ((5934, 5989), 'utils.add_material', 'utils.add_material', (["obj['material']"], {'Color': "obj['color']"}), "(obj['material'], Color=obj['color'])\n", (5952, 5989), False, 'import utils\n'), ((6016, 6062), 'utils.get_camera_coords', 'utils.get_camera_coords', (['camera', 
'bobj.location'], {}), '(camera, bobj.location)\n', (6039, 6062), False, 'import utils\n'), ((6074, 6097), 'numpy.array', 'np.array', (['bobj.location'], {}), '(bobj.location)\n', (6082, 6097), True, 'import numpy as np\n'), ((6108, 6133), 'numpy.array', 'np.array', (['bobj.dimensions'], {}), '(bobj.dimensions)\n', (6116, 6133), True, 'import numpy as np\n'), ((6711, 6747), 'numpy.amax', 'np.amax', (['corners_camera_coords[:, 0]'], {}), '(corners_camera_coords[:, 0])\n', (6718, 6747), True, 'import numpy as np\n'), ((6758, 6794), 'numpy.amax', 'np.amax', (['corners_camera_coords[:, 1]'], {}), '(corners_camera_coords[:, 1])\n', (6765, 6794), True, 'import numpy as np\n'), ((6805, 6841), 'numpy.amin', 'np.amin', (['corners_camera_coords[:, 0]'], {}), '(corners_camera_coords[:, 0])\n', (6812, 6841), True, 'import numpy as np\n'), ((6852, 6888), 'numpy.amin', 'np.amin', (['corners_camera_coords[:, 1]'], {}), '(corners_camera_coords[:, 1])\n', (6859, 6888), True, 'import numpy as np\n'), ((10287, 10309), 'bpy.ops.material.new', 'bpy.ops.material.new', ([], {}), '()\n', (10307, 10309), False, 'import bpy\n'), ((790, 801), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (798, 801), False, 'import sys, random, json, os, tempfile\n'), ((3316, 3334), 'mathutils.Vector', 'Vector', (['(0, 0, -1)'], {}), '((0, 0, -1))\n', (3322, 3334), False, 'from mathutils import Vector\n'), ((3388, 3406), 'mathutils.Vector', 'Vector', (['(-1, 0, 0)'], {}), '((-1, 0, 0))\n', (3394, 3406), False, 'from mathutils import Vector\n'), ((3458, 3475), 'mathutils.Vector', 'Vector', (['(0, 1, 0)'], {}), '((0, 1, 0))\n', (3464, 3475), False, 'from mathutils import Vector\n'), ((3539, 3557), 'mathutils.Vector', 'Vector', (['(0, 0, -1)'], {}), '((0, 0, -1))\n', (3545, 3557), False, 'from mathutils import Vector\n'), ((3611, 3629), 'mathutils.Vector', 'Vector', (['(-1, 0, 0)'], {}), '((-1, 0, 0))\n', (3617, 3629), False, 'from mathutils import Vector\n'), ((3681, 3698), 'mathutils.Vector', 'Vector', 
(['(0, 1, 0)'], {}), '((0, 1, 0))\n', (3687, 3698), False, 'from mathutils import Vector\n'), ((5207, 5246), 'bpy.ops.render.render', 'bpy.ops.render.render', ([], {'write_still': '(True)'}), '(write_still=True)\n', (5228, 5246), False, 'import bpy\n'), ((2840, 2855), 'random.random', 'random.random', ([], {}), '()\n', (2853, 2855), False, 'import sys, random, json, os, tempfile\n'), ((10417, 10432), 'random.random', 'random.random', ([], {}), '()\n', (10430, 10432), False, 'import sys, random, json, os, tempfile\n')] |
#!/usr/bin/env python2
#
# Copyright (C) 2019 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions
# and limitations under the License.
from os import makedirs
from os.path import exists, basename, join
from argparse import ArgumentParser
from action_detection.nn.monitors.factory import get_monitor
BASE_FILE_NAME = 'converted_model'
CKPT_FILE_NAME = '{}.ckpt'.format(BASE_FILE_NAME)
PB_FILE_NAME = '{}.pbtxt'.format(BASE_FILE_NAME)
FROZEN_FILE_NAME = 'frozen.pb'


def main():
    """Carry out model preparation for the export.

    Loads a trained snapshot, strips the training-only ops, saves the
    cleaned graph next to the converted checkpoint and freezes it into a
    single file.

    :raises IOError: If the config file or the snapshot is missing.
    """
    parser = ArgumentParser()
    parser.add_argument('--config', '-c', type=str, required=True, help='Path to config file')
    parser.add_argument('--snapshot_path', '-s', type=str, required=True, default='', help='Path to model snapshot')
    parser.add_argument('--output_dir', '-o', type=str, required=True, default='', help='Path to output directory')
    args = parser.parse_args()

    # Validate inputs explicitly: 'assert' statements are stripped when
    # Python runs with -O, so they must not be used for input validation.
    if not exists(args.config):
        raise IOError('Config file not found: {}'.format(args.config))
    # The snapshot path is a prefix; its '.index' companion file must exist.
    if not exists(args.snapshot_path + '.index'):
        raise IOError('Snapshot not found: {}'.format(args.snapshot_path))

    if not exists(args.output_dir):
        makedirs(args.output_dir)

    task_monitor = get_monitor(args.config, snapshot_path=args.snapshot_path)

    # Re-save the checkpoint without the training-only operations.
    converted_snapshot_path = join(args.output_dir, CKPT_FILE_NAME)
    task_monitor.eliminate_train_ops(converted_snapshot_path)

    # Append the training step parsed from the snapshot file name
    # (the text after the last '-').
    converted_model_path = '{}-{}'.format(converted_snapshot_path,
                                          int(basename(args.snapshot_path).split('-')[-1]))
    task_monitor.save_model_graph(converted_model_path, args.output_dir)
    task_monitor.freeze_model_graph(converted_model_path,
                                    join(args.output_dir, PB_FILE_NAME),
                                    join(args.output_dir, FROZEN_FILE_NAME))


if __name__ == '__main__':
    main()
| [
"os.path.exists",
"os.makedirs",
"argparse.ArgumentParser",
"os.path.join",
"os.path.basename",
"action_detection.nn.monitors.factory.get_monitor"
] | [((1025, 1041), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (1039, 1041), False, 'from argparse import ArgumentParser\n'), ((1413, 1432), 'os.path.exists', 'exists', (['args.config'], {}), '(args.config)\n', (1419, 1432), False, 'from os.path import exists, basename, join\n'), ((1444, 1481), 'os.path.exists', 'exists', (["(args.snapshot_path + '.index')"], {}), "(args.snapshot_path + '.index')\n", (1450, 1481), False, 'from os.path import exists, basename, join\n'), ((1573, 1631), 'action_detection.nn.monitors.factory.get_monitor', 'get_monitor', (['args.config'], {'snapshot_path': 'args.snapshot_path'}), '(args.config, snapshot_path=args.snapshot_path)\n', (1584, 1631), False, 'from action_detection.nn.monitors.factory import get_monitor\n'), ((1663, 1700), 'os.path.join', 'join', (['args.output_dir', 'CKPT_FILE_NAME'], {}), '(args.output_dir, CKPT_FILE_NAME)\n', (1667, 1700), False, 'from os.path import exists, basename, join\n'), ((1494, 1517), 'os.path.exists', 'exists', (['args.output_dir'], {}), '(args.output_dir)\n', (1500, 1517), False, 'from os.path import exists, basename, join\n'), ((1527, 1552), 'os.makedirs', 'makedirs', (['args.output_dir'], {}), '(args.output_dir)\n', (1535, 1552), False, 'from os import makedirs\n'), ((2091, 2126), 'os.path.join', 'join', (['args.output_dir', 'PB_FILE_NAME'], {}), '(args.output_dir, PB_FILE_NAME)\n', (2095, 2126), False, 'from os.path import exists, basename, join\n'), ((2164, 2203), 'os.path.join', 'join', (['args.output_dir', 'FROZEN_FILE_NAME'], {}), '(args.output_dir, FROZEN_FILE_NAME)\n', (2168, 2203), False, 'from os.path import exists, basename, join\n'), ((1877, 1905), 'os.path.basename', 'basename', (['args.snapshot_path'], {}), '(args.snapshot_path)\n', (1885, 1905), False, 'from os.path import exists, basename, join\n')] |
import os
import re
import shutil
from django.core.management.base import BaseCommand, CommandError
from wagtail.images.models import Image
class Command(BaseCommand):
    """Import every image file found in a folder into Wagtail's image library."""
    help = 'Add Image from folder that you indicate'

    # Accepted (lower-case) image file extensions.
    IMAGE_FORMAT = (
        'jpg',
        'jpeg',
        'webp',
        'png',
        'gif',
    )

    def add_arguments(self, parser):
        parser.add_argument(
            '-f',
            '--folder',
            nargs=1,
            required=True,
            help='Path to the folder with different images',
        )

    def handle(self, *args, **options):
        """Copy matching images into ./media and register them with Wagtail."""
        path_folder = options['folder'][0]
        if not os.path.exists(r'media'):
            os.mkdir(r'media')
        if os.path.exists(path_folder) and os.path.isdir(path_folder):
            image_names = []
            for file_name in os.listdir(path_folder):
                # os.path.splitext keeps only the final extension, unlike
                # the old greedy regex which captured everything after the
                # first dot (so 'a.b.jpg' was never recognised); lower()
                # makes the match case-insensitive ('IMG.JPG' now works).
                ext = os.path.splitext(file_name)[1].lstrip('.').lower()
                if ext in self.IMAGE_FORMAT:
                    image_names.append(file_name)
            # Copy first, then register, so a failed copy aborts before any
            # database rows are created.
            for name in image_names:
                shutil.copy2(os.path.join(path_folder, name), r'media')
            for name in image_names:
                self.stdout.write(name + ' --- ' + self.style.SUCCESS('OK'))
                Image.objects.create(file=name, title=name.split('.')[0])
        else:
            self.stdout.write(self.style.ERROR('Not dir or not exists'))
| [
"os.path.exists",
"os.listdir",
"shutil.copy2",
"os.path.isdir",
"os.mkdir",
"re.search"
] | [((655, 678), 'os.path.exists', 'os.path.exists', (['"""media"""'], {}), "('media')\n", (669, 678), False, 'import os\n'), ((693, 710), 'os.mkdir', 'os.mkdir', (['"""media"""'], {}), "('media')\n", (701, 710), False, 'import os\n'), ((723, 750), 'os.path.exists', 'os.path.exists', (['path_folder'], {}), '(path_folder)\n', (737, 750), False, 'import os\n'), ((755, 781), 'os.path.isdir', 'os.path.isdir', (['path_folder'], {}), '(path_folder)\n', (768, 781), False, 'import os\n'), ((807, 830), 'os.listdir', 'os.listdir', (['path_folder'], {}), '(path_folder)\n', (817, 830), False, 'import os\n'), ((1257, 1285), 'shutil.copy2', 'shutil.copy2', (['image', '"""media"""'], {}), "(image, 'media')\n", (1269, 1285), False, 'import shutil\n'), ((969, 996), 're.search', 're.search', (['"""\\\\.(.*)$"""', 'file'], {}), "('\\\\.(.*)$', file)\n", (978, 996), False, 'import re\n')] |
from pathlib import Path
import environ
# ENVIROMENT
# ------------------------------------------------------------------------------
# Project root is four levels above this settings module.
ROOT_DIR = Path(__file__).resolve(strict=True).parent.parent.parent.parent
# nauci_service/ package directory
BASE_DIR = ROOT_DIR / "nauci_service"
APPS_DIR = BASE_DIR / "apps"

env = environ.Env()

READ_DOT_ENV_FILE = env.bool("DJANGO_READ_DOT_ENV_FILE", default=True)
if READ_DOT_ENV_FILE:
    # OS environment variables take precedence over variables from .env
    env.read_env(str(BASE_DIR / "config" / "envs" / ".env"))

# Parameter store cannot hold empty values, so absent settings arrive as the
# magic '<stub>' marker; drop those entries from the environment entirely.
for key in [k for k, v in env.ENVIRON.items() if v == "<stub>"]:
    env.ENVIRON.pop(key)
| [
"environ.Env",
"pathlib.Path"
] | [((307, 320), 'environ.Env', 'environ.Env', ([], {}), '()\n', (318, 320), False, 'import environ\n'), ((148, 162), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (152, 162), False, 'from pathlib import Path\n')] |
import array
import os
import socket
from warnings import warn
class NoFDError(RuntimeError):
    """Raised when a :class:`FileDescriptor` method is used after it has
    already been closed or converted."""
class FileDescriptor:
    """A file descriptor received in a D-Bus message

    This wrapper helps ensure that the file descriptor is closed exactly once.

    If you don't explicitly convert or close the FileDescriptor object, it will
    close its file descriptor when it goes out of scope, and emit a
    ResourceWarning.
    """
    __slots__ = ('_fd',)

    # Sentinel values stored in _fd once the wrapper no longer owns an FD.
    _CLOSED = -1
    _CONVERTED = -2

    def __init__(self, fd):
        self._fd = fd

    def __repr__(self):
        detail = self._fd
        if self._fd == self._CLOSED:
            detail = 'closed'
        elif self._fd == self._CONVERTED:
            detail = 'converted'
        return f"<FileDescriptor ({detail})>"

    def close(self):
        """Close the file descriptor

        This can safely be called multiple times, but will raise RuntimeError
        if called after converting it with one of the ``to_*`` methods.

        This object can also be used in a ``with`` block, to close it on
        leaving the block.
        """
        if self._fd == self._CLOSED:
            pass  # Closing twice is a deliberate no-op.
        elif self._fd == self._CONVERTED:
            raise NoFDError("Can't close FileDescriptor after converting it")
        else:
            # Mark as closed *before* os.close so a failure there cannot
            # lead to a double close later.
            self._fd, fd = self._CLOSED, self._fd
            os.close(fd)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()

    def __del__(self):
        # Only warn if we still own a real (non-negative) descriptor.
        if self._fd >= 0:
            warn(
                f'FileDescriptor ({self._fd}) was neither closed nor converted',
                ResourceWarning, stacklevel=2, source=self
            )
            self.close()

    def _check(self):
        """Raise NoFDError if this wrapper no longer owns a descriptor."""
        if self._fd < 0:
            detail = 'closed' if self._fd == self._CLOSED else 'converted'
            raise NoFDError(f'FileDescriptor object was already {detail}')

    def fileno(self):
        """Get the integer file descriptor

        This does not change the state of the :class:`FileDescriptor` object,
        unlike the ``to_*`` methods.
        """
        self._check()
        return self._fd

    def to_raw_fd(self):
        """Convert to the low-level integer file descriptor::

            raw_fd = fd.to_raw_fd()
            os.write(raw_fd, b'xyz')
            os.close(raw_fd)

        The :class:`FileDescriptor` can't be used after calling this. The caller
        is responsible for closing the file descriptor.
        """
        self._check()
        self._fd, fd = self._CONVERTED, self._fd
        return fd

    def to_file(self, mode, buffering=-1, encoding=None, errors=None, newline=None):
        """Convert to a Python file object::

            with fd.to_file('w') as f:
                f.write('xyz')

        The arguments are the same as for the builtin :func:`open` function.

        The :class:`FileDescriptor` can't be used after calling this. Closing
        the file object will also close the file descriptor.
        """
        self._check()
        f = open(
            self._fd, mode, buffering=buffering,
            encoding=encoding, errors=errors, newline=newline
        )
        self._fd = self._CONVERTED
        return f

    def to_socket(self):
        """Convert to a socket object

        This returns a standard library :func:`socket.socket` object::

            with fd.to_socket() as sock:
                b = sock.sendall(b'xyz')

        The wrapper object can't be used after calling this. Closing the socket
        object will also close the file descriptor.
        """
        from socket import socket
        self._check()
        s = socket(fileno=self._fd)
        self._fd = self._CONVERTED
        return s

    @classmethod
    def from_ancdata(cls, ancdata) -> 'list[FileDescriptor]':
        """Make a list of FileDescriptor from received file descriptors

        ancdata is a list of ancillary data tuples as returned by
        socket.recvmsg().  (The return annotation is a string so it does not
        require subscripting ``list`` at class-creation time; the old literal
        ``['FileDescriptor']`` was not a valid type annotation.)
        """
        fds = array.array("i")   # Array of ints
        for cmsg_level, cmsg_type, data in ancdata:
            if cmsg_level == socket.SOL_SOCKET and cmsg_type == socket.SCM_RIGHTS:
                # Append data, ignoring any truncated integers at the end.
                fds.frombytes(data[:len(data) - (len(data) % fds.itemsize)])
        return [cls(i) for i in fds]
_fds_buf_size_cache = None


def fds_buf_size():
    """Return the ancillary-data buffer size needed for 256 descriptors.

    The reference D-Bus implementation defaults to allowing 16 FDs per
    message and the Linux kernel currently allows 253 per sendmsg() call,
    so budgeting for 256 per recvmsg() should always suffice.  The value
    is computed once and cached at module level.
    """
    global _fds_buf_size_cache
    if _fds_buf_size_cache is None:
        _fds_buf_size_cache = socket.CMSG_SPACE(256 * array.array('i').itemsize)
    return _fds_buf_size_cache
| [
"array.array",
"socket.socket",
"os.close",
"socket.socket.CMSG_SPACE",
"warnings.warn"
] | [((3781, 3804), 'socket.socket', 'socket', ([], {'fileno': 'self._fd'}), '(fileno=self._fd)\n', (3787, 3804), False, 'from socket import socket\n'), ((4115, 4131), 'array.array', 'array.array', (['"""i"""'], {}), "('i')\n", (4126, 4131), False, 'import array\n'), ((4989, 5024), 'socket.socket.CMSG_SPACE', 'socket.CMSG_SPACE', (['(maxfds * fd_size)'], {}), '(maxfds * fd_size)\n', (5006, 5024), False, 'from socket import socket\n'), ((1650, 1767), 'warnings.warn', 'warn', (['f"""FileDescriptor ({self._fd}) was neither closed nor converted"""', 'ResourceWarning'], {'stacklevel': '(2)', 'source': 'self'}), "(f'FileDescriptor ({self._fd}) was neither closed nor converted',\n ResourceWarning, stacklevel=2, source=self)\n", (1654, 1767), False, 'from warnings import warn\n'), ((4933, 4949), 'array.array', 'array.array', (['"""i"""'], {}), "('i')\n", (4944, 4949), False, 'import array\n'), ((1456, 1468), 'os.close', 'os.close', (['fd'], {}), '(fd)\n', (1464, 1468), False, 'import os\n')] |
""" Renders the animation into a list of frames
"""
__all__ = ['OpenCvRenderer', 'FileRenderer']
from dataclasses import dataclass
import cv2
import imageio as iio
import numpy as np
from .animation import Frame, Animation
ESC = 27
@dataclass
class Options:
    """Per-frame post-processing options consumed by RenderedFrame.

    brightness: 0-255 fixed-point multiplier (pixel * brightness >> 8);
        values outside that range disable the scaling entirely.
    cutoff: channel values strictly below this threshold are zeroed;
        0 (or anything >= 256) disables the cutoff.
    """
    brightness: int = 100
    cutoff: int = 0
# NOTE(review): @dataclass does not overwrite the explicit __init__ below,
# but it still generates __eq__ from (buffer, duration); comparing two
# instances would evaluate `ndarray == ndarray`, whose truth value is
# ambiguous -- confirm instances are never compared for equality.
@dataclass
class RenderedFrame:
    """One animation frame post-processed into a channel-reversed uint8 buffer."""
    buffer: np.ndarray
    duration: int
    def __init__(self, frame: Frame, options: Options) -> None:
        # Copy the snapshot into an int32 working buffer and reverse the
        # channel axis (presumably RGB -> BGR for OpenCV -- confirm).
        buffer = np.array(frame.snapshot, np.int32)[:, :, ::-1]
        # Zero out channel values below the cutoff threshold (if enabled).
        if 0 < options.cutoff < 256:
            buffer[buffer < options.cutoff] = 0
        np.clip(buffer, 0, 255, out=buffer)
        # Scale brightness in 8.8 fixed point: (value * brightness) >> 8.
        if 0 <= options.brightness < 256:
            buffer *= options.brightness
            buffer >>= 8
        self.buffer = buffer.astype(np.uint8)
        self.duration = frame.duration
    def get_pixels(self, scale: int = 1) -> np.ndarray:
        # Negative scale returns the raw, unscaled buffer.
        if scale < 0:
            return self.buffer
        h, w, _ = self.buffer.shape
        # Nearest-neighbour keeps the blocky "LED pixel" look when upscaling.
        return cv2.resize(self.buffer, (w * scale, h * scale), interpolation=cv2.INTER_NEAREST)
class Renderer(list):
    """A list of RenderedFrame objects produced from an Animation."""

    def __init__(self, animation: Animation, *, brightness: int = 256, cutoff: int = 0) -> None:
        self.loop_count = animation.loop_count
        opts = Options(brightness, cutoff)
        rendered = [RenderedFrame(frame, opts) for frame in animation]
        super().__init__(rendered)
class OpenCvRenderer(Renderer):
    """Renderer that previews the animation in an OpenCV window."""
    def show(self, scale: int = 8, title : str = 'imagiCharm Preview'):
        """Play the animation until ESC is pressed, the window is closed,
        or the configured loop count is exhausted."""
        cv2.namedWindow(title)
        def is_window_visible():
            # WND_PROP_VISIBLE drops below 1 once the user closes the window.
            return cv2.getWindowProperty(title, cv2.WND_PROP_VISIBLE) >= 1
        stop = False
        repeats = 0
        while 1:
            for frame in self:
                cv2.imshow(title, frame.get_pixels(scale))
                # waitKey doubles as the per-frame delay (duration in ms).
                key = cv2.waitKey(frame.duration)
                if key == ESC or not is_window_visible():
                    stop = True
                    break
            if stop:
                break
            # loop_count == 0 never increments 'repeats': repeat forever.
            if self.loop_count == 0:
                continue
            repeats += 1
            if repeats >= self.loop_count:
                break
        if is_window_visible():
            cv2.destroyWindow(title)
class FileRenderer(Renderer):
    """Renderer that writes the animation to PNG or animated-GIF files."""

    def save(self, path, scale: int = 8):
        """Dispatch on the file extension of ``path`` (case-insensitive)."""
        lowered = path.lower()
        if lowered.endswith('.gif'):
            self.save_animated_gif(path, scale)
        elif lowered.endswith('.png'):
            if len(self) == 1:
                self.save_first_frame(path, scale)
            else:
                self.save_each_frame(path, scale)
        else:
            raise ValueError('Unknown image format, please save to PNG or GIF')

    def save_each_frame(self, path: str, scale: int):
        # One numbered PNG per frame: "name.0000.png", "name.0001.png", ...
        stem = path[:-4]
        for index, frame in enumerate(self):
            cv2.imwrite(f'{stem}.{index:04d}.png', frame.get_pixels(scale))

    def save_first_frame(self, path: str, scale: int):
        cv2.imwrite(path, self[0].get_pixels(scale))

    def save_animated_gif(self, path: str, scale: int):
        # imageio expects channel order back to RGB and durations in seconds.
        # https://buildmedia.readthedocs.org/media/pdf/imageio/stable/imageio.pdf
        frames = []
        durations = []
        for frame in self:
            frames.append(frame.get_pixels(scale)[:, :, ::-1])
            durations.append(0.001 * frame.duration)
        iio.mimsave(path, frames, duration=durations, loop=self.loop_count)
| [
"numpy.clip",
"cv2.destroyWindow",
"numpy.array",
"cv2.getWindowProperty",
"imageio.mimsave",
"cv2.resize",
"cv2.waitKey",
"cv2.namedWindow"
] | [((609, 644), 'numpy.clip', 'np.clip', (['buffer', '(0)', '(255)'], {'out': 'buffer'}), '(buffer, 0, 255, out=buffer)\n', (616, 644), True, 'import numpy as np\n'), ((1002, 1087), 'cv2.resize', 'cv2.resize', (['self.buffer', '(w * scale, h * scale)'], {'interpolation': 'cv2.INTER_NEAREST'}), '(self.buffer, (w * scale, h * scale), interpolation=cv2.INTER_NEAREST\n )\n', (1012, 1087), False, 'import cv2\n'), ((1492, 1514), 'cv2.namedWindow', 'cv2.namedWindow', (['title'], {}), '(title)\n', (1507, 1514), False, 'import cv2\n'), ((3264, 3331), 'imageio.mimsave', 'iio.mimsave', (['path', 'frames'], {'duration': 'durations', 'loop': 'self.loop_count'}), '(path, frames, duration=durations, loop=self.loop_count)\n', (3275, 3331), True, 'import imageio as iio\n'), ((467, 501), 'numpy.array', 'np.array', (['frame.snapshot', 'np.int32'], {}), '(frame.snapshot, np.int32)\n', (475, 501), True, 'import numpy as np\n'), ((2185, 2209), 'cv2.destroyWindow', 'cv2.destroyWindow', (['title'], {}), '(title)\n', (2202, 2209), False, 'import cv2\n'), ((1568, 1618), 'cv2.getWindowProperty', 'cv2.getWindowProperty', (['title', 'cv2.WND_PROP_VISIBLE'], {}), '(title, cv2.WND_PROP_VISIBLE)\n', (1589, 1618), False, 'import cv2\n'), ((1798, 1825), 'cv2.waitKey', 'cv2.waitKey', (['frame.duration'], {}), '(frame.duration)\n', (1809, 1825), False, 'import cv2\n')] |
from django.db import models
from django.contrib.auth.models import User
from PIL import Image
from django import template
from django.contrib.auth.models import Group
class Helper(models.Model):
    """Profile of a helper-for-hire, linked one-to-one to a Django user."""
    option = (
        ('Male', 'Male'),
        ('Female', 'Female')
    )
    user = models.OneToOneField(User, on_delete=models.CASCADE, null=True)
    profile_picture = models.ImageField(
        upload_to='helper_img', null=True, blank=True, default='helper_img/def_img.png')
    contact_number = models.CharField(max_length=13)
    cnic = models.CharField(max_length=15)
    gender = models.CharField(max_length=10, choices=option, default='Male')
    description = models.TextField(max_length=5000)
    daily_work_rate = models.IntegerField()
    profile_visible = models.BooleanField(null=True)
    age = models.SmallIntegerField(null=True)
    # null=True has no effect on ManyToManyField (Django warning fields.W340)
    # and has been dropped; blank=True keeps the field optional in forms.
    category = models.ManyToManyField('Category', blank=True)
    province = models.CharField(max_length=50, null=True, blank=True)
    city = models.CharField(max_length=50, null=True, blank=True)
    area = models.CharField(max_length=50, null=True, blank=True)
    rating = models.IntegerField(null=True)

    def __str__(self):
        return self.user.username

    def save(self, *args, **kwargs):
        """Persist the model, then downscale the profile picture in place.

        The stored image is thumbnailed to at most 500x500 pixels so
        uploads do not keep full-resolution files on disk.
        """
        super().save(*args, **kwargs)
        img = Image.open(self.profile_picture.path)
        output_size = (500, 500)
        img.thumbnail(output_size)
        img.save(self.profile_picture.path)
class Rewiew(models.Model):
    """A star rating and comment left for a Helper.

    NOTE(review): the class name is misspelled ('Rewiew') but is kept
    unchanged for compatibility with existing migrations and references.
    """
    user = models.ForeignKey(Helper, on_delete=models.SET_NULL, null=True)
    star = models.IntegerField()
    comment = models.TextField(max_length=1500, default='.')

    def __str__(self):
        # user may be NULL (on_delete=SET_NULL) and Helper.user itself is
        # nullable, so guard both links to avoid AttributeError.
        if self.user and self.user.user:
            return self.user.user.username
        return '(deleted helper)'
class Category(models.Model):
    """A work category; referenced by Helper.category (many-to-many)."""
    name = models.CharField(max_length=50)
    def __str__(self):
        return self.name
class Sub_Category(models.Model):
    """A subdivision of a Category (one Category has many sub-categories)."""
    category = models.ForeignKey(Category, on_delete=models.CASCADE)
    name = models.CharField(max_length=50)
    def __str__(self):
        return self.name
| [
"django.db.models.OneToOneField",
"PIL.Image.open",
"django.db.models.TextField",
"django.db.models.IntegerField",
"django.db.models.ForeignKey",
"django.db.models.ManyToManyField",
"django.db.models.BooleanField",
"django.db.models.SmallIntegerField",
"django.db.models.ImageField",
"django.db.mod... | [((285, 348), 'django.db.models.OneToOneField', 'models.OneToOneField', (['User'], {'on_delete': 'models.CASCADE', 'null': '(True)'}), '(User, on_delete=models.CASCADE, null=True)\n', (305, 348), False, 'from django.db import models\n'), ((371, 474), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': '"""helper_img"""', 'null': '(True)', 'blank': '(True)', 'default': '"""helper_img/def_img.png"""'}), "(upload_to='helper_img', null=True, blank=True, default=\n 'helper_img/def_img.png')\n", (388, 474), False, 'from django.db import models\n'), ((500, 531), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(13)'}), '(max_length=13)\n', (516, 531), False, 'from django.db import models\n'), ((543, 574), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(15)'}), '(max_length=15)\n', (559, 574), False, 'from django.db import models\n'), ((588, 651), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)', 'choices': 'option', 'default': '"""Male"""'}), "(max_length=10, choices=option, default='Male')\n", (604, 651), False, 'from django.db import models\n'), ((670, 703), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(5000)'}), '(max_length=5000)\n', (686, 703), False, 'from django.db import models\n'), ((726, 747), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (745, 747), False, 'from django.db import models\n'), ((770, 800), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (789, 800), False, 'from django.db import models\n'), ((811, 846), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {'null': '(True)'}), '(null=True)\n', (835, 846), False, 'from django.db import models\n'), ((862, 919), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['"""Category"""'], {'null': '(True)', 'blank': '(True)'}), "('Category', null=True, 
blank=True)\n", (884, 919), False, 'from django.db import models\n'), ((934, 988), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'null': '(True)', 'blank': '(True)'}), '(max_length=50, null=True, blank=True)\n', (950, 988), False, 'from django.db import models\n'), ((1000, 1054), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'null': '(True)', 'blank': '(True)'}), '(max_length=50, null=True, blank=True)\n', (1016, 1054), False, 'from django.db import models\n'), ((1066, 1120), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'null': '(True)', 'blank': '(True)'}), '(max_length=50, null=True, blank=True)\n', (1082, 1120), False, 'from django.db import models\n'), ((1134, 1164), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (1153, 1164), False, 'from django.db import models\n'), ((1504, 1567), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Helper'], {'on_delete': 'models.SET_NULL', 'null': '(True)'}), '(Helper, on_delete=models.SET_NULL, null=True)\n', (1521, 1567), False, 'from django.db import models\n'), ((1579, 1600), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (1598, 1600), False, 'from django.db import models\n'), ((1615, 1661), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(1500)', 'default': '"""."""'}), "(max_length=1500, default='.')\n", (1631, 1661), False, 'from django.db import models\n'), ((1768, 1799), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (1784, 1799), False, 'from django.db import models\n'), ((1900, 1953), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Category'], {'on_delete': 'models.CASCADE'}), '(Category, on_delete=models.CASCADE)\n', (1917, 1953), False, 'from django.db import models\n'), ((1965, 1996), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': 
'(50)'}), '(max_length=50)\n', (1981, 1996), False, 'from django.db import models\n'), ((1313, 1350), 'PIL.Image.open', 'Image.open', (['self.profile_picture.path'], {}), '(self.profile_picture.path)\n', (1323, 1350), False, 'from PIL import Image\n')] |
import re
from typing import Generator
from django.db import models, transaction, IntegrityError, InternalError
from django.db.models import Q
from django.contrib.auth import get_user_model
from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _
from django.conf import settings
from base.models import Base
class Account(Base):
    """A bank account owned by a user, identified by a unique BAN/IBAN."""
    user = models.ForeignKey(
        get_user_model(),
        on_delete=models.PROTECT
    )
    balance = models.FloatField(_('Balance'), default=0.0)
    currency = models.CharField(
        _('Currency'),
        choices=settings.CURRENCIES,
        default='pln',
        max_length=3
    )
    # How far below zero the balance may go through transfers.
    max_debit = models.FloatField(_('Max debit'), default=100.0)
    ban = models.CharField(
        _('Bank account number'),
        max_length=32,
        unique=True
    )

    @property
    def iban(self) -> str:
        """The BAN prefixed with the configured country code."""
        return f'{settings.COUNTRY_CODE}{self.ban}'

    def save(self, *args, **kwargs) -> None:
        # Assign a fresh BAN on first save only.
        if not self.pk:
            self.ban = self.generate_ban()
        super().save(*args, **kwargs)
        # NOTE(review): falls back to the record creator as owner when no
        # user is set; create_uid is presumably provided by Base -- confirm.
        if not self.user:
            self.user = self.create_uid
            self.save()

    @transaction.atomic
    def transfer(self, to_ban: str, amount: float, name: str,
                 title: str) -> None:
        """Create a Transaction moving ``amount`` from this account to ``to_ban``.

        Raises ValidationError when the amount is negative, when it exceeds
        the available funds (balance + max_debit), or when the target BAN
        fails the checksum validation.
        """
        if amount < 0:
            raise ValidationError(
                _('Amount must be greater than 0')
            )
        if amount > self.balance + self.max_debit:
            raise ValidationError(_('You have not enough funds'))
        if not self.valid_ban(to_ban):
            raise ValidationError(_(f'BAN {to_ban} is not correct'))
        Transaction.objects.create(
            from_account=self,
            from_ban=self.ban,
            to_ban=to_ban,
            amount=amount,
            title=title,
            name=name,
            currency=self.currency
        )

    @classmethod
    def generate_ban(cls) -> str:
        """Generate the next bank account number with a valid checksum."""
        # Customer number is the count of all accounts in the system,
        # zero-padded to 16 digits: the first account gets all zeros.
        customer = str(cls.objects.count())
        if len(customer) > 16:
            raise InternalError('Max number of accounts reached')
        # Reuse the value already fetched instead of hitting the database a
        # second time (the old double query was also a small race window).
        customer = customer.zfill(16)
        # Map country-code letters to numbers: A -> 10, B -> 11, C -> 12 ...
        country_code = ''.join([
            str(ord(char) - 55) for char in settings.COUNTRY_CODE.upper()
        ])
        # Temporary BAN with country digits and a '00' checksum placeholder.
        ban = f'{settings.BANK_ID}{settings.BRANCH_ID}' \
              f'{customer}{country_code}00'
        # Checksum = 98 - (ban % 97); split the string in half first to
        # keep the intermediate integers small.
        first, second = ban[:len(ban)//2], ban[len(ban)//2:]
        first = str(int(first) % 97)
        checksum = str(98 - (int(first + second) % 97)).zfill(2)
        return f'{checksum}{settings.BANK_ID}{settings.BRANCH_ID}{customer}'

    @classmethod
    def valid_ban(cls, ban: str) -> bool:
        """Validate a BAN/IBAN using the ISO 7064 mod-97 check.

        NOTE: only the two leading country letters are converted to digits;
        a BBAN containing letters would raise ValueError at int().
        """
        # A BAN starting with a digit lacks the country prefix; add it.
        if ban[0].isnumeric():
            ban = f'{settings.COUNTRY_CODE}{ban}'
        # Strip separators / non-alphanumeric characters.
        ban = re.sub(r'\W+', '', ban)
        # Country letters -> numbers: A -> 10, B -> 11, C -> 12 ...
        ban = ''.join([
            str(ord(char) - 55) for char in ban[:2].upper()
        ]) + ban[2:]
        # Rotate the country digits + checksum (6 chars after conversion)
        # to the end, then test the mod-97 remainder.
        ban = ban[6:] + ban[:6]
        return int(ban) % 97 == 1
class Transaction(Base):
    """
    Immutable record of a money transfer between two bank accounts.

    Rows may only be created, never updated or deleted; ``save`` also
    applies the balance change to the accounts involved.
    """
    # Account the money leaves; PROTECT prevents cascade deletion.
    from_account = models.ForeignKey(
        Account,
        models.PROTECT,
        verbose_name=_('From account'),
        related_name='from_account'
    )
    # Receiving account; only resolved for in-bank ("inner") transfers.
    to_account = models.ForeignKey(
        Account,
        models.PROTECT,
        verbose_name=_('To account'),
        related_name='to_account'
    )
    from_ban = models.CharField(_('From bank account number'), max_length=32)
    to_ban = models.CharField(_('To bank account number'), max_length=32)
    # True when the destination BAN belongs to another bank.
    outer = models.BooleanField('Outer transfer')
    currency = models.CharField(
        _('Currency'),
        choices=settings.CURRENCIES,
        default='pln',
        max_length=3
    )
    amount = models.FloatField(_('Amount'))
    title = models.CharField(_('Title'), max_length=255)
    name = models.CharField(_('Name and address'), max_length=255)

    def save(self, *args, **kwargs):
        """
        Prevent from modifying records and validate fields.

        :raises IntegrityError: when attempting to update an existing row
        :raises ValidationError: on an invalid BAN, a same-account
            transfer, or a non-positive amount
        """
        if self.pk:
            raise IntegrityError(
                _('You can only add new transactions or read them')
            )
        if not Account.valid_ban(self.from_ban):
            raise ValidationError(_(f'BAN {self.from_ban} is not correct'))
        if not Account.valid_ban(self.to_ban):
            raise ValidationError(_(f'BAN {self.to_ban} is not correct'))
        if self.from_ban == self.to_ban:
            raise ValidationError(
                _('You cannot make a transfer within same account')
            )
        # The message promises "greater than 0"; the previous ``< 0``
        # check let a zero amount through, so reject it as well.
        if self.amount <= 0:
            raise ValidationError(
                _('Amount must be greater than 0')
            )
        self.outer = self.check_outer()
        if not self.outer:
            self.to_account = Account.objects.get(ban=self.to_ban)
        super().save(*args, **kwargs)
        # Apply the balance change; ``Account.transfer`` wraps this in
        # ``transaction.atomic`` to keep it consistent with the insert.
        self.from_account.balance -= self.amount
        self.from_account.save()
        if self.to_account:
            self.to_account.balance += self.amount
            self.to_account.save()

    def check_outer(self) -> bool:
        """
        Check if the transfer is addressed to another bank.
        """
        return self.to_ban[2:6] != settings.BANK_ID

    def delete(self):
        """Deletion is forbidden: transactions are append-only."""
        raise IntegrityError(
            _('You can only add new transactions or read them')
        )
| [
"django.contrib.auth.get_user_model",
"django.db.InternalError",
"django.conf.settings.COUNTRY_CODE.upper",
"django.utils.translation.gettext_lazy",
"django.db.models.BooleanField",
"re.sub"
] | [((4292, 4329), 'django.db.models.BooleanField', 'models.BooleanField', (['"""Outer transfer"""'], {}), "('Outer transfer')\n", (4311, 4329), False, 'from django.db import models, transaction, IntegrityError, InternalError\n'), ((424, 440), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (438, 440), False, 'from django.contrib.auth import get_user_model\n'), ((514, 526), 'django.utils.translation.gettext_lazy', '_', (['"""Balance"""'], {}), "('Balance')\n", (515, 526), True, 'from django.utils.translation import gettext_lazy as _\n'), ((583, 596), 'django.utils.translation.gettext_lazy', '_', (['"""Currency"""'], {}), "('Currency')\n", (584, 596), True, 'from django.utils.translation import gettext_lazy as _\n'), ((721, 735), 'django.utils.translation.gettext_lazy', '_', (['"""Max debit"""'], {}), "('Max debit')\n", (722, 735), True, 'from django.utils.translation import gettext_lazy as _\n'), ((789, 813), 'django.utils.translation.gettext_lazy', '_', (['"""Bank account number"""'], {}), "('Bank account number')\n", (790, 813), True, 'from django.utils.translation import gettext_lazy as _\n'), ((3396, 3419), 're.sub', 're.sub', (['"""\\\\W+"""', '""""""', 'ban'], {}), "('\\\\W+', '', ban)\n", (3402, 3419), False, 'import re\n'), ((4158, 4187), 'django.utils.translation.gettext_lazy', '_', (['"""From bank account number"""'], {}), "('From bank account number')\n", (4159, 4187), True, 'from django.utils.translation import gettext_lazy as _\n'), ((4235, 4262), 'django.utils.translation.gettext_lazy', '_', (['"""To bank account number"""'], {}), "('To bank account number')\n", (4236, 4262), True, 'from django.utils.translation import gettext_lazy as _\n'), ((4372, 4385), 'django.utils.translation.gettext_lazy', '_', (['"""Currency"""'], {}), "('Currency')\n", (4373, 4385), True, 'from django.utils.translation import gettext_lazy as _\n'), ((4507, 4518), 'django.utils.translation.gettext_lazy', '_', (['"""Amount"""'], {}), "('Amount')\n", 
(4508, 4518), True, 'from django.utils.translation import gettext_lazy as _\n'), ((4550, 4560), 'django.utils.translation.gettext_lazy', '_', (['"""Title"""'], {}), "('Title')\n", (4551, 4560), True, 'from django.utils.translation import gettext_lazy as _\n'), ((4607, 4628), 'django.utils.translation.gettext_lazy', '_', (['"""Name and address"""'], {}), "('Name and address')\n", (4608, 4628), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2347, 2394), 'django.db.InternalError', 'InternalError', (['"""Max number of accounts reached"""'], {}), "('Max number of accounts reached')\n", (2360, 2394), False, 'from django.db import models, transaction, IntegrityError, InternalError\n'), ((3906, 3923), 'django.utils.translation.gettext_lazy', '_', (['"""From account"""'], {}), "('From account')\n", (3907, 3923), True, 'from django.utils.translation import gettext_lazy as _\n'), ((4068, 4083), 'django.utils.translation.gettext_lazy', '_', (['"""To account"""'], {}), "('To account')\n", (4069, 4083), True, 'from django.utils.translation import gettext_lazy as _\n'), ((6057, 6108), 'django.utils.translation.gettext_lazy', '_', (['"""You can only add new transactions or read them"""'], {}), "('You can only add new transactions or read them')\n", (6058, 6108), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1454, 1488), 'django.utils.translation.gettext_lazy', '_', (['"""Amount must be greater than 0"""'], {}), "('Amount must be greater than 0')\n", (1455, 1488), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1590, 1620), 'django.utils.translation.gettext_lazy', '_', (['"""You have not enough funds"""'], {}), "('You have not enough funds')\n", (1591, 1620), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1697, 1730), 'django.utils.translation.gettext_lazy', '_', (['f"""BAN {to_ban} is not correct"""'], {}), "(f'BAN {to_ban} is not correct')\n", (1698, 1730), True, 'from django.utils.translation 
import gettext_lazy as _\n'), ((4839, 4890), 'django.utils.translation.gettext_lazy', '_', (['"""You can only add new transactions or read them"""'], {}), "('You can only add new transactions or read them')\n", (4840, 4890), True, 'from django.utils.translation import gettext_lazy as _\n'), ((4990, 5030), 'django.utils.translation.gettext_lazy', '_', (['f"""BAN {self.from_ban} is not correct"""'], {}), "(f'BAN {self.from_ban} is not correct')\n", (4991, 5030), True, 'from django.utils.translation import gettext_lazy as _\n'), ((5123, 5161), 'django.utils.translation.gettext_lazy', '_', (['f"""BAN {self.to_ban} is not correct"""'], {}), "(f'BAN {self.to_ban} is not correct')\n", (5124, 5161), True, 'from django.utils.translation import gettext_lazy as _\n'), ((5257, 5308), 'django.utils.translation.gettext_lazy', '_', (['"""You cannot make a transfer within same account"""'], {}), "('You cannot make a transfer within same account')\n", (5258, 5308), True, 'from django.utils.translation import gettext_lazy as _\n'), ((5404, 5438), 'django.utils.translation.gettext_lazy', '_', (['"""Amount must be greater than 0"""'], {}), "('Amount must be greater than 0')\n", (5405, 5438), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2602, 2631), 'django.conf.settings.COUNTRY_CODE.upper', 'settings.COUNTRY_CODE.upper', ([], {}), '()\n', (2629, 2631), False, 'from django.conf import settings\n')] |
import os
import random
import numpy as np
import cv2
from lxml import etree
def mkdir(path):
    """
    Create directory *path* if it is missing; no-op when it exists.
    """
    if not os.path.exists(path):
        # exist_ok guards the race where another process creates the
        # directory between the check above and this call.
        os.makedirs(path, exist_ok=True)
def object_random(objects):
    """
    Pick one object uniformly at random.

    :param objects: ['object1', 'object2', ...]
    :return: a single element, e.g. 'object3'
    """
    return objects[random.randrange(len(objects))]
def contrast_img(img1, c, b):
    """
    Adjust image contrast/brightness: each pixel becomes ``c*pixel + b``.

    :param img1: original image
    :param c: contrast factor; > 1 brighter, < 1 darker
    :param b: scalar added to every weighted pixel
    :return: processed image
    """
    # Blending with an all-zero image of the same shape reduces
    # addWeighted to c * img1 + b.
    zeros = np.zeros(img1.shape, img1.dtype)
    return cv2.addWeighted(img1, c, zeros, 1 - c, b)
def rotateMask(mask,angle):
    """
    Rotate *mask* by *angle* degrees and crop it to its content.

    The mask is centered on a square canvas large enough to hold any
    rotation, rotated about the canvas center, then cropped back to
    the bounding box of its non-zero pixels (``get_roi``).

    :param mask: (height, width, channels) array
    :param angle: rotation angle in degrees
    :return: rotated, cropped mask as ``uint8``
    """
    h,w,c = mask.shape
    # Square canvas side: largest dimension plus a safety margin.
    max_h = np.max(mask.shape)+100
    mask_ = np.zeros((max_h,max_h,c))
    # Offsets that center the original mask on the canvas.
    h_, w_ = int(abs(max_h-h)/2), int(abs(max_h-w)/2)
    mask_[h_:(h_+h),w_:(w_+w),:] = mask
    M = cv2.getRotationMatrix2D((max_h / 2, max_h / 2), angle, 1)
    # dsize is (width, height); the canvas is square so reversing the
    # shape and dropping the channel count yields the right size.
    mask_rot = cv2.warpAffine(mask_, M, mask_.shape[::-1][1:])
    # Crop back to the non-zero bounding box of the rotated content.
    mask = get_roi(mask_rot)
    mask = mask.astype(np.uint8)
    return mask
def get_roi(img):
    """
    Crop *img* to the bounding box of its non-black content.

    A pixel counts as content only when every channel is non-zero,
    matching how the synthetic masks are produced.

    :param img: (height, width, channels) array
    :return: cropped region of ``img``
    :raises ValueError: if the image contains no content pixel
    """
    y_index, x_index = np.where((img != [0, 0, 0]).all(axis=2))
    y_min, y_max = np.min(y_index), np.max(y_index)
    x_min, x_max = np.min(x_index), np.max(x_index)
    # +1 because np.max returns the *last* content index while Python
    # slices exclude the stop bound -- without it the bottom row and
    # right column of the ROI were cut off.
    return img[y_min:y_max + 1, x_min:x_max + 1, :]
def occlusion_random():
    """
    Flip a fair coin deciding whether to occlude.

    :return: True with probability 0.5, otherwise False
    """
    return random.random() > 0.5
def point_random(p_left_up, p_right_bottom):
    """
    Draw a random point inside the given rectangle.

    :param p_left_up: (xmin, ymin)
    :param p_right_bottom: (xmax, ymax)
    :return: [x, y] -- note the components are swapped relative to the
        inputs (row/column convention used by the overlay code)
    """
    def _pick(lo, hi):
        # Degenerate/inverted range: fall back to the lower bound so
        # randint never sees lo > hi.
        return lo if lo >= hi else random.randint(lo, hi)

    y = _pick(p_left_up[0], p_right_bottom[0])
    x = _pick(p_left_up[1], p_right_bottom[1])
    return [x, y]
def img_overlay(image1,image2,point,mask,occlusion):
    """
    Paste image2 onto a copy of image1 at (point[0], point[1]).

    Near-black pixels of image2 (every channel < 5) are treated as
    transparent background.  Where the paste overlaps an already-placed
    object (mask class != 0) the *occlusion* flag decides who wins:
    True keeps the existing content, False overwrites it.  ``mask`` is
    updated in place with the pasted object's class and index.

    :param image1: background image, (height, width, 3)
    :param image2: sliding image added to the background, (height, width, 3)
    :param point: [x, y, class, i] -- paste position, object class and index
    :param mask: (height, width, 2) array; channel 0 is the object class,
        channel 1 the object index
    :param occlusion: True if the pasted image should be occluded by
        content already present in the mask
    :return: the composited copy of image1 (mask mutated as a side effect)
    """
    img1=image1.copy()
    img2=image2
    height,width,rgb=img1.shape
    height_r,width_r,rgb_r=img2.shape
    # x is height, y is width, but generally x is width, y is height
    x=point[0]
    y=point[1]
    # NOTE(review): shadows the builtin ``object``; harmless here but
    # worth renaming.
    object=point[2]
    # Paste would run off the background: return it unchanged.
    if x+height_r>height or y+width_r>width:
        return img1
    for i in range(height_r):
        for j in range(width_r):
            # Background (near-black) pixel of img2: keep img1 as is.
            if img2[i,j,0]<5 and img2[i,j,1]<5 and img2[i,j,2]<5:
                img1[x+i,y+j,:]=img1[x+i,y+j,:]
            else:
                # Spot already claimed by another object.
                if mask[x+i,y+j,0]!=0:
                    img1[x+i,y+j,:]= img1[x+i,y+j,:] if occlusion else img2[i,j,:]
                    mask[x + i, y + j, 0] =mask[x + i, y + j, 0] if occlusion else object
                    mask[x + i, y + j, 1] = mask[x + i, y + j, 1] if occlusion else point[3]
                else:
                    img1[x + i, y + j, :] = img2[i, j, :]
                    mask[x + i, y + j, 0] = object
                    mask[x + i, y + j, 1] = point[3]
    return img1
def occlusion_ratio(mask, image2, point):
    """
    Fraction of the sliding image's foreground covered by other objects.

    :param mask: (height, width, 2) mask of the synthetic image; channel
        0 holds the object class, channel 1 the object index
    :param image2: sliding image, (height, width, 3)
    :param point: [x, y, class, i] -- placement and identity of image2
    :return: occluded fraction; 1 if image2 sticks out of the mask bounds
    """
    mask_h, mask_w = mask.shape[:2]
    img_h, img_w = image2.shape[:2]
    x, y = point[0], point[1]
    obj, idx = point[2], point[3]
    if x + img_h > mask_h or y + img_w > mask_w:
        return 1
    total = 0
    occluded = 0
    for i in range(img_h):
        for j in range(img_w):
            # Near-black pixels are transparent background -- skip them.
            if image2[i, j, 0] <= 4 and image2[i, j, 1] <= 4 and image2[i, j, 2] <= 4:
                continue
            total += 1
            # Foreground pixel whose mask entry belongs to someone else.
            if mask[x + i, y + j, 0] != obj or mask[x + i, y + j, 1] != idx:
                occluded += 1
    return float(occluded) / float(total)
def _append_xml_object(annotation, obj_name, difficult, ratio_object, bbox):
    """Append one <object> entry (name, difficult flag, occlusion, bndbox)."""
    key_object = etree.SubElement(annotation, "object")
    etree.SubElement(key_object, "name").text = obj_name
    etree.SubElement(key_object, "difficult").text = difficult
    etree.SubElement(key_object, "occlusion").text = str(ratio_object)
    bndbox = etree.SubElement(key_object, "bndbox")
    xmin, ymin, xmax, ymax = bbox
    etree.SubElement(bndbox, "xmin").text = str(xmin)
    etree.SubElement(bndbox, "ymin").text = str(ymin)
    etree.SubElement(bndbox, "xmax").text = str(xmax)
    etree.SubElement(bndbox, "ymax").text = str(ymax)


def pascal_xml(img_syn,mask,imgs_added,objects_added,points,ratio,path,name):
    """
    Write a synthetic image and its Pascal-VOC-2007-style annotation.

    :param img_syn: synthetic image, stored as JPEG
    :param mask: (height, width, 2) mask of the synthetic image
    :param imgs_added: list of pasted object images
    :param objects_added: class name of each pasted image
    :param points: [num][x, y, class, i] placement info per pasted image
    :param ratio: occlusion threshold; objects occluded more than this
        are marked ``difficult``, fully occluded ones are dropped
    :param path: dataset root, e.g. '.../VOCdevkit'
    :param name: file stem, e.g. '000000'
    """
    annotation_path = os.path.join(path, 'VOC2007', 'Annotations', name + '.xml')
    img_path = os.path.join(path, 'VOC2007', 'JPEGImages', name + '.jpg')
    for sub_dir in ('JPEGImages', 'Annotations'):
        target = os.path.join(path, 'VOC2007', sub_dir)
        if not os.path.exists(target):
            os.makedirs(target)
    cv2.imwrite(img_path, img_syn)
    # Fixed VOC2007 header boilerplate.
    annotation = etree.Element("annotation")
    etree.SubElement(annotation, "folder").text = "VOC2007"
    etree.SubElement(annotation, "filename").text = name + '.jpg'
    source = etree.SubElement(annotation, "source")
    etree.SubElement(source, "database").text = "The VOC2007 Database"
    etree.SubElement(source, "annotation").text = "PASCAL VOC2007"
    etree.SubElement(source, "image").text = "flickr"
    etree.SubElement(source, "flickrid").text = " "
    owner = etree.SubElement(annotation, "owner")
    etree.SubElement(owner, "flickrid").text = 'sjtu'
    etree.SubElement(owner, "name").text = '<NAME>'
    size = etree.SubElement(annotation, "size")
    etree.SubElement(size, "width").text = '640'
    etree.SubElement(size, "height").text = '480'
    etree.SubElement(size, "depth").text = '3'
    etree.SubElement(annotation, "segmented").text = '0'
    for i, img in enumerate(imgs_added):
        point = points[i]
        height, width, rgb = img.shape
        xmin = point[1]
        ymin = point[0]
        xmax = point[1] + width
        ymax = point[0] + height
        # 1.0 means the object is completely hidden.
        ratio_object = occlusion_ratio(mask, img, point)
        if ratio_object < 1 and ratio_object > ratio:
            difficult = '1'
        elif ratio_object <= ratio:
            difficult = '0'
        else:
            # Fully occluded: leave it out of the annotation.
            continue
        _append_xml_object(annotation, objects_added[i], difficult,
                           ratio_object, (xmin, ymin, xmax, ymax))
    doc = etree.ElementTree(annotation)
    # lxml serializes to bytes: the file must be opened in binary mode,
    # and the context manager closes the handle (the original opened it
    # in text mode and leaked it).
    with open(annotation_path, "wb") as out_file:
        doc.write(out_file, pretty_print=True)
doc.write(open(annotation_path, "w"), pretty_print=True) | [
"lxml.etree.Element",
"cv2.imwrite",
"os.path.exists",
"random.choice",
"cv2.warpAffine",
"lxml.etree.SubElement",
"lxml.etree.ElementTree",
"os.makedirs",
"os.path.join",
"numpy.max",
"cv2.addWeighted",
"numpy.zeros",
"numpy.min",
"cv2.getRotationMatrix2D",
"random.random",
"random.ra... | [((309, 331), 'random.choice', 'random.choice', (['objects'], {}), '(objects)\n', (322, 331), False, 'import random\n'), ((572, 615), 'numpy.zeros', 'np.zeros', (['[rows, cols, chunnel]', 'img1.dtype'], {}), '([rows, cols, chunnel], img1.dtype)\n', (580, 615), True, 'import numpy as np\n'), ((663, 704), 'cv2.addWeighted', 'cv2.addWeighted', (['img1', 'c', 'blank', '(1 - c)', 'b'], {}), '(img1, c, blank, 1 - c, b)\n', (678, 704), False, 'import cv2\n'), ((850, 877), 'numpy.zeros', 'np.zeros', (['(max_h, max_h, c)'], {}), '((max_h, max_h, c))\n', (858, 877), True, 'import numpy as np\n'), ((980, 1037), 'cv2.getRotationMatrix2D', 'cv2.getRotationMatrix2D', (['(max_h / 2, max_h / 2)', 'angle', '(1)'], {}), '((max_h / 2, max_h / 2), angle, 1)\n', (1003, 1037), False, 'import cv2\n'), ((1053, 1100), 'cv2.warpAffine', 'cv2.warpAffine', (['mask_', 'M', 'mask_.shape[::-1][1:]'], {}), '(mask_, M, mask_.shape[::-1][1:])\n', (1067, 1100), False, 'import cv2\n'), ((1598, 1613), 'random.random', 'random.random', ([], {}), '()\n', (1611, 1613), False, 'import random\n'), ((5113, 5172), 'os.path.join', 'os.path.join', (['path', '"""VOC2007"""', '"""Annotations"""', "(name + '.xml')"], {}), "(path, 'VOC2007', 'Annotations', name + '.xml')\n", (5125, 5172), False, 'import os\n'), ((5181, 5239), 'os.path.join', 'os.path.join', (['path', '"""VOC2007"""', '"""JPEGImages"""', "(name + '.jpg')"], {}), "(path, 'VOC2007', 'JPEGImages', name + '.jpg')\n", (5193, 5239), False, 'import os\n'), ((5509, 5539), 'cv2.imwrite', 'cv2.imwrite', (['img_path', 'img_syn'], {}), '(img_path, img_syn)\n', (5520, 5539), False, 'import cv2\n'), ((5554, 5581), 'lxml.etree.Element', 'etree.Element', (['"""annotation"""'], {}), "('annotation')\n", (5567, 5581), False, 'from lxml import etree\n'), ((5719, 5757), 'lxml.etree.SubElement', 'etree.SubElement', (['annotation', '"""source"""'], {}), "(annotation, 'source')\n", (5735, 5757), False, 'from lxml import etree\n'), ((6014, 6051), 
'lxml.etree.SubElement', 'etree.SubElement', (['annotation', '"""owner"""'], {}), "(annotation, 'owner')\n", (6030, 6051), False, 'from lxml import etree\n'), ((6169, 6205), 'lxml.etree.SubElement', 'etree.SubElement', (['annotation', '"""size"""'], {}), "(annotation, 'size')\n", (6185, 6205), False, 'from lxml import etree\n'), ((7993, 8022), 'lxml.etree.ElementTree', 'etree.ElementTree', (['annotation'], {}), '(annotation)\n', (8010, 8022), False, 'from lxml import etree\n'), ((106, 126), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (120, 126), False, 'import os\n'), ((136, 153), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (147, 153), False, 'import os\n'), ((815, 833), 'numpy.max', 'np.max', (['mask.shape'], {}), '(mask.shape)\n', (821, 833), True, 'import numpy as np\n'), ((1350, 1365), 'numpy.min', 'np.min', (['y_index'], {}), '(y_index)\n', (1356, 1365), True, 'import numpy as np\n'), ((1367, 1382), 'numpy.max', 'np.max', (['y_index'], {}), '(y_index)\n', (1373, 1382), True, 'import numpy as np\n'), ((1402, 1417), 'numpy.min', 'np.min', (['x_index'], {}), '(x_index)\n', (1408, 1417), True, 'import numpy as np\n'), ((1419, 1434), 'numpy.max', 'np.max', (['x_index'], {}), '(x_index)\n', (1425, 1434), True, 'import numpy as np\n'), ((1934, 1981), 'random.randint', 'random.randint', (['p_left_up[0]', 'p_right_bottom[0]'], {}), '(p_left_up[0], p_right_bottom[0])\n', (1948, 1981), False, 'import random\n'), ((2069, 2116), 'random.randint', 'random.randint', (['p_left_up[1]', 'p_right_bottom[1]'], {}), '(p_left_up[1], p_right_bottom[1])\n', (2083, 2116), False, 'import random\n'), ((5586, 5624), 'lxml.etree.SubElement', 'etree.SubElement', (['annotation', '"""folder"""'], {}), "(annotation, 'folder')\n", (5602, 5624), False, 'from lxml import etree\n'), ((5646, 5686), 'lxml.etree.SubElement', 'etree.SubElement', (['annotation', '"""filename"""'], {}), "(annotation, 'filename')\n", (5662, 5686), False, 'from lxml import etree\n'), 
((5762, 5798), 'lxml.etree.SubElement', 'etree.SubElement', (['source', '"""database"""'], {}), "(source, 'database')\n", (5778, 5798), False, 'from lxml import etree\n'), ((5833, 5871), 'lxml.etree.SubElement', 'etree.SubElement', (['source', '"""annotation"""'], {}), "(source, 'annotation')\n", (5849, 5871), False, 'from lxml import etree\n'), ((5900, 5933), 'lxml.etree.SubElement', 'etree.SubElement', (['source', '"""image"""'], {}), "(source, 'image')\n", (5916, 5933), False, 'from lxml import etree\n'), ((5954, 5990), 'lxml.etree.SubElement', 'etree.SubElement', (['source', '"""flickrid"""'], {}), "(source, 'flickrid')\n", (5970, 5990), False, 'from lxml import etree\n'), ((6056, 6091), 'lxml.etree.SubElement', 'etree.SubElement', (['owner', '"""flickrid"""'], {}), "(owner, 'flickrid')\n", (6072, 6091), False, 'from lxml import etree\n'), ((6110, 6141), 'lxml.etree.SubElement', 'etree.SubElement', (['owner', '"""name"""'], {}), "(owner, 'name')\n", (6126, 6141), False, 'from lxml import etree\n'), ((6210, 6241), 'lxml.etree.SubElement', 'etree.SubElement', (['size', '"""width"""'], {}), "(size, 'width')\n", (6226, 6241), False, 'from lxml import etree\n'), ((6259, 6291), 'lxml.etree.SubElement', 'etree.SubElement', (['size', '"""height"""'], {}), "(size, 'height')\n", (6275, 6291), False, 'from lxml import etree\n'), ((6309, 6340), 'lxml.etree.SubElement', 'etree.SubElement', (['size', '"""depth"""'], {}), "(size, 'depth')\n", (6325, 6340), False, 'from lxml import etree\n'), ((6356, 6397), 'lxml.etree.SubElement', 'etree.SubElement', (['annotation', '"""segmented"""'], {}), "(annotation, 'segmented')\n", (6372, 6397), False, 'from lxml import etree\n'), ((5262, 5305), 'os.path.join', 'os.path.join', (['path', '"""VOC2007"""', '"""JPEGImages"""'], {}), "(path, 'VOC2007', 'JPEGImages')\n", (5274, 5305), False, 'import os\n'), ((5326, 5369), 'os.path.join', 'os.path.join', (['path', '"""VOC2007"""', '"""JPEGImages"""'], {}), "(path, 'VOC2007', 'JPEGImages')\n", 
(5338, 5369), False, 'import os\n'), ((5395, 5439), 'os.path.join', 'os.path.join', (['path', '"""VOC2007"""', '"""Annotations"""'], {}), "(path, 'VOC2007', 'Annotations')\n", (5407, 5439), False, 'import os\n'), ((5460, 5504), 'os.path.join', 'os.path.join', (['path', '"""VOC2007"""', '"""Annotations"""'], {}), "(path, 'VOC2007', 'Annotations')\n", (5472, 5504), False, 'import os\n'), ((6796, 6834), 'lxml.etree.SubElement', 'etree.SubElement', (['annotation', '"""object"""'], {}), "(annotation, 'object')\n", (6812, 6834), False, 'from lxml import etree\n'), ((7073, 7111), 'lxml.etree.SubElement', 'etree.SubElement', (['key_object', '"""bndbox"""'], {}), "(key_object, 'bndbox')\n", (7089, 7111), False, 'from lxml import etree\n'), ((6847, 6883), 'lxml.etree.SubElement', 'etree.SubElement', (['key_object', '"""name"""'], {}), "(key_object, 'name')\n", (6863, 6883), False, 'from lxml import etree\n'), ((6920, 6961), 'lxml.etree.SubElement', 'etree.SubElement', (['key_object', '"""difficult"""'], {}), "(key_object, 'difficult')\n", (6936, 6961), False, 'from lxml import etree\n'), ((6985, 7026), 'lxml.etree.SubElement', 'etree.SubElement', (['key_object', '"""occlusion"""'], {}), "(key_object, 'occlusion')\n", (7001, 7026), False, 'from lxml import etree\n'), ((7124, 7156), 'lxml.etree.SubElement', 'etree.SubElement', (['bndbox', '"""xmin"""'], {}), "(bndbox, 'xmin')\n", (7140, 7156), False, 'from lxml import etree\n'), ((7186, 7218), 'lxml.etree.SubElement', 'etree.SubElement', (['bndbox', '"""ymin"""'], {}), "(bndbox, 'ymin')\n", (7202, 7218), False, 'from lxml import etree\n'), ((7248, 7280), 'lxml.etree.SubElement', 'etree.SubElement', (['bndbox', '"""xmax"""'], {}), "(bndbox, 'xmax')\n", (7264, 7280), False, 'from lxml import etree\n'), ((7310, 7342), 'lxml.etree.SubElement', 'etree.SubElement', (['bndbox', '"""ymax"""'], {}), "(bndbox, 'ymax')\n", (7326, 7342), False, 'from lxml import etree\n'), ((7419, 7457), 'lxml.etree.SubElement', 'etree.SubElement', 
(['annotation', '"""object"""'], {}), "(annotation, 'object')\n", (7435, 7457), False, 'from lxml import etree\n'), ((7696, 7734), 'lxml.etree.SubElement', 'etree.SubElement', (['key_object', '"""bndbox"""'], {}), "(key_object, 'bndbox')\n", (7712, 7734), False, 'from lxml import etree\n'), ((7470, 7506), 'lxml.etree.SubElement', 'etree.SubElement', (['key_object', '"""name"""'], {}), "(key_object, 'name')\n", (7486, 7506), False, 'from lxml import etree\n'), ((7543, 7584), 'lxml.etree.SubElement', 'etree.SubElement', (['key_object', '"""difficult"""'], {}), "(key_object, 'difficult')\n", (7559, 7584), False, 'from lxml import etree\n'), ((7608, 7649), 'lxml.etree.SubElement', 'etree.SubElement', (['key_object', '"""occlusion"""'], {}), "(key_object, 'occlusion')\n", (7624, 7649), False, 'from lxml import etree\n'), ((7747, 7779), 'lxml.etree.SubElement', 'etree.SubElement', (['bndbox', '"""xmin"""'], {}), "(bndbox, 'xmin')\n", (7763, 7779), False, 'from lxml import etree\n'), ((7809, 7841), 'lxml.etree.SubElement', 'etree.SubElement', (['bndbox', '"""ymin"""'], {}), "(bndbox, 'ymin')\n", (7825, 7841), False, 'from lxml import etree\n'), ((7871, 7903), 'lxml.etree.SubElement', 'etree.SubElement', (['bndbox', '"""xmax"""'], {}), "(bndbox, 'xmax')\n", (7887, 7903), False, 'from lxml import etree\n'), ((7933, 7965), 'lxml.etree.SubElement', 'etree.SubElement', (['bndbox', '"""ymax"""'], {}), "(bndbox, 'ymax')\n", (7949, 7965), False, 'from lxml import etree\n')] |
import os
import platform
import sys
from os import listdir
from pathlib import Path
from src.create_dir import create_numbered_dirs, get_parent_dir
from src.validate_windows_file_name import is_valid_windows_file_name
def get_files_in(dir: str):
    """
    Return the paths of the regular files in *dir*, sorted alphabetically.

    Directories are skipped.  Paths are built with ``os.path.join`` so
    the function works on every platform (the old version hard-coded
    the Windows ``\\`` separator, and never actually sorted despite its
    docstring).
    """
    return sorted(
        os.path.join(dir, entry)
        for entry in listdir(dir)
        if os.path.isfile(os.path.join(dir, entry))
    )
def create_new_dirs(parent_dir: str, number_of_dirs: int):
    """Create *number_of_dirs* consecutively numbered directories under
    *parent_dir*, numbering from 1."""
    first_number = 1
    return create_numbered_dirs(parent_dir, first_number, number_of_dirs)
def move_files(path_to_files: [str], destinations: [str], new_file_name: str):
    """
    Move each file into its matching destination directory, renaming it.

    Assumes every entry of *path_to_files* is an absolute path to a
    file and that both lists are non-empty.  Only the stem is renamed
    to *new_file_name*; the original extension is preserved.
    """
    for index, source_path in enumerate(path_to_files):
        original_file_name = os.path.basename(source_path)
        # Keep the extension; only the stem changes.
        extension = Path(original_file_name).suffix
        target_path = os.path.join(destinations[index],
                                   new_file_name + extension)
        os.rename(source_path, target_path)
        print(f'Moved {original_file_name}...')
def ask_for_file_name():
    """
    Prompt the user for the stem to use for the new file names.

    On Windows the answer is validated against Windows file-name rules
    (up to three attempts); elsewhere any input is accepted.
    """
    question = 'What do you want to use for the new file names ?'
    if platform.system() == "Windows":
        # Bug fix: the Windows helper's validated answer was previously
        # discarded and the user got prompted a second time.
        return ask_for_file_name_on_windows(question)
    print(question, sep='')
    return input()
def ask_for_file_name_on_windows(question: str):
    """
    Ask for a file name until it passes Windows validation.

    The user gets at most three attempts; after the third failure the
    program exits.

    :param question: prompt shown before each attempt
    :return: the first valid file name entered
    """
    max_attempts = 3
    for _ in range(max_attempts):
        print(question, sep='')
        candidate = input()
        if is_valid_windows_file_name(candidate):
            return candidate
        print('Invalid file name. Try again.')
    print('Error: you did not enter in a valid name after 3 attempts. Exiting...')
    sys.exit()
def main():
    """Drive the workflow: find files, create numbered directories, then
    move and rename each file into its own directory."""
    source_dir = get_parent_dir()
    source_files = get_files_in(source_dir)
    if not source_files:
        print(f'Cannot find any files in {source_dir}')
        sys.exit()
    numbered_dirs = create_new_dirs(source_dir, len(source_files))
    if not numbered_dirs:
        print(f'Could not create new directories. Perhaps those directories already exist in '
              f'{source_dir}?')
        sys.exit()
    move_files(source_files, numbered_dirs, ask_for_file_name())


if __name__ == '__main__':
    main()
| [
"src.create_dir.create_numbered_dirs",
"os.listdir",
"pathlib.Path",
"src.validate_windows_file_name.is_valid_windows_file_name",
"os.rename",
"src.create_dir.get_parent_dir",
"os.path.join",
"platform.system",
"os.path.basename",
"sys.exit"
] | [((606, 661), 'src.create_dir.create_numbered_dirs', 'create_numbered_dirs', (['parent_dir', 'start', 'number_of_dirs'], {}), '(parent_dir, start, number_of_dirs)\n', (626, 661), False, 'from src.create_dir import create_numbered_dirs, get_parent_dir\n'), ((2058, 2068), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2066, 2068), False, 'import sys\n'), ((2100, 2116), 'src.create_dir.get_parent_dir', 'get_parent_dir', ([], {}), '()\n', (2114, 2116), False, 'from src.create_dir import create_numbered_dirs, get_parent_dir\n'), ((1037, 1066), 'os.path.basename', 'os.path.basename', (['source_path'], {}), '(source_path)\n', (1053, 1066), False, 'import os\n'), ((1347, 1396), 'os.path.join', 'os.path.join', (['destinations[index]', 'new_file_name2'], {}), '(destinations[index], new_file_name2)\n', (1359, 1396), False, 'import os\n'), ((1405, 1433), 'os.rename', 'os.rename', (['source_path', 'dest'], {}), '(source_path, dest)\n', (1414, 1433), False, 'import os\n'), ((1585, 1602), 'platform.system', 'platform.system', ([], {}), '()\n', (1600, 1602), False, 'import platform\n'), ((1856, 1893), 'src.validate_windows_file_name.is_valid_windows_file_name', 'is_valid_windows_file_name', (['file_name'], {}), '(file_name)\n', (1882, 1893), False, 'from src.validate_windows_file_name import is_valid_windows_file_name\n'), ((2236, 2246), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2244, 2246), False, 'import sys\n'), ((2459, 2469), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2467, 2469), False, 'import sys\n'), ((463, 475), 'os.listdir', 'listdir', (['dir'], {}), '(dir)\n', (470, 475), False, 'from os import listdir\n'), ((1092, 1116), 'pathlib.Path', 'Path', (['original_file_name'], {}), '(original_file_name)\n', (1096, 1116), False, 'from pathlib import Path\n'), ((494, 517), 'os.path.join', 'os.path.join', (['dir', 'file'], {}), '(dir, file)\n', (506, 517), False, 'import os\n')] |