code
stringlengths 22
1.05M
| apis
listlengths 1
3.31k
| extract_api
stringlengths 75
3.25M
|
|---|---|---|
import base_tests as b
import pytest
import requests
import tests_helpers as h
@pytest.fixture(scope="module")
def main_app_url(module_scoped_container_getter):
    """Return the base URL of the main FastAPI service once it is responsive."""
    url = h.get_app_url(module_scoped_container_getter, "fastapi_main_app_main")
    return url
@pytest.fixture(scope="module")
def admin_url(module_scoped_container_getter):
    """Return the base URL of the admin FastAPI service once it is responsive."""
    url = h.get_app_url(module_scoped_container_getter, "fastapi_main_app_admin")
    return url
@pytest.fixture(scope="module")
def admin_auth_headers(admin_url):
    """Obtain an auth token from the admin service and return it as request headers."""
    headers = h.get_auth_headers(admin_url)
    return headers
@pytest.fixture(scope="module")
def initdb(admin_url, admin_auth_headers):
    """Load the ``preset_1`` data set into the DB via the authenticated admin API."""
    result = h.init_db(admin_url, admin_auth_headers, "preset_1")
    return result
def test_main_service_run(main_app_url):
    # Smoke test: delegate to the shared "service is up" check.
    b.test_main_service_run(main_app_url)
def test_admin_service_run(admin_url):
    # Reuses the main-service smoke check against the admin URL.
    # NOTE(review): b.test_main_service_run (not an admin-specific check) is
    # called here — presumably intentional reuse; confirm base_tests has no
    # dedicated admin variant.
    b.test_main_service_run(admin_url)
def test_presets_was_loaded(initdb):
    # Verify the initdb fixture actually loaded the preset data.
    b.test_presets_was_loaded(initdb)
def test_main_service_users(main_app_url, initdb):
    """Check the main service reports the user count created by ``preset_1``.

    Depends on the ``initdb`` fixture having loaded the preset into the DB.
    """
    # Timeout so a hung container fails the test instead of blocking forever.
    response = requests.get(f"{main_app_url}/users", timeout=30)
    assert response.status_code == 200
    # preset_1 is expected to create exactly 5 users.
    assert response.json() == {"count_users": 5}
def test_admin_service_drop(admin_auth_headers, admin_url):
    # Delegate to the shared check that the authenticated drop endpoint works.
    b.test_admin_service_drop(admin_auth_headers, admin_url)
|
[
"base_tests.test_presets_was_loaded",
"base_tests.test_main_service_run",
"tests_helpers.get_app_url",
"tests_helpers.init_db",
"pytest.fixture",
"base_tests.test_admin_service_drop",
"requests.get",
"tests_helpers.get_auth_headers"
] |
[((82, 112), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (96, 112), False, 'import pytest\n'), ((325, 355), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (339, 355), False, 'import pytest\n'), ((566, 596), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (580, 596), False, 'import pytest\n'), ((703, 733), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (717, 733), False, 'import pytest\n'), ((251, 321), 'tests_helpers.get_app_url', 'h.get_app_url', (['module_scoped_container_getter', '"""fastapi_main_app_main"""'], {}), "(module_scoped_container_getter, 'fastapi_main_app_main')\n", (264, 321), True, 'import tests_helpers as h\n'), ((491, 562), 'tests_helpers.get_app_url', 'h.get_app_url', (['module_scoped_container_getter', '"""fastapi_main_app_admin"""'], {}), "(module_scoped_container_getter, 'fastapi_main_app_admin')\n", (504, 562), True, 'import tests_helpers as h\n'), ((670, 699), 'tests_helpers.get_auth_headers', 'h.get_auth_headers', (['admin_url'], {}), '(admin_url)\n', (688, 699), True, 'import tests_helpers as h\n'), ((829, 881), 'tests_helpers.init_db', 'h.init_db', (['admin_url', 'admin_auth_headers', '"""preset_1"""'], {}), "(admin_url, admin_auth_headers, 'preset_1')\n", (838, 881), True, 'import tests_helpers as h\n'), ((929, 966), 'base_tests.test_main_service_run', 'b.test_main_service_run', (['main_app_url'], {}), '(main_app_url)\n', (952, 966), True, 'import base_tests as b\n'), ((1012, 1046), 'base_tests.test_main_service_run', 'b.test_main_service_run', (['admin_url'], {}), '(admin_url)\n', (1035, 1046), True, 'import base_tests as b\n'), ((1090, 1123), 'base_tests.test_presets_was_loaded', 'b.test_presets_was_loaded', (['initdb'], {}), '(initdb)\n', (1115, 1123), True, 'import base_tests as b\n'), ((1406, 1462), 'base_tests.test_admin_service_drop', 
'b.test_admin_service_drop', (['admin_auth_headers', 'admin_url'], {}), '(admin_auth_headers, admin_url)\n', (1431, 1462), True, 'import base_tests as b\n'), ((1237, 1274), 'requests.get', 'requests.get', (['f"""{main_app_url}/users"""'], {}), "(f'{main_app_url}/users')\n", (1249, 1274), False, 'import requests\n')]
|
#!/usr/local/bin/python
import sys, getopt
import pandas as pd
def usage():
    """Print the command-line usage string and exit with status 2."""
    message = 'csv_to_html.py -h -i <input_csv> -o <output_html>'
    print(message)
    sys.exit(2)
def main(argv):
    """Convert a CSV file to an HTML table.

    Parses ``-i/--input_csv`` and ``-o/--output_html`` from *argv*, reads the
    CSV with pandas and writes it out as a centered HTML table (no index,
    NaNs rendered as empty strings).  Exits with status 2 (via ``usage``) on
    bad or missing arguments.
    """
    try:
        opts, args = getopt.getopt(argv, "hi:o:", ["help", "input_csv=", "output_html="])
    except getopt.GetoptError as err:
        print(err)
        usage()  # usage() exits with status 2
    input_csv = ''
    output_html = ''
    for opt, arg in opts:
        if opt in ("-h", "--help"):
            usage()
        elif opt in ("-i", "--input_csv"):
            input_csv = arg
        elif opt in ("-o", "--output_html"):
            output_html = arg
        else:
            # Defensive: getopt already rejects unknown options.  The original
            # `assert False, usage()` is stripped under `python -O`.
            usage()
    if not input_csv or not output_html:
        # Both paths are required; fail with usage rather than a pandas traceback.
        usage()
    # Open the CSV for conversion
    fd = pd.read_csv(input_csv)
    # Use .to_html() to render the table as HTML
    fd.to_html(output_html, index=False, na_rep="", justify="center")


if __name__ == "__main__":
    main(sys.argv[1:])
|
[
"pandas.read_csv",
"getopt.getopt",
"sys.exit"
] |
[((140, 151), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (148, 151), False, 'import sys, getopt\n'), ((614, 636), 'pandas.read_csv', 'pd.read_csv', (['input_csv'], {}), '(input_csv)\n', (625, 636), True, 'import pandas as pd\n'), ((190, 258), 'getopt.getopt', 'getopt.getopt', (['argv', '"""hi:o:"""', "['help', 'input_csv=', 'output_html=']"], {}), "(argv, 'hi:o:', ['help', 'input_csv=', 'output_html='])\n", (203, 258), False, 'import sys, getopt\n'), ((315, 326), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (323, 326), False, 'import sys, getopt\n')]
|
import bs4, requests, threading, datetime
import tkinter as tk
from tkmacosx import Button
from tkinter import messagebox
from functools import partial
class MainApp(tk.Tk):
    """Tk window that watches stock prices scraped from Yahoo Finance.

    A search bar adds tickers to a watch list; each watched ticker gets a
    row (name label, price label, change label, remove button) in
    ``stocks_frame`` that is refreshed by a background thread.
    """

    def __init__(self):
        super().__init__()
        self.title("Stock Watcher")
        self.geometry("600x600+0+0")
        self.search_frame = tk.Frame(self, width=600, height=50)
        self.label = tk.Label(self.search_frame, text="Stock Ticker:")
        self.entry = tk.Entry(self.search_frame, width=25)
        self.entry.focus()
        self.entry.bind("<Return>", self.add_stock)
        self.button = Button(self.search_frame, text="Add Ticket", bg="blue", fg="white", command=self.add_stock)
        self.time = tk.Label(self.search_frame, fg="red", font=(None, '14', 'bold'))
        self.stocks_frame = tk.Frame(self, width=600, height=550)
        # ticker -> [name label, price label, change label, remove button]
        self.stocks_d = {}
        self.row = 0  # next free grid row in stocks_frame
        self.display()
        self.update_time()

    def display(self):
        """Pack the static widgets."""
        self.search_frame.pack()
        self.label.pack(side=tk.LEFT)
        self.entry.pack(side=tk.LEFT)
        self.button.pack(side=tk.LEFT)
        self.time.pack(side=tk.LEFT)
        self.stocks_frame.pack()

    def update_time(self):
        """Show the wall-clock time; reschedules itself every 200 ms."""
        self.time.config(text=datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
        self.time.after(200, self.update_time)

    def add_stock(self, arg=None):
        """Look up the ticker in the entry box and add a watch-list row for it."""
        try:
            (self.price, self.change) = self.get_price(self.entry.get())
        except IndexError:
            # get_price indexes into the scraped page; an unknown ticker
            # yields no match and raises IndexError.
            messagebox.showinfo("Error", "Stock not found")
            self.entry.delete(0, tk.END)
        else:
            ticker = self.entry.get().upper()
            if not self.change or not self.price:
                messagebox.showinfo("Error", "Stock not found")
            elif ticker not in self.stocks_d:
                widgets = [tk.Label(self.stocks_frame, text=ticker, font=(None, '14', 'bold'))]
                widgets.append(tk.Label(self.stocks_frame, text=self.price, font=(None, '14', 'bold')))
                # Colour the change label by its sign.
                # BUGFIX: the sign is the FIRST character of the change string
                # (was `self.change[1] == '+'`, which never matched and left
                # positive changes black; update_price checks change[0]).
                if self.change[0] == '-':
                    widgets.append(tk.Label(self.stocks_frame, text=self.change, font=(None, '14', 'bold'), fg="red"))
                elif self.change[0] == '+':
                    widgets.append(tk.Label(self.stocks_frame, text=self.change, font=(None, '14', 'bold'), fg="green"))
                else:
                    widgets.append(tk.Label(self.stocks_frame, text=self.change, font=(None, '14', 'bold'), fg="black"))
                widgets.append(Button(self.stocks_frame, text="Remove", bg="red", fg="white",
                                      command=partial(self.remove_stock, ticker)))
                self.stocks_d[ticker] = widgets
                for column, widget in enumerate(widgets):
                    widget.grid(row=self.row, column=column)
                self.row += 1
                # Use the public Thread API instead of the private
                # threading._start_new_thread; daemon=True so the refresher
                # does not keep the process alive after the window closes.
                threading.Thread(target=self.update_price, args=(ticker,), daemon=True).start()
            else:
                messagebox.showinfo("Error", "Already added to watchlist")
            self.entry.delete(0, tk.END)

    def get_price(self, name):
        """Scrape Yahoo Finance for *name* and return (price, change) strings.

        Uses only local variables so concurrent update_price threads do not
        clobber each other's state.  NOTE(review): the CSS class string is
        Yahoo-internal and fragile — expect this to break when the page
        markup changes.
        """
        res = requests.get('https://finance.yahoo.com/quote/' + name, headers={'User-agent': 'Mozilla/5.0'})
        res.raise_for_status()
        soup = bs4.BeautifulSoup(res.text, 'html.parser')
        quote = soup.find_all('div', {'class': 'My(6px) Pos(r) smartphone_Mt(6px)'})[0]
        price = quote.find('span').text
        change = quote.find_all('span')[1].text
        return (price, change)

    def update_price(self, name):
        """Background loop: keep refreshing the row for *name*.

        Stops when the row's widgets have been removed (KeyError on lookup).
        Uses locals rather than self.price/self.change so multiple watcher
        threads cannot race on shared attributes.
        """
        while True:
            (price, change) = self.get_price(name)
            try:
                self.stocks_d[name][1].config(text=price)
                if not price or not change:
                    pass
                elif change[0] == "-":
                    self.stocks_d[name][2].config(text=change, fg="red")
                elif change[0] == "+":
                    self.stocks_d[name][2].config(text=change, fg="green")
                else:
                    self.stocks_d[name][2].config(text=change, fg="black")
            except KeyError:
                # Row was removed from the watch list; end this thread.
                break

    def remove_stock(self, name):
        """Destroy the row widgets for *name* and drop it from the watch list."""
        for widget in self.stocks_d[name]:
            widget.destroy()
        del self.stocks_d[name]
# Launch the watcher UI when run as a script.
if __name__ == "__main__":
    root = MainApp()
    root.mainloop()
|
[
"tkmacosx.Button",
"tkinter.Entry",
"tkinter.messagebox.showinfo",
"datetime.datetime.now",
"requests.get",
"tkinter.Frame",
"bs4.BeautifulSoup",
"tkinter.Label"
] |
[((329, 365), 'tkinter.Frame', 'tk.Frame', (['self'], {'width': '(600)', 'height': '(50)'}), '(self, width=600, height=50)\n', (337, 365), True, 'import tkinter as tk\n'), ((387, 436), 'tkinter.Label', 'tk.Label', (['self.search_frame'], {'text': '"""Stock Ticker:"""'}), "(self.search_frame, text='Stock Ticker:')\n", (395, 436), True, 'import tkinter as tk\n'), ((458, 495), 'tkinter.Entry', 'tk.Entry', (['self.search_frame'], {'width': '(25)'}), '(self.search_frame, width=25)\n', (466, 495), True, 'import tkinter as tk\n'), ((597, 693), 'tkmacosx.Button', 'Button', (['self.search_frame'], {'text': '"""Add Ticket"""', 'bg': '"""blue"""', 'fg': '"""white"""', 'command': 'self.add_stock'}), "(self.search_frame, text='Add Ticket', bg='blue', fg='white', command\n =self.add_stock)\n", (603, 693), False, 'from tkmacosx import Button\n'), ((709, 773), 'tkinter.Label', 'tk.Label', (['self.search_frame'], {'fg': '"""red"""', 'font': "(None, '14', 'bold')"}), "(self.search_frame, fg='red', font=(None, '14', 'bold'))\n", (717, 773), True, 'import tkinter as tk\n'), ((803, 840), 'tkinter.Frame', 'tk.Frame', (['self'], {'width': '(600)', 'height': '(550)'}), '(self, width=600, height=550)\n', (811, 840), True, 'import tkinter as tk\n'), ((3512, 3611), 'requests.get', 'requests.get', (["('https://finance.yahoo.com/quote/' + name)"], {'headers': "{'User-agent': 'Mozilla/5.0'}"}), "('https://finance.yahoo.com/quote/' + name, headers={\n 'User-agent': 'Mozilla/5.0'})\n", (3524, 3611), False, 'import bs4, requests, threading, datetime\n'), ((3661, 3708), 'bs4.BeautifulSoup', 'bs4.BeautifulSoup', (['self.res.text', '"""html.parser"""'], {}), "(self.res.text, 'html.parser')\n", (3678, 3708), False, 'import bs4, requests, threading, datetime\n'), ((1517, 1564), 'tkinter.messagebox.showinfo', 'messagebox.showinfo', (['"""Error"""', '"""Stock not found"""'], {}), "('Error', 'Stock not found')\n", (1536, 1564), False, 'from tkinter import messagebox\n'), ((1686, 1733), 
'tkinter.messagebox.showinfo', 'messagebox.showinfo', (['"""Error"""', '"""Stock not found"""'], {}), "('Error', 'Stock not found')\n", (1705, 1733), False, 'from tkinter import messagebox\n'), ((3356, 3414), 'tkinter.messagebox.showinfo', 'messagebox.showinfo', (['"""Error"""', '"""Already added to watchlist"""'], {}), "('Error', 'Already added to watchlist')\n", (3375, 3414), False, 'from tkinter import messagebox\n'), ((1249, 1272), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1270, 1272), False, 'import bs4, requests, threading, datetime\n'), ((2007, 2078), 'tkinter.Label', 'tk.Label', (['self.stocks_frame'], {'text': 'self.price', 'font': "(None, '14', 'bold')"}), "(self.stocks_frame, text=self.price, font=(None, '14', 'bold'))\n", (2015, 2078), True, 'import tkinter as tk\n'), ((2189, 2276), 'tkinter.Label', 'tk.Label', (['self.stocks_frame'], {'text': 'self.change', 'font': "(None, '14', 'bold')", 'fg': '"""red"""'}), "(self.stocks_frame, text=self.change, font=(None, '14', 'bold'), fg\n ='red')\n", (2197, 2276), True, 'import tkinter as tk\n'), ((2384, 2473), 'tkinter.Label', 'tk.Label', (['self.stocks_frame'], {'text': 'self.change', 'font': "(None, '14', 'bold')", 'fg': '"""green"""'}), "(self.stocks_frame, text=self.change, font=(None, '14', 'bold'), fg\n ='green')\n", (2392, 2473), True, 'import tkinter as tk\n'), ((2559, 2648), 'tkinter.Label', 'tk.Label', (['self.stocks_frame'], {'text': 'self.change', 'font': "(None, '14', 'bold')", 'fg': '"""black"""'}), "(self.stocks_frame, text=self.change, font=(None, '14', 'bold'), fg\n ='black')\n", (2567, 2648), True, 'import tkinter as tk\n')]
|
from ncclient import manager
# Open a NETCONF session to the IOS-XE device.
# NOTE(review): credentials are hard-coded sample values.
m = manager.connect(
    host='10.199.199.250',
    port='830',
    username='admin',
    password='<PASSWORD>',
    device_params={'name': 'iosxe'},
    hostkey_verify=False,
)

# Print every capability the server advertises, separated by rules.
for capability in m.server_capabilities:
    print('*' * 50)
    print(capability)
|
[
"ncclient.manager.connect"
] |
[((36, 191), 'ncclient.manager.connect', 'manager.connect', ([], {'host': '"""10.199.199.250"""', 'port': '"""830"""', 'username': '"""admin"""', 'password': '"""<PASSWORD>"""', 'device_params': "{'name': 'iosxe'}", 'hostkey_verify': '(False)'}), "(host='10.199.199.250', port='830', username='admin',\n password='<PASSWORD>', device_params={'name': 'iosxe'}, hostkey_verify=\n False)\n", (51, 191), False, 'from ncclient import manager\n')]
|
#!/usr/bin/env python3
from random import random, choice, seed
from copy import deepcopy
from math import pi
from bruhat.render.sat import Expr, System, Listener
from bruhat.render.front import RGBA, Compound, Translate
from bruhat.render.front import path, style, canvas, color
from bruhat.render.turtle import Turtle
from bruhat.argv import argv
class Magic(Listener):
    """Base listener for layout objects: identity-hashed, empty-container semantics."""
    did_layout = False
    def __hash__(self):
        # Hash on identity so each instance is a distinct dict/set key.
        return id(self)
    # Container protocol: default to an empty sequence; subclasses that hold
    # children (ChildBox, CompoundBox) override these.
    def __len__(self):
        return 0
    def __getitem__(self, idx):
        raise IndexError

# Tolerance used when classifying arrow directions in ArrowBox.on_render.
EPSILON = 1e-6
class Box(Magic):
    """Abstract layout box.

    A box has an anchor point (x, y) and four extents (left, right, top,
    bot) measured outward from the anchor.  Before layout these attributes
    may be solver expressions/variables; ``layout`` builds and solves the
    constraint System and ``render`` draws the result onto a canvas.
    """
    DEBUG = False
    @classmethod
    def promote(cls, item, align=None):
        """Coerce *item* into a Box: str -> TextBox, tuple/list -> HBox,
        None -> EmptyBox.  If *align* is given, wrap the result in AlignBox."""
        if isinstance(item, Box):
            box = item
        elif isinstance(item, str):
            box = TextBox(item)
        elif isinstance(item, (tuple, list)):
            box = HBox(item)
        elif item is None:
            box = EmptyBox()
        else:
            raise TypeError(repr(item))
        if align is not None:
            box = AlignBox(box, align)
        return box
    # ---- derived geometry; valid both for solver Expr's and solved floats ----
    @property
    def width(self):
        return self.left + self.right
    @property
    def height(self):
        return self.top + self.bot
    @property
    def llx(self):
        # lower-left x
        return self.x - self.left
    @property
    def lly(self):
        # lower-left y
        return self.y - self.bot
    @property
    def ll(self):
        return self.llx, self.lly
    @property
    def urx(self):
        # upper-right x
        return self.x + self.right
    @property
    def ury(self):
        # upper-right y
        return self.y + self.top
    @property
    def ur(self):
        return self.urx, self.ury
    @property
    def midx(self):
        return 0.5*(self.llx + self.urx)
    @property
    def midy(self):
        return 0.5*(self.lly + self.ury)
    @property
    def mid(self):
        return (self.midx, self.midy)
    @property
    def bound(self):
        # (llx, lly, urx, ury) bounding rectangle
        return self.llx, self.lly, self.urx, self.ury
    def get_align(self, align):
        """Return the (x, y) point of this box's bound named by *align*
        ("center" or a compass direction)."""
        llx, lly, urx, ury = self.bound
        midx = 0.5*(llx + urx)
        midy = 0.5*(lly + ury)
        if align == "center":
            x, y = midx, midy
        elif align == "north":
            x, y = midx, ury
        elif align == "south":
            x, y = midx, lly
        elif align == "east":
            x, y = urx, midy
        elif align == "west":
            x, y = llx, midy
        elif align == "northeast":
            x, y = urx, ury
        elif align == "northwest":
            x, y = llx, ury
        elif align == "southeast":
            x, y = urx, lly
        elif align == "southwest":
            x, y = llx, lly
        else:
            assert 0, "alignment %r not understood" % align
        return x, y
    def contain(self, x, y, system, weight=None):
        """Constrain point (x, y) to lie within this box's bound."""
        system.add(x <= self.urx, weight)
        system.add(x >= self.llx, weight)
        system.add(y <= self.ury, weight)
        system.add(y >= self.lly, weight)
    def on_layout(self, cvs, system):
        """Register this box's geometry with *system*.

        Each of x, y, left, right, top, bot is either already an Expr
        (listened to), a concrete value in __dict__ (left alone), or gets a
        fresh solver variable.  Extents get weight 1 (minimized) and a 0
        lower bound; coordinates are free.
        """
        #assert not self.did_layout, "already called on_layout"
        assert not self in system.memo, "duplicate box %s"%(self,)
        system.memo.add(self)
        if self.DEBUG:
            print("%s.on_layout" % (self.__class__.__name__,))
        for attr in 'x y left right top bot'.split():
            stem = self.__class__.__name__ + '.' + attr
            expr = getattr(self, attr, None)
            if isinstance(expr, Expr):
                system.listen_expr(self, attr, expr)
            elif attr in self.__dict__:
                pass
            else:
                # We don't try to minimize the absolute coordinate values.
                weight = 1.0 if attr not in 'xy' else 0.0
                vmin = None if attr in 'xy' else 0.
                #v = system.get_var(stem, weight, vmin=vmin)
                v = system.listen_var(self, attr, stem, weight, vmin=vmin)
                setattr(self, attr, v)
        self.did_layout = True
    def assign_variables(self, system):
        # set all our Variable attributes to actual solutions
        attrs = list(self.__dict__.keys())
        for attr in attrs:
            value = getattr(self, attr)
            if not isinstance(value, Expr):
                continue
            value = system[value]
            setattr(self, attr, value)
    def on_render(self, cvs, system):
        """Draw debug geometry (anchor cross + bound rect); no-op unless DEBUG."""
        if 1:
            # Attributes are kept up to date by the Listener machinery,
            # so read them directly.
            x = self.x
            y = self.y
            left = self.left
            right = self.right
            top = self.top
            bot = self.bot
        elif 0:
            x = system[self.x]
            y = system[self.y]
            left = system[self.left]
            right = system[self.right]
            top = system[self.top]
            bot = system[self.bot]
        else:
            self.assign_variables(system)
            x = self.x
            y = self.y
            left = self.left
            right = self.right
            top = self.top
            bot = self.bot
        if not self.DEBUG:
            return
        assert type(x) is float, str(self)
        #cvs.set_line_width(0.5)
        cl = RGBA(1., 0., 0., 0.5)
        r = 0.1
        # Red cross at the anchor point.
        cvs.stroke(path.line(x-r, y-r, x+r, y+r), [cl]) #, style.linewidth.Thick])
        cvs.stroke(path.line(x+r, y-r, x-r, y+r), [cl])
        #bg = RGBA(0.5*random(), 0.5*random(), 0.5*random(), 0.5)
        bg = RGBA(0.5, 0.5, 0., 0.1)
        # Translucent fill + outline of the box's bounding rectangle.
        cvs.fill(path.rect(x-left, y-bot, left+right, top+bot), [bg])
        cvs.stroke(path.rect(x-left, y-bot, left+right, top+bot), [cl])
    system = None
    def layout(self, cvs, x=0, y=0):
        """Build (once) and solve the constraint system with the anchor pinned at (x, y)."""
        if self.system is None:
            self.system = System()
        system = self.system
        system.memo = set() # hang this here
        self.on_layout(cvs, system)
        system.add(self.x == x)
        system.add(self.y == y)
        system.solve()
        return system
    def render(self, cvs, x=0, y=0):
        """Solve the layout anchored at (x, y), then draw onto *cvs*."""
        #if not self.did_layout:
        self.layout(cvs, x, y)
        self.on_render(cvs, self.system)
        self.system.refresh()
#class EmptyBox(Box):
# def __init__(self, top=0., bot=0., left=0., right=0.):
# self.top = top
# self.bot = bot
# self.left = left
# self.right = right
class EmptyBox(Box):
    """Box with no content.

    Only the extents the caller passes are pinned; the rest are left unset
    so Box.on_layout creates free solver variables for them.
    """
    def __init__(self, top=None, bot=None, left=None, right=None):
        for name, value in (("top", top), ("bot", bot),
                            ("left", left), ("right", right)):
            if value is not None:
                setattr(self, name, value)
class MinBox(Box):
    """Box whose four extents are constrained to at least the given minimums."""
    def __init__(self, min_top=0., min_bot=0., min_left=0., min_right=0.):
        self.min_top = min_top
        self.min_bot = min_bot
        self.min_left = min_left
        self.min_right = min_right
    def on_layout(self, cvs, system):
        Box.on_layout(self, cvs, system)
        # Lower-bound each side; the solver may still grow them further.
        for side, floor in ((self.top, self.min_top),
                            (self.bot, self.min_bot),
                            (self.left, self.min_left),
                            (self.right, self.min_right)):
            system.add(side >= floor)
class CanBox(Box):
    """Wrap an already-drawn canvas as a Box, anchored at its top-left corner."""
    def __init__(self, cvs):
        bound = cvs.get_bound_cairo()
        bound = bound.scale_point_to_cm()
        # Anchor at the top-left: all extent goes right and down.
        self.top = 0.
        self.left = 0.
        self.right = bound.width
        self.bot = bound.height
        x0, y0 = bound.llx, bound.ury # this becomes our top-left
        self.x0 = x0
        self.y0 = y0
        self.cvs = cvs
    def on_render(self, cvs, system):
        Box.on_render(self, cvs, system)
        x = system[self.x]
        y = system[self.y]
        x0, y0 = self.x0, self.y0
        # Translate the wrapped canvas so its original top-left lands on
        # this box's solved anchor.
        dx, dy = x-x0, y-y0
        item = Compound([Translate(dx, dy), self.cvs])
        cvs.append(item)
class StrokeBox(Box):
    """Fixed-size box drawn as a rectangle outline in the given RGBA colour."""
    def __init__(self, width, height, rgba=(0., 0., 0., 1.)):
        # Anchor at the lower-left corner: all extent goes up and right.
        self.top = height
        self.bot = 0.
        self.left = 0.
        self.right = width
        self.rgba = rgba
    def on_render(self, cvs, system):
        Box.on_render(self, cvs, system)
        x, y = system[self.x], system[self.y]
        outline = path.rect(x, y, self.width, self.height)
        cvs.stroke(outline, [RGBA(*self.rgba)])
class FillBox(StrokeBox):
    """Like StrokeBox, but filled instead of outlined."""
    def on_render(self, cvs, system):
        Box.on_render(self, cvs, system)
        x, y = system[self.x], system[self.y]
        area = path.rect(x, y, self.width, self.height)
        cvs.fill(area, [RGBA(*self.rgba)])
class TextBox(Box):
    """Box sized to a text string's rendered extents."""
    def __init__(self, text, weight=99.0):
        self.text = text
        # Use weight higher than the default weight of 1.0, so text extents
        # are honoured in preference to other soft constraints.
        self.weight = weight
    def on_layout(self, cvs, system):
        Box.on_layout(self, cvs, system)
        # Measure the text on the target canvas and pin the box's size to it.
        extents = cvs.text_extents(self.text)
        dx, dy, width, height = extents
        system.add(self.left + self.right == width+dx)
        system.add(self.top + self.bot == height, self.weight)
        # Anchor sits at the text origin: no left extent, top extent = ascent.
        system.add(self.left == 0)
        assert dy >= 0., dy
        system.add(self.top == dy)
    def on_render(self, cvs, system):
        Box.on_render(self, cvs, system)
        x = system[self.x]
        y = system[self.y]
        cvs.text(x, y, self.text)
class ChildBox(Box):
    "Has one child box"
    def __init__(self, child):
        self.child = Box.promote(child)
    def __len__(self):
        return 1
    def __getitem__(self, idx):
        # Delegate to a one-element list so negative indices/IndexError
        # behave like a real sequence.
        return [self.child][idx]
    def on_render(self, cvs, system):
        Box.on_render(self, cvs, system)
        self.child.on_render(cvs, system)
class MarginBox(ChildBox):
    """Pad a child box by *margin* on all four sides; shares the child's anchor."""
    def __init__(self, child, margin):
        ChildBox.__init__(self, child)
        self.margin = margin
    def SLOW_on_layout(self, cvs, system):
        # Equation-based variant kept for reference; on_layout below does the
        # same by attribute aliasing, which adds fewer solver constraints.
        Box.on_layout(self, cvs, system)
        child = self.child
        child.on_layout(cvs, system)
        system.add(self.x == child.x)
        system.add(self.y == child.y)
        margin = self.margin
        system.add(self.left == child.left + margin)
        system.add(self.right == child.right + margin)
        system.add(self.top == child.top + margin)
        system.add(self.bot == child.bot + margin)
    def on_layout(self, cvs, system):
        child = self.child
        child.on_layout(cvs, system)
        # Alias our geometry to the child's (plus margin) BEFORE calling
        # Box.on_layout, so it registers these Expr's instead of creating
        # fresh variables.
        self.x = child.x
        self.y = child.y
        margin = self.margin
        self.left = child.left + margin
        self.right = child.right + margin
        self.top = child.top + margin
        self.bot = child.bot + margin
        Box.on_layout(self, cvs, system)
class RectBox(MarginBox):
    """MarginBox that optionally paints a background rectangle behind its child."""
    def __init__(self, child, margin=0, bg=None):
        MarginBox.__init__(self, child, margin)
        self.bg = bg  # background colour, or None for no fill
    def on_render(self, cvs, system):
        Box.on_render(self, cvs, system)
        bg = self.bg
        if bg is not None:
            # Fill the background first so the child draws on top of it.
            llx = system[self.llx]
            lly = system[self.lly]
            width = system[self.width]
            height = system[self.height]
            cvs.fill(path.rect(llx, lly, width, height), [bg])
        self.child.on_render(cvs, system)
class AlignBox(ChildBox):
    """Re-anchor a child box at one of its alignment points (center/compass)."""
    def __init__(self, child, align):
        ChildBox.__init__(self, child)
        self.align = align
        assert align in ("center north south east west northeast "+
            "northwest southeast southwest").split(), "align %r not understood"%align
    def on_layout(self, cvs, system):
        child = self.child
        child.on_layout(cvs, system)
        # Move the anchor to the chosen point and recompute the extents
        # relative to it; set before Box.on_layout so it listens to these
        # expressions rather than creating new variables.
        x, y = child.get_align(self.align)
        self.x = x
        self.y = y
        self.left = x - child.llx
        self.right = child.urx - x
        self.bot = y - child.lly
        self.top = child.ury - y
        Box.on_layout(self, cvs, system)
class SlackBox(ChildBox):
    """Box that is at least as large as its child on every side (slack allowed)."""
    def __init__(self, child):
        ChildBox.__init__(self, child)
    def on_layout(self, cvs, system):
        child = self.child
        child.on_layout(cvs, system)
        # child anchor for self
        self.x = child.x
        self.y = child.y
        Box.on_layout(self, cvs, system)
        # Extents are only lower-bounded by the child's, so this box may
        # stretch beyond it.
        system.add(self.top >= child.top)
        system.add(self.bot >= child.bot)
        system.add(self.left >= child.left)
        system.add(self.right >= child.right)
class CompoundBox(Box):
    """Base class for boxes holding several children; promotes each child."""
    def __init__(self, boxs, weight=None, align=None):
        assert len(boxs)
        self.boxs = [Box.promote(box, align) for box in boxs]
        self.weight = weight  # weight passed to subclass strict-mode constraints
    def __len__(self):
        return len(self.boxs)
    def __getitem__(self, idx):
        return self.boxs[idx]
    def on_layout(self, cvs, system):
        Box.on_layout(self, cvs, system)
        for box in self.boxs:
            box.on_layout(cvs, system)
    def on_render(self, cvs, system):
        Box.on_render(self, cvs, system)
        for box in self.boxs:
            box.on_render(cvs, system)
class MultiBox(CompoundBox):
    """CompoundBox that renders only its children, not its own (shapeless) geometry."""
    def on_render(self, cvs, system):
        # Don't call Box.on_render because I have no shape
        for box in self.boxs:
            box.on_render(cvs, system)
class OBox(CompoundBox):
    "Overlay boxes on top of each other, with matching anchors"
    strict = False  # strict=True forces every child to exactly this box's extents
    def on_layout(self, cvs, system):
        CompoundBox.on_layout(self, cvs, system)
        boxs = self.boxs
        for box in boxs:
            system.add(self.x == box.x) # align
            system.add(self.y == box.y) # align
            if self.strict:
                system.add(box.left == self.left, self.weight)
                system.add(box.right == self.right, self.weight)
                system.add(box.top == self.top, self.weight)
                system.add(box.bot == self.bot, self.weight)
            else:
                # Non-strict: this box merely bounds every child.
                system.add(box.left <= self.left)
                system.add(box.right <= self.right)
                system.add(box.top <= self.top)
                system.add(box.bot <= self.bot)
class HBox(CompoundBox):
    "horizontal compound box: anchor left"
    strict = False  # strict=True forces all children to the same height
    def on_layout(self, cvs, system):
        CompoundBox.on_layout(self, cvs, system)
        boxs = self.boxs
        system.add(self.left == 0.) # left anchor
        # Walk left-to-right, accumulating each child's width.
        left = self.x
        for box in boxs:
            system.add(self.y == box.y) # align
            system.add(box.x - box.left == left)
            left += box.width
            if self.strict:
                system.add(box.top == self.top, self.weight)
                system.add(box.bot == self.bot, self.weight)
            else:
                system.add(box.top <= self.top)
                system.add(box.bot <= self.bot)
        # Total width is the accumulated extent of all children.
        system.add(self.x + self.width == left)
class StrictHBox(HBox):
    """HBox whose children are all forced to the row's full height."""
    strict = True
class VBox(CompoundBox):
    "vertical compound box: anchor top"
    strict = False  # strict=True forces all children to the same width
    def on_layout(self, cvs, system):
        CompoundBox.on_layout(self, cvs, system)
        boxs = self.boxs
        system.add(self.top == 0.) # top anchor
        # Walk top-to-bottom (y decreases downward), stacking child heights.
        y = self.y
        for box in boxs:
            system.add(self.x == box.x) # align
            system.add(box.y + box.top == y)
            y -= box.height
            if self.strict:
                system.add(box.left == self.left, self.weight)
                system.add(box.right == self.right, self.weight)
            else:
                system.add(box.left <= self.left)
                system.add(box.right <= self.right)
        # Total height is the accumulated extent of all children.
        system.add(self.y - self.bot == y)
class StrictVBox(VBox):
    """VBox whose children are all forced to the column's full width."""
    strict = True
class TableBox(CompoundBox):
    """Lay out children in an m x n grid.

    Cells in a row share a y anchor; cells in a column share an x anchor.
    hspace/vspace add padding between columns/rows; grid=True draws the
    grid lines at render time.
    """
    def __init__(self, rows, hspace=0., vspace=0., grid=False):
        assert len(rows), "no rows"
        assert len(rows[0]), "no cols"
        rows = [list(row) for row in rows] # copy
        assert hspace >= 0.
        assert vspace >= 0.
        # (Earlier spacing implementation using MinBox padding cells,
        # superseded by the hspace/vspace constraints in on_layout.)
#        if 0:
#            # Get original shape
#            m = len(rows) # rows
#            n = len(rows[0]) # cols
#
#            row = []
#            for i in range(n):
#                space = hspace if i+1<n else 0.
#                row.append(MinBox(0., 0., 0., space))
#            row.append(MinBox(0., 0., 0., 0.))
#            rows.append(row)
#
#            for i in range(m):
#                space = vspace if i+1<m else 0.
#                rows[i].append(MinBox(0., space, 0., 0.))
        self.hspace = hspace
        self.vspace = vspace
        # Get new shape
        m = len(rows) # rows
        n = len(rows[0]) # cols
        boxs = []
        for row in rows:
            assert len(row) == n
            for box in row:
                box = Box.promote(box)
                boxs.append(box)
        self.rows = rows
        self.shape = m, n
        # anchor is top left
        self.top = 0.
        self.left = 0.
        self.grid = grid
        CompoundBox.__init__(self, boxs)
    def __getitem__(self, key):
        """Index a cell as table[row, col]."""
        assert type(key) is tuple
        assert len(key) == 2
        row, col = key
        return self.rows[row][col]
    def on_layout(self, cvs, system):
        CompoundBox.on_layout(self, cvs, system)
        m, n = self.shape
        rows = self.rows
        # One anchor variable per row (y) and per column (x), plus a
        # minimized width/height variable per column/row.
        xs, ys = {}, {}
        ws, hs = {}, {} # width's, height's
        for i in range(m): # rows
            ys[i] = system.get_var("TableBox.row(%d)"%i, weight=0.)
            hs[i] = system.get_var("TableBox.height(%d)"%i, weight=1.) # minimize
        for j in range(n): # cols
            xs[j] = system.get_var("TableBox.col(%d)"%j, weight=0.)
            ws[j] = system.get_var("TableBox.width(%d)"%j, weight=1.) # minimize
        for i in range(m): # rows
            for j in range(n): # cols
                box = rows[i][j]
                system.add(box.y == ys[i]) # align
                system.add(box.x == xs[j]) # align
        # Horizontal packing: each column's cells fit inside its width slot,
        # with hspace padding after each cell.
        hspace = self.hspace
        for i in range(m): # rows
            x = self.x
            for j in range(n): # cols
                box = rows[i][j]
                system.add(box.x - box.left >= x)
                width = ws[j] # width of this col
                x += width
                system.add(box.x + box.right + hspace <= x)
            system.add(self.x + self.width >= x)
        # Vertical packing: same scheme, y decreasing downward.
        vspace = self.vspace
        for j in range(n): # cols
            y = self.y
            for i in range(m): # rows
                box = rows[i][j]
                system.add(box.y + box.top <= y)
                height = hs[i]
                y -= height
                system.add(box.y - box.bot - vspace >= y)
            system.add(self.y - self.height <= y)
        # Keep the raw grid variables for on_render's grid lines.
        self.vs = xs, ys, ws, hs
    def on_render(self, cvs, system):
        CompoundBox.on_render(self, cvs, system)
        if not self.grid:
            return
        m, n = self.shape
        xs, ys, ws, hs = self.vs
        width = system[self.width]
        height = system[self.height]
        # Vertical separators at each column boundary.
        x = system[self.x]
        y = system[self.y]
        for j in range(n):
            cvs.stroke(path.line(x, y, x, y-height))
            x += system[ws[j]]
        #cvs.stroke(path.line(x, y, x, y-height))
        # Horizontal separators at each row boundary.
        x = system[self.x]
        y = system[self.y]
        for i in range(m):
            cvs.stroke(path.line(x, y, x+width, y))
            y -= system[hs[i]]
        # Outer frame.
        x = system[self.x]
        y = system[self.y]
        cvs.stroke(path.rect(x, y-height, width, height))
class ArrowBox(Box):
    """An arrow drawn from a point inside *src* to a point inside *tgt*.

    The endpoints are solver variables constrained to lie within the two
    boxes, softly pulled (with self.weight) toward each box's midpoint.
    An optional label is placed at the arrow's midpoint.
    """
    #default_style = "flat"
    default_style = "curve"
    default_size = 0.15
    default_attrs = [style.linewidth.thin, style.linecap.round,
        style.linejoin.round]
    def __init__(self, src, tgt, label=None, label_align=None,
            style=None, size=None, pad_head=0.04, pad_tail=0.04,
            attrs=None, weight=0.1):
        # NOTE: the `style` parameter shadows the imported `style` module
        # inside this method; the class-level defaults above are evaluated
        # before that shadowing takes effect.
        assert isinstance(src, Box)
        assert isinstance(tgt, Box)
        self.src = src
        self.tgt = tgt
        assert src is not tgt, "self-arrow not implemented"
        #if label is not None:
        #    label = Box.promote(label)
        self.label = label
        self.label_align = label_align
        if style is None:
            style = ArrowBox.default_style
        self.style = style
        if size is None:
            size = ArrowBox.default_size
        self.size = size
        if attrs is None:
            attrs = ArrowBox.default_attrs
        self.attrs = attrs
        self.weight = weight
    def on_layout(self, cvs, system):
        Box.on_layout(self, cvs, system)
        # (x, y) is the tail (inside src); (x1, y1) is the head (inside tgt).
        self.x1 = x1 = system.get_var("ArrowBox.x1", weight=0.)
        self.y1 = y1 = system.get_var("ArrowBox.y1", weight=0.)
        x0, y0 = self.x, self.y
        src = self.src
        tgt = self.tgt
        src.contain(x0, y0, system)
        tgt.contain(x1, y1, system)
        self.contain(x1, y1, system)
        # With a lower weight, try to stay in the middle
        weight = self.weight
        system.add(x0 == src.midx, weight)
        system.add(y0 == src.midy, weight)
        system.add(x1 == tgt.midx, weight)
        system.add(y1 == tgt.midy, weight)
    def on_render(self, cvs, system):
        Box.on_render(self, cvs, system)
        x0 = system[self.x]
        y0 = system[self.y]
        x1 = system[self.x1]
        y1 = system[self.y1]
        # Draw the arrow stroke from tail to head.
        turtle = Turtle(x0, y0)
        turtle.moveto(x1, y1)
        turtle.arrow(size=self.size, style=self.style)
        turtle.stroke(attrs=self.attrs, cvs=cvs)
        label = self.label
        align = self.label_align
        if label is None:
            return
        # Pick a label alignment from the arrow's direction unless the
        # caller fixed one: horizontal arrows label below, vertical to the
        # left, diagonals off to one side with a smaller margin.
        dx = x1-x0
        dy = y1-y0
        PIP = 0.04 # <------- MAGIC CONSTANT TODO
        if align is not None:
            pass
        elif abs(dy)<EPSILON:
            align = "south"
        elif abs(dx)<EPSILON:
            align = "west"
        elif dx*dy > 0.:
            align = "northwest"
            PIP = 0.02
        else:
            align = "southwest"
            PIP = 0.02
        if isinstance(label, Box):
            pass
        else:
            label = MarginBox(label, PIP)
        label = AlignBox(label, align)
        # TODO pad_head, pad_tail XXX
        # Render the label with its own layout pass at the arrow midpoint.
        x = 0.5*(x0+x1)
        y = 0.5*(y0+y1)
        label.render(cvs, x, y)
        #cvs.stroke(path.circle(x, y, 0.02), [color.rgb.red])
#cvs.stroke(path.circle(x, y, 0.02), [color.rgb.red])
def test():
    """Render a 3x3 grid of labels with labelled arrows out of the centre cell."""
    from bruhat.render import config
    config(text="pdftex")
    Box.DEBUG = False
    tbox = lambda t: MarginBox(TextBox(t), 0.05)
    rows = [
        [r"A", r"B", r"C"],
        [r"D", r"E", r"F"],
        [r"G", r"H", r"I"],
    ]
    boxs = [[tbox("$%s$"%c) for c in row] for row in rows]
    # One arrow from the centre cell to each of its eight neighbours.
    arrows = []
    offsets = [-1, 0, 1]
    for di in offsets:
        for dj in offsets:
            if di==0 and dj==0:
                continue
            label = r"$x$"
            arrows.append(ArrowBox(boxs[1][1], boxs[1+di][1+dj], label=label))
    r = 1.1
    table = TableBox(boxs, hspace=r, vspace=0.8*r)
    box = MultiBox([table]+arrows)
    cvs = canvas.canvas()
    box.render(cvs)
    cvs.writePDFfile("output.pdf")
# Script entry point: render the demo table and report success.
if __name__ == "__main__":
    test()
    print("OK\n")
|
[
"bruhat.render.sat.System",
"bruhat.render.front.Translate",
"bruhat.render.front.canvas.canvas",
"bruhat.render.turtle.Turtle",
"bruhat.render.front.RGBA",
"bruhat.render.config",
"bruhat.render.front.path.line",
"bruhat.render.front.path.rect"
] |
[((22020, 22041), 'bruhat.render.config', 'config', ([], {'text': '"""pdftex"""'}), "(text='pdftex')\n", (22026, 22041), False, 'from bruhat.render import config\n'), ((22619, 22634), 'bruhat.render.front.canvas.canvas', 'canvas.canvas', ([], {}), '()\n', (22632, 22634), False, 'from bruhat.render.front import path, style, canvas, color\n'), ((5138, 5162), 'bruhat.render.front.RGBA', 'RGBA', (['(1.0)', '(0.0)', '(0.0)', '(0.5)'], {}), '(1.0, 0.0, 0.0, 0.5)\n', (5142, 5162), False, 'from bruhat.render.front import RGBA, Compound, Translate\n'), ((5394, 5418), 'bruhat.render.front.RGBA', 'RGBA', (['(0.5)', '(0.5)', '(0.0)', '(0.1)'], {}), '(0.5, 0.5, 0.0, 0.1)\n', (5398, 5418), False, 'from bruhat.render.front import RGBA, Compound, Translate\n'), ((20976, 20990), 'bruhat.render.turtle.Turtle', 'Turtle', (['x0', 'y0'], {}), '(x0, y0)\n', (20982, 20990), False, 'from bruhat.render.turtle import Turtle\n'), ((5195, 5232), 'bruhat.render.front.path.line', 'path.line', (['(x - r)', '(y - r)', '(x + r)', '(y + r)'], {}), '(x - r, y - r, x + r, y + r)\n', (5204, 5232), False, 'from bruhat.render.front import path, style, canvas, color\n'), ((5278, 5315), 'bruhat.render.front.path.line', 'path.line', (['(x + r)', '(y - r)', '(x - r)', '(y + r)'], {}), '(x + r, y - r, x - r, y + r)\n', (5287, 5315), False, 'from bruhat.render.front import path, style, canvas, color\n'), ((5435, 5488), 'bruhat.render.front.path.rect', 'path.rect', (['(x - left)', '(y - bot)', '(left + right)', '(top + bot)'], {}), '(x - left, y - bot, left + right, top + bot)\n', (5444, 5488), False, 'from bruhat.render.front import path, style, canvas, color\n'), ((5507, 5560), 'bruhat.render.front.path.rect', 'path.rect', (['(x - left)', '(y - bot)', '(left + right)', '(top + bot)'], {}), '(x - left, y - bot, left + right, top + bot)\n', (5516, 5560), False, 'from bruhat.render.front import path, style, canvas, color\n'), ((5674, 5682), 'bruhat.render.sat.System', 'System', ([], {}), '()\n', (5680, 5682), 
False, 'from bruhat.render.sat import Expr, System, Listener\n'), ((8085, 8125), 'bruhat.render.front.path.rect', 'path.rect', (['x', 'y', 'self.width', 'self.height'], {}), '(x, y, self.width, self.height)\n', (8094, 8125), False, 'from bruhat.render.front import path, style, canvas, color\n'), ((8325, 8365), 'bruhat.render.front.path.rect', 'path.rect', (['x', 'y', 'self.width', 'self.height'], {}), '(x, y, self.width, self.height)\n', (8334, 8365), False, 'from bruhat.render.front import path, style, canvas, color\n'), ((19080, 19119), 'bruhat.render.front.path.rect', 'path.rect', (['x', '(y - height)', 'width', 'height'], {}), '(x, y - height, width, height)\n', (19089, 19119), False, 'from bruhat.render.front import path, style, canvas, color\n'), ((7664, 7681), 'bruhat.render.front.Translate', 'Translate', (['dx', 'dy'], {}), '(dx, dy)\n', (7673, 7681), False, 'from bruhat.render.front import RGBA, Compound, Translate\n'), ((8128, 8144), 'bruhat.render.front.RGBA', 'RGBA', (['*self.rgba'], {}), '(*self.rgba)\n', (8132, 8144), False, 'from bruhat.render.front import RGBA, Compound, Translate\n'), ((8368, 8384), 'bruhat.render.front.RGBA', 'RGBA', (['*self.rgba'], {}), '(*self.rgba)\n', (8372, 8384), False, 'from bruhat.render.front import RGBA, Compound, Translate\n'), ((10890, 10924), 'bruhat.render.front.path.rect', 'path.rect', (['llx', 'lly', 'width', 'height'], {}), '(llx, lly, width, height)\n', (10899, 10924), False, 'from bruhat.render.front import path, style, canvas, color\n'), ((18732, 18762), 'bruhat.render.front.path.line', 'path.line', (['x', 'y', 'x', '(y - height)'], {}), '(x, y, x, y - height)\n', (18741, 18762), False, 'from bruhat.render.front import path, style, canvas, color\n'), ((18947, 18976), 'bruhat.render.front.path.line', 'path.line', (['x', 'y', '(x + width)', 'y'], {}), '(x, y, x + width, y)\n', (18956, 18976), False, 'from bruhat.render.front import path, style, canvas, color\n')]
|
import os
import subprocess
from telegram import Update
from telegram.ext import CallbackContext
def create_download_list_from_link(link: str, link_type: str, list_path: str):
    """Ask spotdl to resolve *link* (of kind *link_type*, e.g. 'playlist')
    and write the resulting track list to *list_path*."""
    command = [
        'spotdl',
        f'--{link_type}',
        link,
        '--write-to',
        list_path,
    ]
    subprocess.run(command)
def download_from_list(list_path: str, download_path: str):
    """Download every track listed in *list_path* into *download_path*,
    then delete the list file.

    Fixes over the original:
    - ``subprocess.run`` replaces ``Popen`` + ``wait``: with ``stdout=PIPE``
      an unread pipe can fill up and deadlock the child process, while
      ``run`` drains the pipe before returning.
    - the list file is removed with ``os.remove`` instead of spawning an
      external ``rm`` process.
    """
    subprocess.run(
        [
            'spotdl',
            '--list', list_path,
            "-f", download_path,
            "--overwrite", "skip"
        ],
        stdout=subprocess.PIPE
    )
    os.remove(list_path)
def send_songs_from_directory(
        directory_path: str,
        update: Update,
        context: CallbackContext):
    """Send every file in *directory_path* to the current chat as audio,
    then delete the whole directory.

    Fix: the original opened each file with a bare ``open`` and never closed
    it, leaking one file handle per song; ``with`` guarantees the handle is
    closed after the upload. The unused ``result`` variable was dropped.
    """
    for file_name in os.listdir(directory_path):
        with open(f'{directory_path}/{file_name}', 'rb') as audio_file:
            context.bot.send_audio(
                chat_id=update.effective_chat.id,
                audio=audio_file
            )
    subprocess.run(['rm', '-r', directory_path])
|
[
"subprocess.run",
"subprocess.Popen",
"os.listdir"
] |
[((183, 258), 'subprocess.run', 'subprocess.run', (["['spotdl', f'--{link_type}', link, '--write-to', list_path]"], {}), "(['spotdl', f'--{link_type}', link, '--write-to', list_path])\n", (197, 258), False, 'import subprocess\n'), ((420, 541), 'subprocess.Popen', 'subprocess.Popen', (["['spotdl', '--list', list_path, '-f', download_path, '--overwrite', 'skip']"], {'stdout': 'subprocess.PIPE'}), "(['spotdl', '--list', list_path, '-f', download_path,\n '--overwrite', 'skip'], stdout=subprocess.PIPE)\n", (436, 541), False, 'import subprocess\n'), ((642, 675), 'subprocess.run', 'subprocess.run', (["['rm', list_path]"], {}), "(['rm', list_path])\n", (656, 675), False, 'import subprocess\n'), ((813, 839), 'os.listdir', 'os.listdir', (['directory_path'], {}), '(directory_path)\n', (823, 839), False, 'import os\n'), ((1026, 1070), 'subprocess.run', 'subprocess.run', (["['rm', '-r', directory_path]"], {}), "(['rm', '-r', directory_path])\n", (1040, 1070), False, 'import subprocess\n')]
|
# Import
from sudoku import Sudoku
# Backtrack solver class
class BacktrackSolver:
    """Depth-first brute-force solver for a 9x9 Sudoku board."""

    def __init__(self, matrix):
        """Wrap the flat 81-cell *matrix* in a Sudoku board."""
        self.sudoku = Sudoku(matrix)

    # Find next empty position in board
    def nextEmpty(self):
        """Return the index (0-80) of the first empty cell, or 81 if full."""
        return next((k for k in range(81) if self.sudoku.board[k] == 0), 81)

    # Solve sudoku
    def solve(self):
        """Try every digit in the first empty cell, recursing and
        backtracking on conflicts; prints each completed board."""
        k = self.nextEmpty()
        if k == 81:
            print(f'Solved? {self.sudoku.isSolved()}\n')
            return self.sudoku.display()
        row, col = divmod(k, 9)
        for candidate in range(1, 10):
            self.sudoku.add(row, col, candidate)
            if self.sudoku.isValid():
                self.solve()
            # Undo the placement regardless of whether the branch succeeded.
            self.sudoku.remove(row, col)
# Testing
if __name__ == '__main__':
    # 9x9 puzzle stored row-major as a flat list; 0 marks an empty cell.
    sample = [
        0, 9, 0, 0, 0, 6, 0, 4, 0,
        0, 0, 5, 3, 0, 0, 0, 0, 8,
        0, 0, 0, 0, 7, 0, 2, 0, 0,
        0, 0, 1, 0, 5, 0, 0, 0, 3,
        0, 6, 0, 0, 0, 9, 0, 7, 0,
        2, 0, 0, 0, 8, 4, 1, 0, 0,
        0, 0, 3, 0, 1, 0, 0, 0, 0,
        8, 0, 0, 0, 0, 2, 5, 0, 0,
        0, 5, 0, 4, 0, 0, 0, 8, 0
    ]
    solver = BacktrackSolver(sample)
    solver.solve()
|
[
"sudoku.Sudoku"
] |
[((148, 162), 'sudoku.Sudoku', 'Sudoku', (['matrix'], {}), '(matrix)\n', (154, 162), False, 'from sudoku import Sudoku\n')]
|
from src.model.game import Game
from src.model.human_connections import InitialisePlayer, SavePlayer
from src.model.main import welcoming, blackjack_welcome, blackjack_rule_display
# @click.group()
def blackjack():
    """Run one interactive blackjack session, restoring a returning player
    from the saved-player store when a matching record exists."""
    welcoming()
    human_player = InitialisePlayer().initialise()
    saved_players = SavePlayer()
    # Returning players are identified by a name + gender composite key.
    key = human_player.first_name + human_player.last_name + human_player.gender
    if key in saved_players.players:
        human_player = saved_players.players[key]
    blackjack_welcome(human_player)
    blackjack_rule_display()
    Game(human_player).start()
# Script entry point: launch an interactive game session.
if __name__ == '__main__':
    blackjack()
|
[
"src.model.main.blackjack_rule_display",
"src.model.human_connections.SavePlayer",
"src.model.main.welcoming",
"src.model.human_connections.InitialisePlayer",
"src.model.game.Game",
"src.model.main.blackjack_welcome"
] |
[((221, 232), 'src.model.main.welcoming', 'welcoming', ([], {}), '()\n', (230, 232), False, 'from src.model.main import welcoming, blackjack_welcome, blackjack_rule_display\n'), ((258, 276), 'src.model.human_connections.InitialisePlayer', 'InitialisePlayer', ([], {}), '()\n', (274, 276), False, 'from src.model.human_connections import InitialisePlayer, SavePlayer\n'), ((349, 361), 'src.model.human_connections.SavePlayer', 'SavePlayer', ([], {}), '()\n', (359, 361), False, 'from src.model.human_connections import InitialisePlayer, SavePlayer\n'), ((535, 566), 'src.model.main.blackjack_welcome', 'blackjack_welcome', (['human_player'], {}), '(human_player)\n', (552, 566), False, 'from src.model.main import welcoming, blackjack_welcome, blackjack_rule_display\n'), ((571, 595), 'src.model.main.blackjack_rule_display', 'blackjack_rule_display', ([], {}), '()\n', (593, 595), False, 'from src.model.main import welcoming, blackjack_welcome, blackjack_rule_display\n'), ((608, 626), 'src.model.game.Game', 'Game', (['human_player'], {}), '(human_player)\n', (612, 626), False, 'from src.model.game import Game\n')]
|
"""
Created on 24 Apr 2017
@author: <NAME> (<EMAIL>)
"""
from scs_core.gas.pid.pid import PID
from scs_core.gas.pid.pid_temp_comp import PIDTempComp
# --------------------------------------------------------------------------------------------------------------------
PIDTempComp.init()          # must be initialised before sensors
PID.init()                  # PID sensor class setup; presumably consumes the temp-comp data above — confirm ordering requirement
|
[
"scs_core.gas.pid.pid_temp_comp.PIDTempComp.init",
"scs_core.gas.pid.pid.PID.init"
] |
[((273, 291), 'scs_core.gas.pid.pid_temp_comp.PIDTempComp.init', 'PIDTempComp.init', ([], {}), '()\n', (289, 291), False, 'from scs_core.gas.pid.pid_temp_comp import PIDTempComp\n'), ((334, 344), 'scs_core.gas.pid.pid.PID.init', 'PID.init', ([], {}), '()\n', (342, 344), False, 'from scs_core.gas.pid.pid import PID\n')]
|
__all__ = [
"SourceLocation",
"set_location",
"UNKNOWN_LOCATION",
]
from dataclasses import FrozenInstanceError, replace
from typing import Any, NamedTuple, TypeVar
T = TypeVar("T")
class SourceLocation(NamedTuple):
    """Class representing a location within an input string."""

    pos: int     # absolute character offset; negative means unknown
    lineno: int  # 1-based line number
    colno: int   # 1-based column number

    @property
    def unknown(self) -> bool:
        """Whether the location is unknown.

        >>> location = UNKNOWN_LOCATION
        >>> location.unknown
        True
        """
        return self.pos < 0

    def format(self, filename: str, message: str) -> str:
        """Return a message formatted with the given filename and the current location.

        >>> SourceLocation(42, 3, 12).format("path/to/file.txt", "Some error message")
        'path/to/file.txt:3:12: Some error message'
        """
        # Bug fix: the original ignored *filename* and always emitted the
        # literal "(unknown)", contradicting its own doctest above.
        return f"{filename}:{self.lineno}:{self.colno}: {message}"

    def with_horizontal_offset(self, offset: int) -> "SourceLocation":
        """Create a modified source location along the horizontal axis.

        >>> SourceLocation(0, 1, 1).with_horizontal_offset(41)
        SourceLocation(pos=41, lineno=1, colno=42)
        """
        if self.unknown:
            # An unknown location stays unknown regardless of offset.
            return self
        return SourceLocation(self.pos + offset, self.lineno, self.colno + offset)
# Sentinel value: a negative pos marks the location as unknown.
UNKNOWN_LOCATION = SourceLocation(pos=-1, lineno=0, colno=0)
def set_location(
    obj: T,
    location: Any = UNKNOWN_LOCATION,
    end_location: Any = UNKNOWN_LOCATION,
) -> T:
    """Set the location and end_location attributes.

    The function returns the given object or a new instance if the object
    is a namedtuple or a frozen dataclass. The location can be copied from
    another object with location and end_location attributes.

    >>> token = Token("number", "123", UNKNOWN_LOCATION, UNKNOWN_LOCATION)
    >>> updated_token = set_location(token, SourceLocation(15, 6, 1))
    >>> updated_token
    Token(type='number', value='123', location=SourceLocation(pos=15, lineno=6, colno=1), end_location=SourceLocation(pos=15, lineno=6, colno=1))
    >>> updated_token = set_location(
    ...     updated_token,
    ...     end_location=updated_token.location.with_horizontal_offset(len(updated_token.value)),
    ... )
    >>> set_location(token, updated_token)
    Token(type='number', value='123', location=SourceLocation(pos=15, lineno=6, colno=1), end_location=SourceLocation(pos=18, lineno=6, colno=4))
    """
    # If *end_location* is not already a SourceLocation, treat it as a
    # location-carrying object and read its `end_location` attribute.
    if not isinstance(end_location, SourceLocation):
        end_location = getattr(end_location, "end_location", UNKNOWN_LOCATION)
    if not isinstance(location, SourceLocation):
        # Copy from a location-carrying object; only take its end_location
        # when the caller did not supply one explicitly.
        if end_location.unknown:
            end_location = getattr(location, "end_location", UNKNOWN_LOCATION)
        location = getattr(location, "location", UNKNOWN_LOCATION)
    # Fall back to the target object's own (pre-existing) locations.
    if location.unknown:
        location = getattr(obj, "location", location)
    if end_location.unknown:
        end_location = getattr(obj, "end_location", end_location)
    # Guarantee end_location >= location (lexicographic tuple comparison).
    end_location = max(location, end_location)
    if isinstance(obj, tuple):
        # Namedtuples are immutable: return an updated copy.
        return obj._replace(location=location, end_location=end_location)  # type: ignore
    try:
        obj.location = location  # type: ignore
        obj.end_location = end_location  # type: ignore
    except FrozenInstanceError:
        # Frozen dataclass: build a replacement instance instead of mutating.
        return replace(obj, location=location, end_location=end_location)
    return obj
|
[
"typing.TypeVar",
"dataclasses.replace"
] |
[((184, 196), 'typing.TypeVar', 'TypeVar', (['"""T"""'], {}), "('T')\n", (191, 196), False, 'from typing import Any, NamedTuple, TypeVar\n'), ((3330, 3388), 'dataclasses.replace', 'replace', (['obj'], {'location': 'location', 'end_location': 'end_location'}), '(obj, location=location, end_location=end_location)\n', (3337, 3388), False, 'from dataclasses import FrozenInstanceError, replace\n')]
|
from bayes_implicit_solvent.utils import remove_top_right_spines
from pickle import load
experiment_number = 5

# Load the per-iteration radii samples dumped by the sampling run.
with open('results/experiment_{}_radii_samples.pkl'.format(experiment_number), 'rb') as f:
    radii_samples = load(f)

import matplotlib.pyplot as plt
import numpy as np

# Log-posterior trace saved alongside the radii samples.
log_ps = np.load('results/experiment_{}_log_ps.npy'.format(experiment_number))

# Number of GB types at each iteration (the model dimension can change).
n_types_trace = [len(r) for r in radii_samples]
max_n_dimensions = max(n_types_trace)
print(max_n_dimensions)

fig = plt.figure(figsize=(4,8))

# Build one trace per radius dimension, padding with NaN for iterations
# where that dimension did not exist yet (NaN leaves a gap in the plot).
traces = []

for i in range(max_n_dimensions):
    trace = []
    for r in radii_samples:
        if len(r) > i:
            trace.append(r[i])
        else:
            trace.append(np.nan)
    traces.append(trace)

ax = plt.subplot(3,1,1)
# plot branching
for trace in traces:
    plt.plot(trace)
#plt.xlabel('iteration')
plt.ylabel('radius')
remove_top_right_spines(ax)

# plot # types trace
ax = plt.subplot(3,1,2)
plt.plot(n_types_trace)
#plt.xlabel('iteration')
plt.ylabel('# GB types')
plt.yticks([1,10,20])
remove_top_right_spines(ax)

# plot log-probability trace
ax = plt.subplot(3,1,3)
plt.plot(log_ps)
plt.xlabel('iteration')
# Clip the y-axis above the early transient (sample 10) so the
# equilibrated part of the trace is visible.
plt.ylim(log_ps[10], max(log_ps) + 0.05 * (max(log_ps) - log_ps[10]))
plt.ylabel('log posterior')
remove_top_right_spines(ax)

plt.tight_layout()
plt.savefig('figures/experiment_{}_branching.png'.format(experiment_number),
            bbox_inches='tight', dpi=300)
plt.close()
|
[
"matplotlib.pyplot.subplot",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.close",
"matplotlib.pyplot.yticks",
"matplotlib.pyplot.figure",
"bayes_implicit_solvent.utils.remove_top_right_spines",
"pickle.load",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.tight_layout"
] |
[((485, 511), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(4, 8)'}), '(figsize=(4, 8))\n', (495, 511), True, 'import matplotlib.pyplot as plt\n'), ((734, 754), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(3)', '(1)', '(1)'], {}), '(3, 1, 1)\n', (745, 754), True, 'import matplotlib.pyplot as plt\n'), ((836, 856), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""radius"""'], {}), "('radius')\n", (846, 856), True, 'import matplotlib.pyplot as plt\n'), ((857, 884), 'bayes_implicit_solvent.utils.remove_top_right_spines', 'remove_top_right_spines', (['ax'], {}), '(ax)\n', (880, 884), False, 'from bayes_implicit_solvent.utils import remove_top_right_spines\n'), ((912, 932), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(3)', '(1)', '(2)'], {}), '(3, 1, 2)\n', (923, 932), True, 'import matplotlib.pyplot as plt\n'), ((931, 954), 'matplotlib.pyplot.plot', 'plt.plot', (['n_types_trace'], {}), '(n_types_trace)\n', (939, 954), True, 'import matplotlib.pyplot as plt\n'), ((980, 1004), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""# GB types"""'], {}), "('# GB types')\n", (990, 1004), True, 'import matplotlib.pyplot as plt\n'), ((1005, 1028), 'matplotlib.pyplot.yticks', 'plt.yticks', (['[1, 10, 20]'], {}), '([1, 10, 20])\n', (1015, 1028), True, 'import matplotlib.pyplot as plt\n'), ((1027, 1054), 'bayes_implicit_solvent.utils.remove_top_right_spines', 'remove_top_right_spines', (['ax'], {}), '(ax)\n', (1050, 1054), False, 'from bayes_implicit_solvent.utils import remove_top_right_spines\n'), ((1090, 1110), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(3)', '(1)', '(3)'], {}), '(3, 1, 3)\n', (1101, 1110), True, 'import matplotlib.pyplot as plt\n'), ((1109, 1125), 'matplotlib.pyplot.plot', 'plt.plot', (['log_ps'], {}), '(log_ps)\n', (1117, 1125), True, 'import matplotlib.pyplot as plt\n'), ((1126, 1149), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""iteration"""'], {}), "('iteration')\n", (1136, 1149), True, 'import matplotlib.pyplot as plt\n'), ((1220, 
1247), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""log posterior"""'], {}), "('log posterior')\n", (1230, 1247), True, 'import matplotlib.pyplot as plt\n'), ((1248, 1275), 'bayes_implicit_solvent.utils.remove_top_right_spines', 'remove_top_right_spines', (['ax'], {}), '(ax)\n', (1271, 1275), False, 'from bayes_implicit_solvent.utils import remove_top_right_spines\n'), ((1277, 1295), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (1293, 1295), True, 'import matplotlib.pyplot as plt\n'), ((1416, 1427), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (1425, 1427), True, 'import matplotlib.pyplot as plt\n'), ((225, 232), 'pickle.load', 'load', (['f'], {}), '(f)\n', (229, 232), False, 'from pickle import load\n'), ((795, 810), 'matplotlib.pyplot.plot', 'plt.plot', (['trace'], {}), '(trace)\n', (803, 810), True, 'import matplotlib.pyplot as plt\n')]
|
# Copyright 2019-2022 SURF.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Dict, Optional, cast
from structlog import get_logger
from orchestrator.distlock.distlock_manager import DistLockManager
from orchestrator.settings import AppSettings
logger = get_logger(__name__)
async def empty_fn(*args: tuple, **kwargs: Dict[str, Any]) -> None:
    # Awaitable no-op returned in place of real lock methods when
    # distributed locking is disabled.
    return
class WrappedDistLockManager:
    """Lazy proxy around a DistLockManager that may not be configured yet.

    Attribute access is forwarded to the wrapped manager once `update()`
    has installed it; before that, lookups either warn and return None or
    raise, depending on the attribute name.
    """

    def __init__(self, wrappee: Optional[DistLockManager] = None) -> None:
        self.wrapped_distlock_manager = wrappee

    def update(self, wrappee: DistLockManager) -> None:
        # Install the real DistLockManager backing this proxy.
        self.wrapped_distlock_manager = wrappee
        logger.warning("DistLockManager object configured, all methods referencing `distlock_manager` should work.")

    def __getattr__(self, attr: str) -> Any:
        # Only invoked for attributes not found through normal lookup,
        # i.e. anything other than the instance attributes set above.
        if not isinstance(self.wrapped_distlock_manager, DistLockManager):
            if "_" in attr:
                # Names containing an underscore (including dunder probes
                # from copy/pickle/inspect) warn and yield None.
                logger.warning("No DistLockManager configured, but attempting to access class methods")
                return
            # NOTE(review): raises a Warning subclass as an exception —
            # confirm this is intentional rather than warnings.warn().
            raise RuntimeWarning(
                "No DistLockManager configured at this time. Please set ENABLE_DISTLOCK_MANAGER "
                "and DISTLOCK_BACKEND in OrchestratorCore base_settings"
            )
        if attr != "enabled" and not self.wrapped_distlock_manager.enabled:
            # Locking disabled: every method becomes an async no-op.
            logger.warning("Distributed Locking is disabled, unable to access class methods")
            return empty_fn
        return getattr(self.wrapped_distlock_manager, attr)
# Module-level singleton proxy; `cast` lets callers type-check against the
# real DistLockManager interface while the proxy fills in lazily.
wrapped_distlock_manager = WrappedDistLockManager()
distlock_manager = cast(DistLockManager, wrapped_distlock_manager)


# The Global DistLockManager is set after calling this function
def init_distlock_manager(settings: AppSettings) -> DistLockManager:
    """Build the real DistLockManager from app settings, install it in the
    module-level proxy, and return the (proxied) manager."""
    wrapped_distlock_manager.update(
        DistLockManager(
            settings.ENABLE_DISTLOCK_MANAGER, settings.DISTLOCK_BACKEND, (settings.CACHE_HOST, settings.CACHE_PORT)
        )
    )
    return distlock_manager


__all__ = [
    "distlock_manager",
    "init_distlock_manager",
]
|
[
"typing.cast",
"orchestrator.distlock.distlock_manager.DistLockManager",
"structlog.get_logger"
] |
[((774, 794), 'structlog.get_logger', 'get_logger', (['__name__'], {}), '(__name__)\n', (784, 794), False, 'from structlog import get_logger\n'), ((2080, 2127), 'typing.cast', 'cast', (['DistLockManager', 'wrapped_distlock_manager'], {}), '(DistLockManager, wrapped_distlock_manager)\n', (2084, 2127), False, 'from typing import Any, Dict, Optional, cast\n'), ((2308, 2432), 'orchestrator.distlock.distlock_manager.DistLockManager', 'DistLockManager', (['settings.ENABLE_DISTLOCK_MANAGER', 'settings.DISTLOCK_BACKEND', '(settings.CACHE_HOST, settings.CACHE_PORT)'], {}), '(settings.ENABLE_DISTLOCK_MANAGER, settings.DISTLOCK_BACKEND,\n (settings.CACHE_HOST, settings.CACHE_PORT))\n', (2323, 2432), False, 'from orchestrator.distlock.distlock_manager import DistLockManager\n')]
|
import pytest
import os
from videohash.videoduration import video_duration
# Directory containing this test file, so the asset path is CWD-independent.
this_dir = os.path.dirname(os.path.realpath(__file__))


def test_video_duration():
    """The bundled rocket.mkv sample should be ~52.08 seconds long."""
    video_path = os.path.join(this_dir, os.path.pardir, "assets", "rocket.mkv")
    # Bug fix: the original asserted `duration - 52.08 < 0.1`, which also
    # passes for any duration *shorter* than expected (e.g. 0); compare the
    # absolute difference instead. Path building now uses os.path.join.
    assert abs(video_duration(video_path) - 52.08) < 0.1
|
[
"os.path.realpath",
"videohash.videoduration.video_duration"
] |
[((103, 129), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (119, 129), False, 'import os\n'), ((349, 375), 'videohash.videoduration.video_duration', 'video_duration', (['video_path'], {}), '(video_path)\n', (363, 375), False, 'from videohash.videoduration import video_duration\n')]
|
#
# Copyright (c) Memfault, Inc.
# See License.txt for details
#
"""
Shim around mflt_build_id to keep the original fw_build_id.py file (this file) working as before.
See mflt-build-id/src/mflt_build_id/__init__.py for actual source code.
"""
import os
import sys
# Resolve paths relative to this script so it works from any CWD.
scripts_dir = os.path.dirname(os.path.realpath(__file__))
bundled_mflt_build_id_src_dir = os.path.join(scripts_dir, "mflt-build-id", "src")

if os.path.exists(bundled_mflt_build_id_src_dir):
    # Released SDK: prefer the bundled copy over any pip-installed one
    # by putting it first on sys.path.
    sys.path.insert(0, bundled_mflt_build_id_src_dir)

from mflt_build_id import *  # noqa

if __name__ == "__main__":
    main()  # noqa
|
[
"os.path.realpath",
"os.path.join",
"sys.path.insert",
"os.path.exists"
] |
[((357, 406), 'os.path.join', 'os.path.join', (['scripts_dir', '"""mflt-build-id"""', '"""src"""'], {}), "(scripts_dir, 'mflt-build-id', 'src')\n", (369, 406), False, 'import os\n'), ((411, 456), 'os.path.exists', 'os.path.exists', (['bundled_mflt_build_id_src_dir'], {}), '(bundled_mflt_build_id_src_dir)\n', (425, 456), False, 'import os\n'), ((297, 323), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (313, 323), False, 'import os\n'), ((482, 531), 'sys.path.insert', 'sys.path.insert', (['(0)', 'bundled_mflt_build_id_src_dir'], {}), '(0, bundled_mflt_build_id_src_dir)\n', (497, 531), False, 'import sys\n')]
|
import psutil
import time as t # somehow the datetime fucks it up ``
from datetime import datetime
from subprocess import call
from prettytable import PrettyTable
import fcntl
import socket
import struct
from getmac import get_mac_address
#TODO => Add Date and time
def CFMMAIN():
    """Sample per-core CPU utilisation over one second and print one
    line per core."""
    print("CPU USAGE PER CORE")
    # Bug fix: the original printed an empty f-string for every core and
    # left an unused `cpufreq` variable; show the core index and its load.
    for i, percentage in enumerate(psutil.cpu_percent(percpu=True, interval=1)):
        print(f"Core {i}: {percentage}%")
def CFM():
    """Sample per-core CPU usage and print the raw percentage list."""
    cpufreq = psutil.cpu_freq()  # NOTE(review): unused — leftover from CFMMAIN?
    CPIFF = PrettyTable(['CPU USAGE PER CORE'])
    cpfi = psutil.cpu_percent(percpu=True, interval=1)
    CPIFF.add_row([
        cpfi
    ])
    # NOTE(review): the populated table CPIFF is never printed — only the
    # raw list. Confirm whether `print(CPIFF)` was intended.
    print(cpfi)
def time():
    """Print the current date/time inside a one-cell pretty table."""
    table = PrettyTable(['Date/Time Running/boot'])
    table.add_row([str(datetime.now())])
    print(table)
def cpuinf():
    """Print physical/logical core counts and the current CPU frequency.

    Cleanups: the redundant function-local `import psutil` was removed
    (psutil is imported at module level), and the local table no longer
    shadows the function's own name.
    """
    cpufreq = psutil.cpu_freq()
    ccore = psutil.cpu_count(logical=False)
    clcore = psutil.cpu_count(logical=True)
    table = PrettyTable(["CPU Cores", "Total", "Frequency"])
    table.add_row([
        ccore,
        clcore,
        f' {cpufreq.current:.2f}',  # current frequency, 2 decimals
    ])
    print(table)
def tab_macPT2():
    """Print the MAC address of each known interface, one table row each.

    Replaces nine hand-unrolled get_mac_address/add_row pairs with a loop
    over the same interface names in the same order.
    """
    interfaces = ("eth1", "docker0", "vmnet8", "vmnet1", "eth0",
                  "wlan0", "wlan2", "wlan1", "lo")
    Mac_table = PrettyTable(["Mac Addresses Tied To Interface"])
    for interface in interfaces:
        Mac_table.add_row([get_mac_address(interface=interface)])
    print(Mac_table)
def arp():
    """Resolve the MAC address of 10.0.0.1 via an active network request
    and print it in a one-cell table."""
    from getmac import getmac
    getmac.PORT = 55555  # NOTE(review): overrides getmac's probe port — confirm intended
    IPNET = getmac.get_mac_address(ip="10.0.0.1", network_request=True)
    tab = PrettyTable(['Network Mac'])
    tab.add_row([IPNET])
    print(tab)
def tab_uname():
    """Print host name, OS name, OS release and machine type, one table
    row each.

    Replaces four hand-unrolled add_row calls with a loop over the same
    fields in the same order.
    """
    import platform
    uname = platform.uname()
    uname_table = PrettyTable(['Platform information'])
    for field in (uname.node, uname.system, uname.release, uname.machine):
        uname_table.add_row([field])
    print(uname_table)
def getHwAddr(ifname):
    """Return the MAC address of *ifname* (e.g. 'eth0') as 'aa:bb:cc:dd:ee:ff'.

    Uses the 0x8927 ioctl (SIOCGIFHWADDR on Linux) on a throwaway datagram
    socket; the interface name is packed into a 256-byte ifreq buffer
    (truncated to 15 chars) and the hardware address is read from bytes
    18-23 of the result.
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    info = fcntl.ioctl(s.fileno(), 0x8927, struct.pack('256s', bytes(ifname, 'utf-8')[:15]))
    return ':'.join('%02x' % b for b in info[18:24])
def main():
    """Print the MAC addresses of a fixed set of interfaces in a single
    table row.

    Replaces eight hand-unrolled getHwAddr calls (A..H) with a
    comprehension over the same interfaces in the same order.
    """
    interfaces = ('eth0', 'docker0', 'wlan0', 'wlan2',
                  'wlan1', 'eth1', 'vmnet1', 'vmnet8')
    table_mac2 = PrettyTable(['Macs In Range'])
    table_mac2.add_row([getHwAddr(name) for name in interfaces])
    print(table_mac2)
#TODO => FI~X THISSSS
def mac_sesh():
    """Print link-layer (AF_LINK) address info for eth0 via netifaces.

    Marked TODO by the author — appears unfinished.
    """
    import netifaces
    A = netifaces.interfaces()  # NOTE(review): unused — likely part of the unfinished TODO
    B = netifaces.ifaddresses('eth0')[netifaces.AF_LINK]
    print(B)
def main_main():
    """Refresh-loop dashboard: network, memory and process tables once a
    second until interrupted."""
    while True:
        call("clear")  # clear the terminal between refreshes
        # Network interface status table.
        table = PrettyTable(['Network', 'Status', 'Speed'])
        # NOTE(review): psutil.net_if_stats() is re-queried three times per
        # interface; a single hoisted snapshot would be cheaper.
        for key in psutil.net_if_stats().keys():
            name = key
            up = "Up" if psutil.net_if_stats()[key].isup else "Down"
            speed = psutil.net_if_stats()[key].speed
            table.add_row([name, up, speed])
        print(table)
        # Virtual-memory summary.
        memory_table = PrettyTable(["Total", "Used", "Available", "Percentage"])
        vm = psutil.virtual_memory()
        memory_table.add_row([
            vm.total,
            vm.used,
            vm.available,
            vm.percent
        ])
        print(memory_table)
        # The ten highest-numbered PIDs.
        process_table = PrettyTable(['PID', 'PNAME', 'STATUS',
                                     'CPU', 'NUM THREADS'])
        for process in psutil.pids()[-10:]:
            try:
                p = psutil.Process(process)
                process_table.add_row([
                    str(process),
                    p.name(),
                    p.status(),
                    str(p.cpu_percent())+"%",
                    p.num_threads()
                ])
            except Exception as e:
                # NOTE(review): broad silent swallow — presumably to skip
                # processes exiting mid-iteration; psutil.NoSuchProcess /
                # AccessDenied would be narrower.
                pass
        print(process_table)
        tab_uname()
        tab_macPT2()
        cpuinf()
        t.sleep(1)  # refresh once per second
# Entry point: run the live dashboard loop (Ctrl-C to stop).
if __name__ == "__main__":
    main_main()
# items and tests that ruined the entire script or didnt fit within tabulates data struct
#tabuname
#main
#mac_shesh
#tab_macPT2
#CPUINFO
#ARP
|
[
"netifaces.interfaces",
"psutil.virtual_memory",
"getmac.getmac.get_mac_address",
"psutil.Process",
"psutil.cpu_freq",
"socket.socket",
"platform.uname",
"getmac.get_mac_address",
"time.sleep",
"netifaces.ifaddresses",
"subprocess.call",
"prettytable.PrettyTable",
"psutil.pids",
"psutil.net_if_stats",
"datetime.datetime.now",
"psutil.cpu_percent",
"psutil.cpu_count"
] |
[((299, 316), 'psutil.cpu_freq', 'psutil.cpu_freq', ([], {}), '()\n', (314, 316), False, 'import psutil\n'), ((475, 492), 'psutil.cpu_freq', 'psutil.cpu_freq', ([], {}), '()\n', (490, 492), False, 'import psutil\n'), ((505, 540), 'prettytable.PrettyTable', 'PrettyTable', (["['CPU USAGE PER CORE']"], {}), "(['CPU USAGE PER CORE'])\n", (516, 540), False, 'from prettytable import PrettyTable\n'), ((552, 595), 'psutil.cpu_percent', 'psutil.cpu_percent', ([], {'percpu': '(True)', 'interval': '(1)'}), '(percpu=True, interval=1)\n', (570, 595), False, 'import psutil\n'), ((682, 721), 'prettytable.PrettyTable', 'PrettyTable', (["['Date/Time Running/boot']"], {}), "(['Date/Time Running/boot'])\n", (693, 721), False, 'from prettytable import PrettyTable\n'), ((862, 879), 'psutil.cpu_freq', 'psutil.cpu_freq', ([], {}), '()\n', (877, 879), False, 'import psutil\n'), ((892, 923), 'psutil.cpu_count', 'psutil.cpu_count', ([], {'logical': '(False)'}), '(logical=False)\n', (908, 923), False, 'import psutil\n'), ((937, 967), 'psutil.cpu_count', 'psutil.cpu_count', ([], {'logical': '(True)'}), '(logical=True)\n', (953, 967), False, 'import psutil\n'), ((981, 1029), 'prettytable.PrettyTable', 'PrettyTable', (["['CPU Cores', 'Total', 'Frequency']"], {}), "(['CPU Cores', 'Total', 'Frequency'])\n", (992, 1029), False, 'from prettytable import PrettyTable\n'), ((1192, 1225), 'getmac.get_mac_address', 'get_mac_address', ([], {'interface': '"""eth1"""'}), "(interface='eth1')\n", (1207, 1225), False, 'from getmac import get_mac_address\n'), ((1240, 1276), 'getmac.get_mac_address', 'get_mac_address', ([], {'interface': '"""docker0"""'}), "(interface='docker0')\n", (1255, 1276), False, 'from getmac import get_mac_address\n'), ((1292, 1327), 'getmac.get_mac_address', 'get_mac_address', ([], {'interface': '"""vmnet8"""'}), "(interface='vmnet8')\n", (1307, 1327), False, 'from getmac import get_mac_address\n'), ((1343, 1378), 'getmac.get_mac_address', 'get_mac_address', ([], {'interface': 
'"""vmnet1"""'}), "(interface='vmnet1')\n", (1358, 1378), False, 'from getmac import get_mac_address\n'), ((1392, 1425), 'getmac.get_mac_address', 'get_mac_address', ([], {'interface': '"""eth0"""'}), "(interface='eth0')\n", (1407, 1425), False, 'from getmac import get_mac_address\n'), ((1442, 1476), 'getmac.get_mac_address', 'get_mac_address', ([], {'interface': '"""wlan0"""'}), "(interface='wlan0')\n", (1457, 1476), False, 'from getmac import get_mac_address\n'), ((1493, 1527), 'getmac.get_mac_address', 'get_mac_address', ([], {'interface': '"""wlan2"""'}), "(interface='wlan2')\n", (1508, 1527), False, 'from getmac import get_mac_address\n'), ((1544, 1578), 'getmac.get_mac_address', 'get_mac_address', ([], {'interface': '"""wlan1"""'}), "(interface='wlan1')\n", (1559, 1578), False, 'from getmac import get_mac_address\n'), ((1596, 1627), 'getmac.get_mac_address', 'get_mac_address', ([], {'interface': '"""lo"""'}), "(interface='lo')\n", (1611, 1627), False, 'from getmac import get_mac_address\n'), ((1644, 1692), 'prettytable.PrettyTable', 'PrettyTable', (["['Mac Addresses Tied To Interface']"], {}), "(['Mac Addresses Tied To Interface'])\n", (1655, 1692), False, 'from prettytable import PrettyTable\n'), ((2102, 2161), 'getmac.getmac.get_mac_address', 'getmac.get_mac_address', ([], {'ip': '"""10.0.0.1"""', 'network_request': '(True)'}), "(ip='10.0.0.1', network_request=True)\n", (2124, 2161), False, 'from getmac import getmac\n'), ((2172, 2200), 'prettytable.PrettyTable', 'PrettyTable', (["['Network Mac']"], {}), "(['Network Mac'])\n", (2183, 2200), False, 'from prettytable import PrettyTable\n'), ((2293, 2309), 'platform.uname', 'platform.uname', ([], {}), '()\n', (2307, 2309), False, 'import platform\n'), ((2328, 2365), 'prettytable.PrettyTable', 'PrettyTable', (["['Platform information']"], {}), "(['Platform information'])\n", (2339, 2365), False, 'from prettytable import PrettyTable\n'), ((2642, 2690), 'socket.socket', 'socket.socket', (['socket.AF_INET', 
'socket.SOCK_DGRAM'], {}), '(socket.AF_INET, socket.SOCK_DGRAM)\n', (2655, 2690), False, 'import socket\n'), ((3087, 3117), 'prettytable.PrettyTable', 'PrettyTable', (["['Macs In Range']"], {}), "(['Macs In Range'])\n", (3098, 3117), False, 'from prettytable import PrettyTable\n'), ((3262, 3284), 'netifaces.interfaces', 'netifaces.interfaces', ([], {}), '()\n', (3282, 3284), False, 'import netifaces\n'), ((384, 427), 'psutil.cpu_percent', 'psutil.cpu_percent', ([], {'percpu': '(True)', 'interval': '(1)'}), '(percpu=True, interval=1)\n', (402, 427), False, 'import psutil\n'), ((740, 754), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (752, 754), False, 'from datetime import datetime\n'), ((3293, 3322), 'netifaces.ifaddresses', 'netifaces.ifaddresses', (['"""eth0"""'], {}), "('eth0')\n", (3314, 3322), False, 'import netifaces\n'), ((3398, 3411), 'subprocess.call', 'call', (['"""clear"""'], {}), "('clear')\n", (3402, 3411), False, 'from subprocess import call\n'), ((3428, 3471), 'prettytable.PrettyTable', 'PrettyTable', (["['Network', 'Status', 'Speed']"], {}), "(['Network', 'Status', 'Speed'])\n", (3439, 3471), False, 'from prettytable import PrettyTable\n'), ((3755, 3812), 'prettytable.PrettyTable', 'PrettyTable', (["['Total', 'Used', 'Available', 'Percentage']"], {}), "(['Total', 'Used', 'Available', 'Percentage'])\n", (3766, 3812), False, 'from prettytable import PrettyTable\n'), ((3826, 3849), 'psutil.virtual_memory', 'psutil.virtual_memory', ([], {}), '()\n', (3847, 3849), False, 'import psutil\n'), ((4036, 4097), 'prettytable.PrettyTable', 'PrettyTable', (["['PID', 'PNAME', 'STATUS', 'CPU', 'NUM THREADS']"], {}), "(['PID', 'PNAME', 'STATUS', 'CPU', 'NUM THREADS'])\n", (4047, 4097), False, 'from prettytable import PrettyTable\n'), ((4657, 4667), 'time.sleep', 't.sleep', (['(1)'], {}), '(1)\n', (4664, 4667), True, 'import time as t\n'), ((4166, 4179), 'psutil.pids', 'psutil.pids', ([], {}), '()\n', (4177, 4179), False, 'import psutil\n'), ((3491, 
3512), 'psutil.net_if_stats', 'psutil.net_if_stats', ([], {}), '()\n', (3510, 3512), False, 'import psutil\n'), ((4224, 4247), 'psutil.Process', 'psutil.Process', (['process'], {}), '(process)\n', (4238, 4247), False, 'import psutil\n'), ((3633, 3654), 'psutil.net_if_stats', 'psutil.net_if_stats', ([], {}), '()\n', (3652, 3654), False, 'import psutil\n'), ((3569, 3590), 'psutil.net_if_stats', 'psutil.net_if_stats', ([], {}), '()\n', (3588, 3590), False, 'import psutil\n')]
|
#!/usr/bin/python
# Write it as a python script for portability
import glob
import os
from subprocess import check_call
# Istio release to install; the script fails fast if the env var is unset.
version = os.environ['ISTIO_VERSION']

opj = os.path.join  # shorthand

# We don't care about the platform as we only use yaml files
check_call(
    [
        "curl",
        "-o",
        "istio.tar.gz",
        "-L",
        "https://github.com/istio/istio/releases/download/{version}/istio-{version}-linux.tar.gz".format(
            version=version
        ),
    ]
)
check_call(["tar", "xf", "istio.tar.gz"])

# Create the istio namespace and enable sidecar auto-injection in `default`.
check_call(["kubectl", "create", "ns", "istio-system"])
check_call(["kubectl", "label", "namespace", "default", "istio-injection=enabled"])

istio = "istio-{}".format(version)

# Apply the Istio CRDs first (istio-init), then the demo-auth profile.
for f in glob.glob(opj(istio, "install", "kubernetes", "helm", "istio-init", "files", "crd*.yaml")):
    check_call(["kubectl", "apply", "-f", f])
check_call(["kubectl", "apply", "-f", opj(istio, "install", "kubernetes", "istio-demo-auth.yaml")])
check_call(
    ["kubectl", "wait", "deployments", "--all", "--for=condition=Available", "-n", "istio-system", "--timeout=300s"]
)

# Deploy the Bookinfo sample app and its gateway, waiting for pods each time.
check_call(["kubectl", "apply", "-f", opj(istio, "samples", "bookinfo", "platform", "kube", "bookinfo.yaml")])
check_call(["kubectl", "wait", "pods", "--all", "--for=condition=Ready", "--timeout=300s"])
check_call(["kubectl", "apply", "-f", opj(istio, "samples", "bookinfo", "networking", "bookinfo-gateway.yaml")])
check_call(["kubectl", "wait", "pods", "--all", "--for=condition=Ready", "--timeout=300s"])
|
[
"subprocess.check_call"
] |
[((480, 521), 'subprocess.check_call', 'check_call', (["['tar', 'xf', 'istio.tar.gz']"], {}), "(['tar', 'xf', 'istio.tar.gz'])\n", (490, 521), False, 'from subprocess import check_call\n'), ((523, 578), 'subprocess.check_call', 'check_call', (["['kubectl', 'create', 'ns', 'istio-system']"], {}), "(['kubectl', 'create', 'ns', 'istio-system'])\n", (533, 578), False, 'from subprocess import check_call\n'), ((579, 666), 'subprocess.check_call', 'check_call', (["['kubectl', 'label', 'namespace', 'default', 'istio-injection=enabled']"], {}), "(['kubectl', 'label', 'namespace', 'default',\n 'istio-injection=enabled'])\n", (589, 666), False, 'from subprocess import check_call\n'), ((948, 1076), 'subprocess.check_call', 'check_call', (["['kubectl', 'wait', 'deployments', '--all', '--for=condition=Available',\n '-n', 'istio-system', '--timeout=300s']"], {}), "(['kubectl', 'wait', 'deployments', '--all',\n '--for=condition=Available', '-n', 'istio-system', '--timeout=300s'])\n", (958, 1076), False, 'from subprocess import check_call\n'), ((1191, 1286), 'subprocess.check_call', 'check_call', (["['kubectl', 'wait', 'pods', '--all', '--for=condition=Ready', '--timeout=300s']"], {}), "(['kubectl', 'wait', 'pods', '--all', '--for=condition=Ready',\n '--timeout=300s'])\n", (1201, 1286), False, 'from subprocess import check_call\n'), ((1397, 1492), 'subprocess.check_call', 'check_call', (["['kubectl', 'wait', 'pods', '--all', '--for=condition=Ready', '--timeout=300s']"], {}), "(['kubectl', 'wait', 'pods', '--all', '--for=condition=Ready',\n '--timeout=300s'])\n", (1407, 1492), False, 'from subprocess import check_call\n'), ((805, 846), 'subprocess.check_call', 'check_call', (["['kubectl', 'apply', '-f', f]"], {}), "(['kubectl', 'apply', '-f', f])\n", (815, 846), False, 'from subprocess import check_call\n')]
|
import RPi.GPIO as GPIO
class rgb_strip(object):
    """Drive an RGB LED strip via three PWM channels on Raspberry Pi GPIO.

    :param pins: mapping with keys "red", "green", "blue" -> BCM pin numbers
    :param frequency: PWM frequency in Hz, used for all three channels
    """
    def __init__(self, pins, frequency):
        GPIO.setmode(GPIO.BCM)
        self.__pins = dict()
        # Configure every supplied pin as an output before attaching PWM.
        for _color, pin in pins.items():
            GPIO.setup(pin, GPIO.OUT)
        self.__pins['RED'] = GPIO.PWM(pins["red"], frequency)
        self.__pins['RED'].start(0)
        self.__pins['GREEN'] = GPIO.PWM(pins["green"], frequency)
        self.__pins['GREEN'].start(0)
        self.__pins['BLUE'] = GPIO.PWM(pins["blue"], frequency)
        self.__pins['BLUE'].start(0)
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        # Always release the GPIO resources on exit.
        self.cleanup()
        # BUG FIX: the original returned True, which suppressed *every*
        # exception raised inside the with-block, hiding real errors.
        # Return False so exceptions propagate to the caller.
        return False
    def set_color(self, red, green, blue):
        """Set the duty cycle (0-100) of each color channel."""
        self.__pins['RED'].ChangeDutyCycle(red)
        self.__pins['GREEN'].ChangeDutyCycle(green)
        self.__pins['BLUE'].ChangeDutyCycle(blue)
        print('[RGB]: R: %s G: %s B: %s' % (red, green, blue))
    def cleanup(self):
        """Stop all PWM channels and reset the GPIO state."""
        for _color, pwm in self.__pins.items():
            pwm.stop()
        GPIO.cleanup()
|
[
"RPi.GPIO.setup",
"RPi.GPIO.setmode",
"RPi.GPIO.PWM",
"RPi.GPIO.cleanup"
] |
[((92, 114), 'RPi.GPIO.setmode', 'GPIO.setmode', (['GPIO.BCM'], {}), '(GPIO.BCM)\n', (104, 114), True, 'import RPi.GPIO as GPIO\n'), ((235, 267), 'RPi.GPIO.PWM', 'GPIO.PWM', (["pins['red']", 'frequency'], {}), "(pins['red'], frequency)\n", (243, 267), True, 'import RPi.GPIO as GPIO\n'), ((323, 357), 'RPi.GPIO.PWM', 'GPIO.PWM', (["pins['green']", 'frequency'], {}), "(pins['green'], frequency)\n", (331, 357), True, 'import RPi.GPIO as GPIO\n'), ((414, 447), 'RPi.GPIO.PWM', 'GPIO.PWM', (["pins['blue']", 'frequency'], {}), "(pins['blue'], frequency)\n", (422, 447), True, 'import RPi.GPIO as GPIO\n'), ((918, 932), 'RPi.GPIO.cleanup', 'GPIO.cleanup', ([], {}), '()\n', (930, 932), True, 'import RPi.GPIO as GPIO\n'), ((182, 207), 'RPi.GPIO.setup', 'GPIO.setup', (['pin', 'GPIO.OUT'], {}), '(pin, GPIO.OUT)\n', (192, 207), True, 'import RPi.GPIO as GPIO\n')]
|
import speech_recognition as sr
from Workspace import *
import os
file_name = 'backup.txt'  # cache of the last confirmed transcript (read/written by recognizeaudio)
def recognizeaudio(return_existing = False, save_current = False):
    """Capture an equation by voice, optionally reusing/saving a text backup.

    :param return_existing: if True and the backup file exists, return its
        first line (without the trailing newline) instead of listening
    :param save_current: if True, write the confirmed phrase to the backup
    :return: the recognized text, or None if recognition failed
    """
    if return_existing and os.path.exists(os.path.join(os.getcwd(), file_name)):
        with open(file_name, 'r') as backup:
            return backup.readline()[: -1]
    recognizer = sr.Recognizer()
    microphone = sr.Microphone()
    try:
        phrase = ""
        confirmed = False
        # Keep listening until the user accepts the transcription.
        while not confirmed:
            with microphone as source:
                print("Please say the equation:")
                recognizer.adjust_for_ambient_noise(source)
                audio = recognizer.listen(source)
                phrase = str(recognizer.recognize_google(audio))
                print("You said: " + phrase)
                confirmed = 'Y' in input("Does that look good? [Y/N] ")
        if save_current:
            with open(file_name, 'w+') as writer:
                writer.write(phrase + "\n")
        return phrase
    except Exception as e:
        # Best-effort: report the failure and signal it with None.
        print(e)
        return None
#"+ x + 1 ^ 2"
def adjust_binomals(string_part):
    """Wrap the base of every '^' power in parentheses and make the
    coefficient product explicit.

    e.g. "1 + 4*x^2"  -> "1 + 4*(x)^2"
         "4*2x^2"     -> "4*(2*x)^2"
         "4x^2"       -> "(4*x)^2"   (term at the very start of the string)

    :param string_part: one side of the equation, already normalised
        (spoken phrases replaced by operator symbols)
    :return: the adjusted expression string
    """
    index = 0
    while(index < len(string_part)):
        if(string_part[index] == '^'):
            # Walk left to the '*' that starts this term.  BUG FIX: also
            # stop at the string start -- the original ran past index 0,
            # wrapped around via negative indexing and eventually raised
            # IndexError for a power term with no leading '*'.
            x_index = index
            while(x_index >= 0 and string_part[x_index] != '*'):
                x_index -= 1
            replacement = "(" + string_part[x_index + 1: index] + ")^"
            # Insert an explicit '*' between coefficient and variable,
            # e.g. "(2x)^" -> "(2*x)^".  BUG FIX: skip bases without an
            # 'x' (pure-constant powers) instead of raising ValueError.
            if 'x' in replacement:
                pot = replacement.index('x')
                if pot != 1:
                    replacement = replacement[:pot] + "*" + replacement[pot:]
            string_part = string_part[:x_index + 1] + replacement + string_part[index + 1:]
            # 'index' now points at the '^' of the replacement; the
            # increment below moves past it so it is not reprocessed.
            index = x_index + len(replacement)
        index += 1
    return string_part
def convert_audio_to_equation(recognized_text, debug = False):
    """Translate a spoken-equation transcript into solver syntax and solve it.

    :param recognized_text: raw transcript, e.g. "1 + 4X raised to 2 is equal to 9"
    :param debug: if True, print the formatted expression and enable
        solver debugging
    """
    # Normalise the spoken phrases into operator symbols (order matters:
    # the multi-word phrases must be replaced before single characters).
    for spoken, symbol in ((" raised to ", "^"),
                           ("is equal to", "="),
                           (" * ", "*"),
                           ("X", "x")):
        recognized_text = recognized_text.replace(spoken, symbol)
    # Split at '=' and move everything to one side: lhs - (rhs) = 0.
    eq_pos = recognized_text.index("=")
    left = adjust_binomals(recognized_text[0: eq_pos].strip())
    right = adjust_binomals(recognized_text[eq_pos + 1: ].strip())
    combined = left + " - (" + right + ")"
    if debug:
        print("Formatted to: " + combined)
    expression_solver(combined, debugging = debug)
if __name__ == '__main__':
    # Interactive entry point: capture a spoken equation and solve it.
    #example_input = "1 + 4X raised to 2 is equal to 9"
    #convert_audio_to_equation(example_input)
    user_input = recognizeaudio(return_existing = False, save_current= False)
    convert_audio_to_equation(user_input, debug = False)
|
[
"os.getcwd",
"speech_recognition.Recognizer",
"speech_recognition.Microphone"
] |
[((364, 379), 'speech_recognition.Recognizer', 'sr.Recognizer', ([], {}), '()\n', (377, 379), True, 'import speech_recognition as sr\n'), ((390, 405), 'speech_recognition.Microphone', 'sr.Microphone', ([], {}), '()\n', (403, 405), True, 'import speech_recognition as sr\n'), ((216, 227), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (225, 227), False, 'import os\n')]
|
import random
from pypy.module._cffi_backend.handle import CffiHandles
class PseudoWeakRef(object):
    """Test double for weakref.ref: calling it returns ``_content``."""
    _content = 42
    def __call__(self):
        # Mimics dereferencing a weakref; tests set _content = None to
        # simulate a collected referent.
        return self._content
def test_cffi_handles_1():
    """Every reserved slot stays bound to its object and the handle table
    never grows past 16384 entries."""
    manager = CffiHandles(None)
    stored = {}
    for _ in range(10000):
        slot = manager.reserve_next_handle_index()
        # A freshly reserved slot is inside the table and holds a dead ref.
        assert 0 <= slot < len(manager.handles)
        assert manager.handles[slot]() is None
        ref = PseudoWeakRef()
        stored[slot] = ref
        manager.handles[slot] = ref
    assert len(manager.handles) <= 16384
    for slot, ref in stored.items():
        assert manager.handles[slot] is ref
def test_cffi_handles_2():
    # Like test_cffi_handles_1, but randomly "collects" referents along the
    # way (simulated by setting _content = None) so freed slots can be
    # reused; the handle table must therefore stay small (< 100 entries).
    ch = CffiHandles(None)
    expected_content = {}
    for i in range(10000):
        index = ch.reserve_next_handle_index()
        assert 0 <= index < len(ch.handles)
        assert ch.handles[index]() is None
        pwr = PseudoWeakRef()
        expected_content[index] = pwr
        ch.handles[index] = pwr
        #
        if len(expected_content) > 20:
            # Drop a random live handle and simulate garbage collection of
            # its referent so its slot becomes reusable.
            r = random.choice(list(expected_content))
            pwr = expected_content.pop(r)
            pwr._content = None
    #
    assert len(ch.handles) < 100
    for index, pwr in expected_content.items():
        assert ch.handles[index] is pwr
|
[
"pypy.module._cffi_backend.handle.CffiHandles"
] |
[((212, 229), 'pypy.module._cffi_backend.handle.CffiHandles', 'CffiHandles', (['None'], {}), '(None)\n', (223, 229), False, 'from pypy.module._cffi_backend.handle import CffiHandles\n'), ((678, 695), 'pypy.module._cffi_backend.handle.CffiHandles', 'CffiHandles', (['None'], {}), '(None)\n', (689, 695), False, 'from pypy.module._cffi_backend.handle import CffiHandles\n')]
|
import shutil
import subprocess
import uuid
from json import JSONDecodeError
from typing import List, Union, Tuple
import requests
import json
import logging
import math
from pathlib import Path
from eppy.modeleditor import IDF
import esoreader
import pandas as pd
import numpy as np
from pandas import Series
from firepy.model import HVAC, Heating, Cooling, NaturalVentilation
from firepy.model.building import Construction, OpaqueMaterial, WindowMaterial, ObjectLibrary, BuildingSurface, \
Building, Zone, Ref
logger = logging.getLogger(__name__)
class RemoteConnection:
    """Thin HTTP client for a remote EnergyPlus simulation server.

    Wraps the server endpoints (/setup, /check, /run, /results,
    /results/detailed, /cleanup, /cleanup/result) used by
    EnergyPlusSimulation in remote mode.
    """
    def __init__(self, host: str, port: int):
        self.host = host
        self.port = port
        # Normalise the host so self.url is always a full 'http://host:port'
        if not host.startswith('http'):
            self.host = 'http://' + self.host
        self.url = '{host}:{port}'.format(host=self.host, port=self.port)
    def setup(self, name: str = None, epw: str = None, idd: str = None, variables: dict = None):
        """
        Upload setup data (weather file, data dictionary, variable mapping)
        to the server; only the pieces that are not None are sent.
        :param name: name of the calculation setup
        :param epw: full epw string
        :param idd: full idd string
        :param variables: dict of variables
        :return:
        """
        url = self.url + '/setup'
        if epw is not None:
            # The epw is stored server-side under 'name', so a name is mandatory
            if name is None:
                raise Exception('Please provide a name for the setup')
            logger.debug('Setting up EPW on server')
            requests.post(url=url, params={'name': name, 'type': 'epw'}, data=epw)
        if idd is not None:
            logger.debug('Setting up IDD on server')
            requests.post(url=url, params={'type': 'idd'}, data=idd)
        if variables is not None:
            logger.debug('Setting up variables dict')
            requests.post(url=url, params={'type': 'vars'}, json=variables)
    def check(self, name) -> bool:
        """Return True if the server reports the setup `name` as ready ("OK")."""
        url = self.url + '/check'
        response = requests.get(url=url, params={'name': name})
        if response.text == "OK":
            logger.debug('Server check response: {}'.format(response.text))
            return True
        else:
            logger.debug('Server check response: {}'.format(response.text))
            return False
    def run(self, name: str, idf: IDF, sim_id: str = None) -> str:
        """Submit the idf text for simulation and return the server's
        response text (presumably the simulation id, mirroring the local
        runner's return value -- confirm against the server API)."""
        url = self.url + '/run'
        logger.debug('Running simulation at: {}'.format(url))
        data = idf.idfstr()
        params = {'name': name}
        if sim_id is not None:
            params['id'] = sim_id
        response = requests.post(url=url, params=params, data=data)
        return response.text
    def results(self, variables: List[str], name: str, sim_id: str, typ: str, period: str):
        """Fetch aggregated results; returns the raw requests.Response
        (the caller decides how to decode it)."""
        url = self.url + '/results'
        logger.debug('Requesting results from: {}'.format(url))
        payload = {'variables': variables, 'name': name, 'id': sim_id, 'type': typ, 'period': period}
        response = requests.get(url=url, params=payload)
        logger.debug('Response from server: {}'.format(response.text))
        return response
    def results_detailed(self, variable: str, name: str, sim_id: str, typ: str, period: str):
        """Fetch detailed results for a single variable as parsed JSON."""
        url = self.url + '/results/detailed'
        logger.debug('Requesting detailed results from: {}'.format(url))
        payload = {'variable': variable, 'name': name, 'id': sim_id, 'type': typ, 'period': period}
        response = requests.get(url=url, params=payload)
        return response.json()
    def clean_up(self, name: str) -> str:
        """Remove all server-side data of the setup; returns the server message."""
        url = self.url + '/cleanup'
        logger.debug('Cleaning up server')
        response = requests.get(url=url, params={'name': name})
        return response.text
    def drop_result(self, name: str, sim_id: str) -> str:
        """Delete one simulation result on the server; returns the server message."""
        url = self.url + '/cleanup/result'
        logger.debug('Deleting result on server for id: {id}'.format(id=sim_id))
        response = requests.get(url=url, params={'name': name, 'id': sim_id})
        return response.text
class EnergyPlusSimulation:
    """Run EnergyPlus simulations either locally (subprocess) or remotely
    (through a RemoteConnection) and retrieve the results as pandas objects."""
    # TODO separate remote and local class
    # Mapping: short output name -> full EnergyPlus output variable name,
    # grouped by report type ('zone' / 'surface' / 'balance').
    var_dict = {
        'zone': {
            'heating': 'Zone Ideal Loads Supply Air Total Heating Energy',
            'cooling': 'Zone Ideal Loads Supply Air Total Cooling Energy',
            'infiltration': 'Zone Infiltration Total Heat Loss Energy',
            'solar gains': 'Zone Windows Total Transmitted Solar Radiation Energy',
            'glazing loss': 'Zone Windows Total Heat Loss Energy',
            'opaque loss': 'Zone Opaque Surface Outside Face Conduction Loss Energy',
            'ventilation': 'Zone Ventilation Sensible Heat Loss Energy',
            'lights': 'Zone Lights Electric Energy',
            'equipment': 'Zone Electric Equipment Electric Energy',
            'other': 'Zone Other Equipment Total Heating Energy',
            'people': 'Zone People Total Heating Energy'
        },
        'surface': {
            'opaque loss': 'Surface Average Face Conduction Heat Transfer Energy',
            'glazing loss': 'Surface Window Heat Loss Energy',
            'glazing gain': 'Surface Window Heat Gain Energy',
            'conduction rate': 'Surface Average Face Conduction Heat Transfer Rate per Area'
        },
        'balance': {
            'internal gain': 'Zone Air Heat Balance Internal Convective Heat Gain Rate',
            'convective': 'Zone Air Heat Balance Surface Convection Rate',
            'interzone air': 'Zone Air Heat Balance Interzone Air Transfer Rate',
            'outdoor air': 'Zone Air Heat Balance Outdoor Air Transfer Rate',
            'system air': 'Zone Air Heat Balance System Air Transfer Rate',
            'system convective': 'Zone Air Heat Balance System Convective Heat Gain Rate',
            'air storage': 'Zone Air Heat Balance Air Energy Storage Rate',
            'deviation': 'Zone Air Heat Balance Deviation Rate'
        }
    }
    # Unit of each output as reported by EnergyPlus.  In the results
    # methods 'J' entries are converted to kWh and '-J' entries are
    # converted to kWh with the sign flipped (losses come out negative);
    # 'W' and 'W/m2' entries are returned as-is.
    units = {
        'heating': 'J',
        'cooling': '-J',
        'infiltration': '-J',
        'solar gains': 'J',
        'glazing loss': '-J',
        'opaque loss': '-J',
        'ventilation': '-J',
        'lights': 'J',
        'equipment': 'J',
        'people': 'J',
        'other': 'J',
        'glazing gain': 'J',
        'conduction rate': 'W/m2',
        'internal gain': 'W',
        'convective': 'W',
        'interzone air': 'W',
        'outdoor ait': 'W',
        'system air': 'W',
        'system convective': 'W',
        'air storage': 'W',
        'deviation': 'W',
    }
    def __init__(self, idf: IDF = None, epw_path: str = None, epw: str = None, output_freq: str = 'monthly',
                 typ: str = 'local', output_directory: str = None, remote_server: RemoteConnection = None,
                 ep_exe_path = None):
        """
        A class to run EnergyPlus simulations either locally or remotely on a server
        :param idf: eppy IDF instance to hold model information
        :param epw_path: path to the weather file
        :param epw: full epw string from the weather file
        :param output_freq: output will be saved at this frequency and any lower frequency
            (e.g. monthly, annual, runperiod)
        :param typ: 'local' or 'remote'; either output_directory need to be set (for local), or server (for remote)
        :param output_directory: a directory path to save EnergyPlus output to
        :param remote_server: a RemoteConnection instance that can connect to the EnergyPlus server
        :param ep_exe_path: path to the EnergyPlus executable (used for local runs)
        """
        self.idf = idf
        if self.idf is not None:
            self.idf.epw = epw_path
        self.typ = typ
        # Prefer the explicit epw string; otherwise read it from epw_path.
        if epw is None:
            if epw_path is not None:
                with open(epw_path, 'r') as epw_file:  # 'rb' for binary open?
                    self.epw = epw_file.read()
            else:
                self.epw = None
        else:
            self.epw = epw
        if typ == 'local':
            self.output_directory = output_directory
        if typ == 'remote':
            self.server = remote_server
        self.output_frequency = output_freq
        self.ep_exe_path = ep_exe_path
    @property
    def output_frequency(self):
        """List of reporting frequencies requested from EnergyPlus (the
        configured frequency plus every less frequent one)."""
        return self._output_frequency
    @output_frequency.setter
    def output_frequency(self, output_freq: str):
        self._output_frequency = []
        # Ordered from least to most frequent; the slice below keeps the
        # requested frequency and everything less frequent than it.
        freq_list = ['runperiod', 'annual', 'monthly', 'daily', 'hourly', 'timestep']
        if output_freq not in freq_list:
            raise Exception('Parameter "output_freq" can be one of: {i}'.format(i=', '.join(freq_list)))
        freq_index = freq_list.index(output_freq)
        self._output_frequency = freq_list[:freq_index+1]
    def run(self, **kwargs) -> str:
        """Dispatch the simulation to the local or the remote runner
        depending on self.typ; returns the runner's response (the
        simulation id for local runs)."""
        if self.idf is None:
            raise Exception('No idf set, unable to run simulation')
        if self.typ == 'local':
            local_response = self.run_local(**kwargs)
            return local_response
        elif self.typ == 'remote':
            server_response = self.run_remote(**kwargs)
            return server_response
    def run_local(self, name: str, sim_id: str = None) -> str:
        """Write the model to '<output_directory>_<name>/<sim_id>/model.idf'
        and invoke the EnergyPlus executable on it; returns the sim_id
        (generated with uuid1 when not supplied)."""
        if sim_id is None:
            sim_id = str(uuid.uuid1())
        output_path = Path(f'{self.output_directory}_{name}') / sim_id
        if not output_path.exists():
            output_path.mkdir(parents=True)
        # self.idf.run(output_directory=str(output_path))
        idf_address = output_path / "model.idf"
        with idf_address.open('w') as idf_file:
            idf_file.write(self.idf.idfstr())
        # compose Energy Plus command
        cmd = [self.ep_exe_path]
        cmd += ["--output-directory", str(output_path)]  # output folder
        cmd += ["--weather", self.idf.epw]  # weather file
        cmd += ["--idd", self.idf.iddname]  # input data dictionary
        cmd += [str(idf_address)]  # idf input file
        subprocess.run(cmd)
        return sim_id
    def run_remote(self, name: str, force_setup: bool = False, sim_id: str = None) -> str:
        """Ensure the server is set up for `name` (or re-setup when
        force_setup is True), then submit the model; returns the server's
        response text."""
        # check first
        if not self.server.check(name) or force_setup:
            self.setup_server(name=name)
        # then run
        if sim_id is not None:
            server_response = self.server.run(name=name, idf=self.idf, sim_id=sim_id)
        else:
            server_response = self.server.run(name=name, idf=self.idf)
        return server_response
    def setup_server(self, name: str, epw: str = None):
        """Upload the weather data and the variable mapping to the remote
        server under `name`; an epw passed here replaces self.epw."""
        variables = {'var_dict': EnergyPlusSimulation.var_dict, 'units': EnergyPlusSimulation.units}
        if self.epw is None and epw is None:
            raise Exception('No epw is set, please provide epw before setting up the server')
        if epw is not None:
            self.epw = epw
        self.server.setup(name=name, epw=self.epw, variables=variables)
        # optionally we could set the idd
    def clear_outputs(self):
        """Remove every Output:Variable object from the idf."""
        # clear all output variables
        while len(self.idf.idfobjects['Output:Variable']) > 0:
            self.idf.popidfobject('Output:Variable', 0)
    def set_outputs(self, *args, typ: str = None):
        """
        options:
            ZONE ENERGY (from Honeybee)
                Zone Ideal Loads Supply Air {type} {energy} Cooling Energy
                    for type in [Total, Sensible, Latent]
                    for energy in [Heating, Cooling]
                Cooling Coil Electric Energy
                Chiller Electric Energy
                Boiler Heating Energy
                Heating Coil Total Heating Energy
                Heating Coil Gas Energy
                Heating Coil Electric Energy
                Humidifier Electric Energy
                Fan Electric Energy
                Zone Ventilation Fan Electric Energy
                Zone Lights Electric Energy
                Zone Electric Equipment Electric Energy
                Earth Tube Fan Electric Energy
                Pump Electric Energy
                Zone VRF Air Terminal Cooling Electric Energy
                Zone VRF Air Terminal Heating Electric Energy
                VRF Heat Pump Cooling Electric Energy
                VRF Heat Pump Heating Electric Energy
            ZONE GAINS AND LOSSES (from Honeybee)
                Zone Windows Total Transmitted Solar Radiation Energy
                Zone Ventilation Sensible Heat Loss Energy
                Zone Ventilation Sensible Heat Gain Energy
                Zone People {type} Heating Energy
                Zone Ideal Loads Zone {type} Heating Energy
                Zone Ideal Loads Zone {type} Cooling Energy
                Zone Infiltration {type} Heat Loss Energy
                Zone Infiltration {type} Heat Gain Energy
                    for type in [Total, Sensible, Latent]
            ZONE COMFORT (from Honeybee)
                Zone Operative Temperature
                Zone Mean Air Temperature
                Zone Mean Radiant Temperature
                Zone Air Relative Humidity
            COMFORT MAP (from Honeybee)
                Zone Ventilation Standard Density Volume Flow Rate
                Zone Infiltration Standard Density Volume Flow Rate
                Zone Mechanical Ventilation Standard Density Volume Flow Rate
                Zone Air Heat Balance Internal Convective Heat Gain Rate
                Zone Air Heat Balance Surface Convection Rate
                Zone Air Heat Balance System Air Transfer Rate
                Surface Window System Solar Transmittance
            HVAC METRICS (from Honeybee)
                System Node Standard Density Volume Flow Rate
                System Node Temperature
                System Node Relative Humidity
                Zone Cooling Setpoint Not Met Time
                Zone Heating Setpoint Not Met Time
            SURFACE TEMPERATURE (from Honeybee)
                Surface Outside Face Temperature
                Surface Inside Face Temperature
            SURFACE ENERGY (from Honeybee)
                Surface Average Face Conduction Heat Transfer Energy
                Surface Window Heat Loss Energy
                Surface Window Heat Gain Energy
            GLAZING SOLAR (from Honeybee)
                Surface Window Transmitted Beam Solar Radiation Energy
                Surface Window Transmitted Diffuse Solar Radiation Energy
                Surface Window Transmitted Solar Radiation Energy
        :param args: 'heating' / 'cooling' / etc.
        :param typ: 'zone' / 'surface'
        :return: None
        """
        # 'all' requests every variable of the given type (or of every
        # type when typ is None); otherwise each arg is looked up in
        # var_dict and must come with an explicit typ.
        if 'all' in args:
            if typ is not None:
                for var in EnergyPlusSimulation.var_dict[typ.lower()].values():
                    self.add_variable(var)
            else:
                for typ in EnergyPlusSimulation.var_dict.keys():
                    for var in EnergyPlusSimulation.var_dict[typ].values():
                        self.add_variable(var)
        else:
            if typ is not None:
                for var in args:
                    self.add_variable(EnergyPlusSimulation.var_dict[typ.lower()][var])
            else:
                raise Exception('Please specify output type: {t}'.format(
                    t=' or '.join(EnergyPlusSimulation.var_dict.keys())))
    def add_variable(self, var_name: str):
        """Add an Output:Variable object for `var_name` at every configured
        reporting frequency, skipping frequencies already present."""
        variable_names = [ov.Variable_Name for ov in self.idf.idfobjects['Output:Variable']]
        if var_name not in variable_names:
            for output_freq in self.output_frequency:
                self.idf.newidfobject(
                    key='Output:Variable',
                    Key_Value='*',
                    Variable_Name=var_name,
                    Reporting_Frequency=output_freq
                )
        else:
            # Variable already requested: only add the missing frequencies.
            output_frequencies = [output_var.Reporting_Frequency
                                  for output_var in self.idf.idfobjects['Output:Variable']
                                  if output_var.Variable_Name == var_name]
            for output_freq in self.output_frequency:
                if output_freq not in output_frequencies:
                    self.idf.newidfobject(
                        key='Output:Variable',
                        Key_Value='*',
                        Variable_Name=var_name,
                        Reporting_Frequency=output_freq
                    )
    def results(self, variables: Union[str, List[str]], name: str = None, sim_id: str = None,
                typ: str = 'zone', period: str = 'monthly'):
        """Dispatch to the local or remote result reader depending on
        self.typ; both require `name` and `sim_id`."""
        if self.typ == 'local':
            if name is None:
                raise Exception('Please provide "name" to access local results')
            if sim_id is None:
                raise Exception('Please provide simulation id to access local results')
            return self.results_local(variables=variables, name=name, sim_id=sim_id, typ=typ, period=period)
        elif self.typ == 'remote':
            if name is None:
                raise Exception('Please provide "name" to access remote results')
            if sim_id is None:
                raise Exception('Please provide simulation id to access remote results')
            return self.results_remote(variables=variables, name=name, sim_id=sim_id, typ=typ, period=period)
    def results_local(self, variables: Union[str, List[str]], name: str, sim_id: str,
                      typ: str = 'zone', period: str = 'monthly'):
        """Read the local eso output: checks eplusout.end for success, sums
        each requested variable over all its columns (zones/surfaces),
        converts J / -J units to kWh, and returns one DataFrame with a
        column per variable.  Returns an error message string on failure."""
        if variables == 'all':
            variables = EnergyPlusSimulation.var_dict[typ.lower()].keys()
        elif isinstance(variables, str):
            variables = [variables]
        simulation_address = Path(f'{self.output_directory}_{name}') / sim_id
        if not simulation_address.exists():
            message = 'No result directory for id: {i}'.format(i=sim_id)
            logger.debug(message)
            return message
        end_path = simulation_address / 'eplusout.end'
        with end_path.open('r') as end_file:
            end_success = end_file.readline()
            logger.debug(end_success)
        if 'EnergyPlus Completed Successfully' not in end_success:
            message = 'Simulation failed for id: {i}'.format(i=sim_id)
            logger.info(message)
            return message
        eso_path = simulation_address / 'eplusout.eso'
        if not eso_path.exists():
            message = 'No result for id: {i}'.format(i=sim_id)
            logger.debug(message)
            return message
        eso = esoreader.read_from_path(str(eso_path))
        res_dfs = []
        for var in variables:
            var_name = EnergyPlusSimulation.var_dict[typ][var]
            df = eso.to_frame(var_name, frequency=period)
            # Aggregate over all zones/surfaces into a single series.
            df = df.sum(axis='columns')
            df.name = var
            if EnergyPlusSimulation.units[var] == 'J':  # Convert to kWh
                df /= (3.6*1e6)
            elif EnergyPlusSimulation.units[var] == '-J':
                df /= -(3.6 * 1e6)
            res_dfs.append(df)
        return pd.concat(res_dfs, axis='columns')
    def results_remote(self, variables: Union[str, List[str]], name: str, sim_id: str,
                       typ: str = 'zone', period: str = 'monthly') -> pd.DataFrame:
        """Fetch aggregated results from the server and decode them into a
        DataFrame; returns the raw response text when it is not valid JSON
        (e.g. a server-side error message)."""
        if variables == 'all':
            variables = EnergyPlusSimulation.var_dict[typ.lower()].keys()
        elif isinstance(variables, str):
            variables = [variables]
        response = self.server.results(variables, name, sim_id, typ, period)
        try:
            return pd.read_json(response.json(), orient='split')
        except JSONDecodeError:
            return response.text
    def results_detailed(self, variable: str, name: str = None, sim_id: str = None,
                         typ: str = 'zone', period: str = 'monthly'):
        """Dispatch to the local or remote detailed-result reader (one
        variable, one column per zone/surface) depending on self.typ."""
        if self.typ == 'local':
            if name is None:
                raise Exception('Please provide "name" to access local results')
            if sim_id is None:
                raise Exception('Please provide "simulation id" to access local results')
            return self.results_detailed_local(variable=variable, name=name, sim_id=sim_id,
                                               typ=typ, period=period)
        elif self.typ == 'remote':
            if name is None:
                raise Exception('Please provide "name" to access remote results')
            if sim_id is None:
                raise Exception('Please provide "simulation id" to access remote results')
            return self.results_detailed_remote(variable=variable, name=name, sim_id=sim_id,
                                                typ=typ, period=period)
    def results_detailed_local(self, variable: str, name: str, sim_id: str, typ: str, period: str):
        """Read one variable from the local eso output without aggregating
        (a column per zone/surface), converting J / -J units to kWh."""
        simulation_address = Path(f'{self.output_directory}_{name}') / sim_id
        eso_path = simulation_address / 'eplusout.eso'
        eso = esoreader.read_from_path(str(eso_path))
        var_name = EnergyPlusSimulation.var_dict[typ][variable]
        df = eso.to_frame(var_name, frequency=period)
        if EnergyPlusSimulation.units[variable] == 'J':  # Convert to kWh
            df /= (3.6 * 1e6)
        elif EnergyPlusSimulation.units[variable] == '-J':
            df /= -(3.6 * 1e6)
        return df
    def results_detailed_remote(self, variable: str, name: str, sim_id: str,
                                typ: str, period: str):
        """Fetch detailed results for one variable from the server and
        decode them into a DataFrame."""
        response_json = self.server.results_detailed(variable=variable, name=name, sim_id=sim_id,
                                                     typ=typ, period=period)
        return pd.read_json(response_json, orient='split')
    def drop_local_result(self, name: str, sim_id: str):
        """Delete the local output directory of one simulation, if present."""
        simulation_address = Path(f'{self.output_directory}_{name}') / sim_id
        if simulation_address.exists():
            shutil.rmtree(simulation_address)
class SteadyStateCalculation:
month_lengths = [
744,
672,
744,
720,
744,
720,
744,
744,
720,
744,
720,
744
] # in hours
year_length = 8760
def __init__(self, weather_data: Union[pd.DataFrame, Path, str] = None):
self.weather_data = weather_data
@property
def weather_data(self) -> pd.DataFrame:
return self._weather_data
@weather_data.setter
def weather_data(self, data):
if isinstance(data, pd.DataFrame):
self._weather_data = data
elif isinstance(data, (Path, str)):
if isinstance(data, str):
data = Path(data)
self._weather_data = pd.read_csv(str(data), header=[0,1], index_col=[0,1])
elif data is None:
self._weather_data = None
else:
raise Exception('Only Path, str or pandas DataFrame can be parsed to weather data')
@staticmethod
def generate_weather_data(epw: Path = None) -> pd.DataFrame:
"""
Generate weather data as pandas DataFrame if epw is supplied, weather data will be calculated
from the epw, if not, a blank DataFrame will be created to be filled by the user
:param epw: the weather file in .epw format
:return: pandas DataFrame
"""
index_labels = [('Monthly', '{m:02n}'.format(m=m)) for m in range(1, 13)] + [('Yearly', 'Yearly')]
column_labels = [('External Temperature', 'Mean')]
orientation_list = ['North', 'NorthEast', 'East', 'SouthEast', 'South', 'SouthWest', 'West', 'NorthWest']
column_labels += [('Total Solar Radiation Energy', orientation) for orientation in orientation_list]
cols = pd.MultiIndex.from_tuples(column_labels)
indx = pd.MultiIndex.from_tuples(index_labels)
weather_data = pd.DataFrame(columns=cols, index=indx)
if epw is None:
return weather_data
else:
raise Exception('epw data generation is not implemented yet')
def u_value(self, construction: Ref, library: ObjectLibrary, surface_type="WALL"):
"""
works with opaque constructions and simple glazing system
TODO inhomogenity in construction
TODO effect of screws, fixing elements
TODO air layers
"""
surface_heat_resistance = {
# (R_si, R_se)
'ROOF': (0.1, 0.04),
'CEILING': (0.1, 0.04),
'EXPOSEDFLOOR': (0.17, 0.04),
'FLOOR': (0.17, 0.04),
'SLABONGRADE': (0.17, 0.04),
'WALL': (0.13, 0.04)
# to be continued...
}
try:
rs_i, rs_e = surface_heat_resistance[surface_type.upper()]
except KeyError:
raise Exception('No heat transfer coefficient defined for surface of type: {st}'.format(st=surface_type))
r_value = rs_i + rs_e
u_value_win = 0
construction_obj = library.get(construction)
for mat in construction_obj.Layers:
material = library.get(mat)
if isinstance(material, OpaqueMaterial):
r_value += material.Thickness / material.Conductivity
elif isinstance(material, WindowMaterial):
u_value_win = material.UValue
else:
message = "Layer in construction needs to be either OpaqueMaterial or WindowMaterial: "
message += "{material} - {t}".format(material=material.RefName, t=material.ObjType)
raise Exception(message)
if u_value_win != 0:
u_value = u_value_win
else:
u_value = 1 / r_value
return u_value
def u_value_floor_to_ground(self, surface: BuildingSurface, wall_thickness: float, library: ObjectLibrary,
soil_type='sand') -> float:
"""
Calculate U value of a floor-to-ground based on ISO 13370 Standard
:param surface:
:param wall_thickness:
:param library:
:param soil_type:
:return:
"""
# TODO underground wall!
if surface.SurfaceType.lower() not in ['slabongrade', 'floor']:
raise Exception(
"U value calculation of Floor to Ground not suitable for {st}!".format(st=surface.SurfaceType))
soil_conductivity = {
# W/mK
'clay': 1.5, # agyag
'slit': 1.5, # iszap
'sand': 2.0, # homok
'gravel': 2.0, # kavics
'stone': 3.5
}
soil_heat_store_capacity = {
# J/m3K
'clay': 3e6,
'slit': 3e6,
'sand': 2e6,
'gravel': 2e6,
'stone': 2e6
}
def heat_resistance(construction):
r_value = 0
for mat in construction.Layers:
material = library.get(mat)
if isinstance(material, OpaqueMaterial):
r_value += material.Thickness / material.Conductivity
else:
raise Exception('Cannot calculate R value for material: {}'.format(material))
return r_value
# TODO perimeter of Building, not one surface!!
# TODO distinguish between heated and non-heated in perimeter!
# characteristic size:
B = surface.area() / (0.5 * surface.perimeter())
# Resistance values
R_si, R_se = 0.17, 0.04
R_f = heat_resistance(library.get(surface.Construction))
Lambda = soil_conductivity[soil_type]
# equivalent thickness:
w = wall_thickness
d_t = w + Lambda * (R_f + R_si)
if d_t >= B: # equivalent_thickness >= characteristic_size
u_value = Lambda / (0.457 * B + d_t)
else: # equivalent_thickness < characteristic_size
u_value = 2 * Lambda / (math.pi * B + d_t) * math.log(math.pi * B / d_t + 1)
return u_value
def g_value(self, construction: Construction, library: ObjectLibrary):
"""
works only with simple glazing system
"""
g_value_win = 0
for mat in construction.Layers:
material = library.get(mat)
if isinstance(material, WindowMaterial):
g_value_win = material.gValue
else:
raise Exception('Cannot calculate g_value for material: {m}'.format(m=material))
return g_value_win
def heat_store_capacity(self, obj: Union[Construction, Zone, Building], library: ObjectLibrary):
"""
Calculate heat store capacity of Construction / Zone / Building
For constructions layers from inside are considered until they reach any of the following condition:
- we reach the first insulation layer
- we reach 10 cm into the construction
- we reach the the 1/2 of the construction thickness
:param obj: Construction, Zone or Building
:param library: Object library that holds the data of the Materials
:return: kappa value in [J/m^2*K] for Construction and in [J/K] for Zone
"""
if isinstance(obj, Construction):
d = 0 # [m] position in the construction from inside
kappa = 0
# layers from inside to outside
for layer in obj.Layers[::-1]:
material = library.get(layer)
if isinstance(material, OpaqueMaterial):
if material.Conductivity < 0.1: # insulation material
break
elif d + material.Thickness >= min(obj.thickness(library) / 2, 0.1):
# we reached the 1/2 of the construction thickness or 10 cm
t = min(obj.thickness(library) / 2, 0.1) - d
kappa += material.Density * t * material.SpecificHeat
d += t
break
else:
kappa += material.Density * material.Thickness * material.SpecificHeat
d += material.Thickness
else:
raise Exception('Heat store capacity cannot be calculated for: {m}'.format(m=material))
return kappa # [J/m^2*K]
elif isinstance(obj, Zone):
capacity = 0
for surface in obj.BuildingSurfaces:
if surface.SurfaceType.upper() in ["WALL", "ROOF", "CEILING", "FLOOR", "SLABONGRADE"]:
construction = library.get(surface.Construction)
capacity += surface.area_net() * self.heat_store_capacity(construction, library)
for internal in obj.InternalMasses:
construction = library.get(internal.Construction)
capacity += 2 * internal.Area * self.heat_store_capacity(construction, library)
# we take this 2 times, because both sides of the internal structures are exposed to this zone
return capacity # [J/K]
elif isinstance(obj, Building):
capacity = 0
for zone in obj.Zones:
capacity += self.heat_store_capacity(zone, library)
return capacity # [J/K]
else:
raise Exception('Type of parameter "obj" needs to be one of: Construction, Zone, Building')
def sum_AU_envelope(self, zone: Zone, library: ObjectLibrary):
"""
Calculate Summa A*U for the envelope surfaces in [W/K]
TODO simplified correction factor for heatbridges
"""
sum__au = 0
for surface in zone.BuildingSurfaces:
if surface.OutsideBoundaryCondition.lower() == "outdoors":
u_value = self.u_value(construction=surface.Construction, library=library,
surface_type=surface.SurfaceType)
sum__au += u_value * surface.area_net()
for window in surface.Fenestration:
u_value = self.u_value(construction=window.Construction, library=library,
surface_type=surface.SurfaceType)
sum__au += u_value * window.area()
return sum__au # [W/K]
def sum_AU_ground(self, zone: Zone, library: ObjectLibrary):
"""
Calculate Summa A*U for ground contact surfaces in [W/K]
"""
# get average wall thickness of zone for floor U value calculation
thickness_list = [library.get(surface.Construction).thickness(library) for surface in zone.BuildingSurfaces if
surface.SurfaceType.lower() == 'wall']
wall_thickness = sum(thickness_list) / len(thickness_list)
sum__au = 0
for surface in zone.BuildingSurfaces:
if surface.OutsideBoundaryCondition.lower() in ["ground", "othersideconditionsmodel"]:
# OtherSideConditionsModel in case of Ground Domain
u_value = self.u_value_floor_to_ground(surface, wall_thickness, library) # define to soil type?
sum__au += u_value * surface.area()
return sum__au # [W/K]
def sum_lpsi_ground(self, zone: Zone):
"""
Calculate the heat loss through floor-to-ground perimeter heat bridge in [W/K]
TODO not implemented yet
:param zone:
:return:
"""
return None
def sum_lpsi_envelope(self, zone: Zone):
"""
Calculate the heat loss through envelope heat bridges in [W/K]
TODO not implemented yet
:param zone:
:return:
"""
return None
def heat_transmission_direct(self, zone: Zone, library: ObjectLibrary):
"""
Calculate total direct heat transmission through envelope surfaces (H_tr_D) in [W/K]
TODO only sum A*U is calculated, sum L*psi and point heat bridges are neglected
:param zone:
:param library
:return:
"""
h_tr_d = self.sum_AU_envelope(zone=zone, library=library)
return h_tr_d # [W/K]
def heat_transmission_ground(self, zone: Zone, library: ObjectLibrary):
"""
Calculate total heat transmission through ground contact surfaces (H_tr_T) in [W/K]
TODO only sum A*U is calculated, sum L*psi and point heat bridges are neglected
:param zone:
:param library
:return:
"""
h_tr_t = self.sum_AU_ground(zone=zone, library=library)
return h_tr_t # [W/K]
def heat_energy_transmission(self, zone: Zone, hvac: HVAC, library: ObjectLibrary, heating=True) -> pd.Series:
"""
Calculate the heat transmission (gain or loss) of a zone (Q_tr) in [kWh]
:param zone:
:param hvac:
:param library:
:return: Heat transmission for each month of the year as pandas Series
"""
h_tr_d = self.heat_transmission_direct(zone, library)
h_tr_t = self.heat_transmission_ground(zone, library)
if heating:
theta_i = hvac.Heating.set_point # heating setpoint temperature
else: # cooling
theta_i = hvac.Cooling.set_point # cooling setpoint temperature
# monthly mean temperature (pd.Series)
theta_e_monthly = self.weather_data.loc['Monthly', ('External Temperature', 'Mean')]
# yearly mean temperature (float)
theta_e_year = self.weather_data.loc[('Yearly', 'Yearly'), ('External Temperature', 'Mean')]
# length of months (pd.Series)
delta_t = pd.Series(data=SteadyStateCalculation.month_lengths, index=[str(i) for i in range(1, 13)])
q_tr = ((h_tr_d)*(theta_i - theta_e_monthly) + h_tr_t * (theta_i - theta_e_year)) * delta_t / 1000
return q_tr # [kWh]
def heat_natural_ventilation(self, zone: Zone, hvac: HVAC):
# get natural ventilation ACH
n_req = hvac.required_ach
n_fil = hvac.infiltration_ach
if n_req > n_fil:
n_nat = max(n_req - n_fil, 0.2)
else:
n_nat = 0.2
# any other constant natural ventilation
n_nat += hvac.NaturalVentilation.ach
h_nat_vent = 0.35 * (n_nat + n_fil) * zone.volume() # [W/K]
return h_nat_vent
def heat_natural_ventilation_summer_night(self, zone: Zone, hvac: HVAC):
# calculate natural ventilation extra ACH for summer nights
b_night = 1.5 # TODO calculate from weather data
duration = hvac.NaturalVentilation.summer_night_duration
n_night = hvac.NaturalVentilation.summer_night_ach
h_nat_vent_sum_night = 0.35 * b_night * duration / 24 * n_night * zone.volume() # [W/K]
return h_nat_vent_sum_night
def heat_ventilation(self, zone: Zone, hvac: HVAC, heating=True) -> pd.Series:
"""
H_szell in [W/K]
:param zone:
:param hvac:
:param heating:
:return:
"""
h_nat_vent = self.heat_natural_ventilation(zone, hvac)
h_nat_vent_summer = self.heat_natural_ventilation_summer_night(zone, hvac)
# monthly mean temperature (pd.Series)
theta_e_monthly = self.weather_data.loc['Monthly', ('External Temperature', 'Mean')]
if heating:
h_vent = pd.Series(data=h_nat_vent, index=[str(i) for i in range(1, 13)])
else: # Cooling
# set night ventilation for cooling season months
h_nat_vent_summer = pd.Series(data=h_nat_vent_summer, index=[str(i) for i in range(1, 13)])
h_nat_vent_summer.loc[['1', '2', '3', '4', '10', '11', '12']] = 0 # Cooling season only: May-Sept
h_vent = h_nat_vent + h_nat_vent_summer
return h_vent # [W/K]
def heat_energy_ventilation(self, zone: Zone, hvac: HVAC, heating=True) -> pd.Series:
"""
Q_szell in [kWh]
:param zone:
:param hvac:
:param heating:
:return:
"""
h_vent = self.heat_ventilation(zone, hvac, heating=heating)
if heating:
theta_i = hvac.Heating.set_point # heating setpoint temperature
else: # cooling
theta_i = hvac.Cooling.set_point # cooling setpoint temperature
# monthly mean temperature (pd.Series)
theta_e_monthly = self.weather_data.loc['Monthly', ('External Temperature', 'Mean')]
# length of months (pd.Series)
delta_t = pd.Series(data=SteadyStateCalculation.month_lengths, index=[str(i) for i in range(1, 13)])
q_vent: pd.Series = h_vent * (theta_i - theta_e_monthly) * delta_t / 1000
return q_vent # [kWh]
def heat_energy_solar(self, zone: Zone, library: ObjectLibrary, heating=True) -> pd.Series:
q_sd_zone = pd.Series(data=0, index=[str(i) for i in range(1, 13)])
for surface in zone.BuildingSurfaces:
for window in surface.Fenestration:
construction = library.get(window.Construction)
# g value of glazing system
f_g = 0.9 # solar incident correction factor
g_n = self.g_value(construction, library) # g factor for perpendicular radiation
g_w = g_n * f_g
# area of glazing
a_w = window.glazing_area(mode='FrameWidth', frame_width=0.1) # TODO include frame width in model
# g value of shading
if window.Shading is not None:
shading = library.get(window.Shading)
g_sh = shading.ShadingFactor
if not shading.IsScheduled:
g_sh = 1
if heating:
g_sh = 1
else:
g_sh = 1
# TODO f_s shading factor of external shading surfaces
# total solar radiation energy (pd.Series)
orientation = window.orientation()
g_s = self.weather_data.loc['Monthly', ('Total Solar Radiation Energy', orientation)]
q_sd = g_w * a_w * g_sh * g_s
q_sd_zone += q_sd
return q_sd_zone # [kWh]
def heat_energy_internal(self, zone: Zone, hvac: HVAC) -> pd.Series:
a_n = zone.heated_area()
q_b = hvac.internal_gain
# length of months (pd.Series)
delta_t = pd.Series(data=SteadyStateCalculation.month_lengths, index=[str(i) for i in range(1, 13)])
q_b_zone = a_n * q_b * delta_t / 1000
return q_b_zone # kWh
def gamma_tao_loss_gain(self, zone: Zone, hvac: HVAC, library: ObjectLibrary,
heating: bool) -> Tuple[pd.Series, pd.Series, pd.Series, pd.Series]:
"""
Helper function to calculate gamma (loss/gain ratio) and tao (time factor)
:param zone:
:param hvac:
:param library:
:return: gamma, tao, loss, gain as pd.Series
"""
# Total loss in kWh
q_loss = self.heat_energy_transmission(zone, hvac, library, heating=heating)
q_loss += self.heat_energy_ventilation(zone, hvac, heating=heating)
# Total gain in kWh
q_gain = self.heat_energy_solar(zone, library, heating=heating)
q_gain += self.heat_energy_internal(zone, hvac)
# loss/gain ratio
gamma: pd.Series = q_gain / q_loss
# zone heat store capacity in [kJ/K]
c_m_eff = self.heat_store_capacity(zone, library) / 1000
h_tr_d = self.heat_transmission_direct(zone, library)
h_tr_t = self.heat_transmission_ground(zone, library)
h_vent = self.heat_ventilation(zone, hvac, heating=heating)
# time factor [h] as pd.Series
tao: pd.Series = c_m_eff / 3.6 / (h_tr_d + h_tr_t + h_vent)
return gamma, tao, q_loss, q_gain
def heating_demand(self, zone: Zone, hvac: HVAC, library: ObjectLibrary) -> pd.Series:
"""
Monthly net heating energy in [kWh]
:param zone:
:param hvac:
:param library:
:return: pd.Series with the monthly demand
"""
gamma_h, tao_h, q_loss, q_gain = self.gamma_tao_loss_gain(zone, hvac, library, heating=True)
# numeric factors for monthly calculation in case of heating
a_h_0 = 1
tao_h_0 = 15
# TODO for seasonal calculation:
# a_h_0 = 0.8
# tao_h_0 = 30
a_h = a_h_0 + tao_h / tao_h_0 # pd.Series
df = pd.concat([a_h, gamma_h, q_loss, q_gain], keys=['a', 'gamma', 'loss', 'gain'], axis='columns')
# utilization factor
def utilization_factor(gamma: float, a: float, gain: float):
if gamma > 0 and gamma != 1:
theta = (1 - gamma ** a) / (1 - gamma ** (a + 1))
elif gamma == 1:
theta = a / (a + 1)
else: # gamma <= 0
if gain > 0:
theta = 1 / gamma
else: # q_gain <=0
theta = 1
return theta
# calculate for series:
df['theta'] = df.apply(lambda row: utilization_factor(row['gamma'], row['a'], row['gain']), axis='columns')
# net heating demand
def net_demand(gamma: float, theta: float, loss: float, gain: float):
if gamma <= 0 and gain > 0:
demand = 0
elif gamma > 2:
demand = 0
else:
demand = loss - theta * gain
if demand < 0:
demand = 0
return demand
q_h_net = df.apply(lambda row: net_demand(row['gamma'], row['theta'], row['loss'], row['gain']), axis='columns')
q_h_net.name = zone.Name
return q_h_net
def cooling_demand(self, zone: Zone, hvac: HVAC, library: ObjectLibrary):
"""
Monthly net cooling energy in [kWh]
:param zone:
:param hvac:
:param library:
:return: pd.Series with the monthly demand
"""
gamma_c, tao_c, q_loss, q_gain = self.gamma_tao_loss_gain(zone, hvac, library, heating=False)
# numeric factors for monthly calculation in case of cooling
a_c_0 = 1
tao_c_0 = 15
# TODO for seasonal calculation:
# a_h_0 = 0.8
# tao_h_0 = 30
a_c = a_c_0 + tao_c / tao_c_0 # pd.Series
df = pd.concat([a_c, gamma_c, q_loss, q_gain], keys=['a', 'gamma', 'loss', 'gain'], axis='columns')
# utilization factor
def utilization_factor(gamma: float, a: float):
if gamma > 0 and gamma != 1:
theta = (1 - gamma ** (-a)) / (1 - gamma ** (-(a + 1)))
elif gamma == 1:
theta = a / (a + 1)
else: # gamma <= 0
theta = 1
return theta
# calculate for series:
df['theta'] = df.apply(lambda row: utilization_factor(row['gamma'], row['a']), axis='columns')
# net heating demand
def net_demand(gamma: float, theta: float, loss: float, gain: float):
if 1 / gamma > 2:
demand = 0
else:
demand = gain - theta * loss
if demand < 0:
demand = 0
return demand
q_c_net = df.apply(lambda row: net_demand(row['gamma'], row['theta'], row['loss'], row['gain']), axis='columns')
q_c_net.name = zone.Name
return q_c_net
def lighting_demand(self, zone: Zone, hvac: HVAC):
"""
Lighting demand with simplified calculation
:param zone:
:param hvac:
:return: Total lighting demand of zone in [kWh/year]
"""
# power density in W/m2
p = hvac.Lighting.power_density
f_fe = 1 # non-dimmable lights
f_szab = 1
t_nappal = 3000 # [h]
t_ejjel = 2000 # [h]
envelope_area = 0
glazing_area = 0
for surface in zone.BuildingSurfaces:
if surface.OutsideBoundaryCondition.lower() == "outdoors" and surface.SurfaceType.lower() == 'wall':
envelope_area += surface.area()
for window in surface.Fenestration:
glazing_area += window.glazing_area(mode='FrameWidth', frame_width=0.1)
# TODO include frame width in model
if envelope_area == 0:
glazing_ratio = 0
else:
glazing_ratio = glazing_area / envelope_area
if glazing_ratio > 0.8:
f_nappal = 0.56
elif 0.8 > glazing_ratio > 0.4:
f_nappal = 0.7
else:
f_nappal = 0.83
w_vil = f_fe * p * f_szab * (t_nappal * f_nappal + t_ejjel) * zone.heated_area() / 1000
return w_vil
def calculate(self, building: Building) -> pd.DataFrame:
"""
Calculate heating cooling and lighting demand of building for each month
:param building:
:return: demands as pandas DataFrame
"""
heating = []
cooling = []
lights = []
for zone in building.Zones:
heating.append(self.heating_demand(zone=zone, hvac=building.HVAC, library=building.Library))
cooling.append(self.cooling_demand(zone=zone, hvac=building.HVAC, library=building.Library))
lights.append(self.lighting_demand(zone=zone, hvac=building.HVAC))
heating_demand = pd.concat(heating, axis='columns').sum(axis='columns')
cooling_demand = pd.concat(cooling, axis='columns').sum(axis='columns')
lights_demand = sum(lights) # float (yearly demand)
result = pd.concat([heating_demand, cooling_demand], axis='columns',
keys=['heating', 'cooling'])
result['lights'] = lights_demand / 12 # (monthly)
return result
|
[
"pandas.DataFrame",
"subprocess.run",
"pandas.MultiIndex.from_tuples",
"shutil.rmtree",
"pandas.read_json",
"pathlib.Path",
"uuid.uuid1",
"requests.get",
"requests.post",
"math.log",
"pandas.concat",
"logging.getLogger"
] |
[((528, 555), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (545, 555), False, 'import logging\n'), ((1851, 1895), 'requests.get', 'requests.get', ([], {'url': 'url', 'params': "{'name': name}"}), "(url=url, params={'name': name})\n", (1863, 1895), False, 'import requests\n'), ((2453, 2501), 'requests.post', 'requests.post', ([], {'url': 'url', 'params': 'params', 'data': 'data'}), '(url=url, params=params, data=data)\n', (2466, 2501), False, 'import requests\n'), ((2845, 2882), 'requests.get', 'requests.get', ([], {'url': 'url', 'params': 'payload'}), '(url=url, params=payload)\n', (2857, 2882), False, 'import requests\n'), ((3310, 3347), 'requests.get', 'requests.get', ([], {'url': 'url', 'params': 'payload'}), '(url=url, params=payload)\n', (3322, 3347), False, 'import requests\n'), ((3520, 3564), 'requests.get', 'requests.get', ([], {'url': 'url', 'params': "{'name': name}"}), "(url=url, params={'name': name})\n", (3532, 3564), False, 'import requests\n'), ((3796, 3854), 'requests.get', 'requests.get', ([], {'url': 'url', 'params': "{'name': name, 'id': sim_id}"}), "(url=url, params={'name': name, 'id': sim_id})\n", (3808, 3854), False, 'import requests\n'), ((9742, 9761), 'subprocess.run', 'subprocess.run', (['cmd'], {}), '(cmd)\n', (9756, 9761), False, 'import subprocess\n'), ((18533, 18567), 'pandas.concat', 'pd.concat', (['res_dfs'], {'axis': '"""columns"""'}), "(res_dfs, axis='columns')\n", (18542, 18567), True, 'import pandas as pd\n'), ((21107, 21150), 'pandas.read_json', 'pd.read_json', (['response_json'], {'orient': '"""split"""'}), "(response_json, orient='split')\n", (21119, 21150), True, 'import pandas as pd\n'), ((23148, 23188), 'pandas.MultiIndex.from_tuples', 'pd.MultiIndex.from_tuples', (['column_labels'], {}), '(column_labels)\n', (23173, 23188), True, 'import pandas as pd\n'), ((23204, 23243), 'pandas.MultiIndex.from_tuples', 'pd.MultiIndex.from_tuples', (['index_labels'], {}), '(index_labels)\n', (23229, 
23243), True, 'import pandas as pd\n'), ((23267, 23305), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': 'cols', 'index': 'indx'}), '(columns=cols, index=indx)\n', (23279, 23305), True, 'import pandas as pd\n'), ((41810, 41908), 'pandas.concat', 'pd.concat', (['[a_h, gamma_h, q_loss, q_gain]'], {'keys': "['a', 'gamma', 'loss', 'gain']", 'axis': '"""columns"""'}), "([a_h, gamma_h, q_loss, q_gain], keys=['a', 'gamma', 'loss',\n 'gain'], axis='columns')\n", (41819, 41908), True, 'import pandas as pd\n'), ((43697, 43795), 'pandas.concat', 'pd.concat', (['[a_c, gamma_c, q_loss, q_gain]'], {'keys': "['a', 'gamma', 'loss', 'gain']", 'axis': '"""columns"""'}), "([a_c, gamma_c, q_loss, q_gain], keys=['a', 'gamma', 'loss',\n 'gain'], axis='columns')\n", (43706, 43795), True, 'import pandas as pd\n'), ((46939, 47031), 'pandas.concat', 'pd.concat', (['[heating_demand, cooling_demand]'], {'axis': '"""columns"""', 'keys': "['heating', 'cooling']"}), "([heating_demand, cooling_demand], axis='columns', keys=['heating',\n 'cooling'])\n", (46948, 47031), True, 'import pandas as pd\n'), ((1374, 1444), 'requests.post', 'requests.post', ([], {'url': 'url', 'params': "{'name': name, 'type': 'epw'}", 'data': 'epw'}), "(url=url, params={'name': name, 'type': 'epw'}, data=epw)\n", (1387, 1444), False, 'import requests\n'), ((1539, 1595), 'requests.post', 'requests.post', ([], {'url': 'url', 'params': "{'type': 'idd'}", 'data': 'idd'}), "(url=url, params={'type': 'idd'}, data=idd)\n", (1552, 1595), False, 'import requests\n'), ((1697, 1760), 'requests.post', 'requests.post', ([], {'url': 'url', 'params': "{'type': 'vars'}", 'json': 'variables'}), "(url=url, params={'type': 'vars'}, json=variables)\n", (1710, 1760), False, 'import requests\n'), ((9076, 9115), 'pathlib.Path', 'Path', (['f"""{self.output_directory}_{name}"""'], {}), "(f'{self.output_directory}_{name}')\n", (9080, 9115), False, 'from pathlib import Path\n'), ((17170, 17209), 'pathlib.Path', 'Path', 
(['f"""{self.output_directory}_{name}"""'], {}), "(f'{self.output_directory}_{name}')\n", (17174, 17209), False, 'from pathlib import Path\n'), ((20292, 20331), 'pathlib.Path', 'Path', (['f"""{self.output_directory}_{name}"""'], {}), "(f'{self.output_directory}_{name}')\n", (20296, 20331), False, 'from pathlib import Path\n'), ((21238, 21277), 'pathlib.Path', 'Path', (['f"""{self.output_directory}_{name}"""'], {}), "(f'{self.output_directory}_{name}')\n", (21242, 21277), False, 'from pathlib import Path\n'), ((21339, 21372), 'shutil.rmtree', 'shutil.rmtree', (['simulation_address'], {}), '(simulation_address)\n', (21352, 21372), False, 'import shutil\n'), ((9040, 9052), 'uuid.uuid1', 'uuid.uuid1', ([], {}), '()\n', (9050, 9052), False, 'import uuid\n'), ((27329, 27360), 'math.log', 'math.log', (['(math.pi * B / d_t + 1)'], {}), '(math.pi * B / d_t + 1)\n', (27337, 27360), False, 'import math\n'), ((46725, 46759), 'pandas.concat', 'pd.concat', (['heating'], {'axis': '"""columns"""'}), "(heating, axis='columns')\n", (46734, 46759), True, 'import pandas as pd\n'), ((46805, 46839), 'pandas.concat', 'pd.concat', (['cooling'], {'axis': '"""columns"""'}), "(cooling, axis='columns')\n", (46814, 46839), True, 'import pandas as pd\n'), ((22083, 22093), 'pathlib.Path', 'Path', (['data'], {}), '(data)\n', (22087, 22093), False, 'from pathlib import Path\n')]
|
# -*- coding: utf-8 -*-
"""
Utility functions for the spines versioning sub-package.
"""
#
# Imports
#
import difflib
import inspect
import re
from textwrap import dedent
from types import FunctionType
from typing import Dict
from typing import List
import unicodedata
from ..vendor import autopep8 as _v_autopep8
#
# Functions
#
def slugify(value: str, allow_unicode: bool = False) -> str:
"""Slugifys the given string
Convert to ASCII if 'allow_unicode' is False. Convert spaces to hyphens.
Remove characters that aren't alphanumerics, underscores, or hyphens.
Convert to lowercase. Also strip leading and trailing whitespace.
.. note::
Modified (barely) from Django:
https://github.com/django/django/blob/master/django/utils/text.py
Parameters
----------
value : str
String to slugify.
allow_unicode : bool, optional
Whether or not to allow unicode characters.
Returns
-------
str
Slugified string.
"""
value = str(value)
if allow_unicode:
value = unicodedata.normalize('NFKC', value)
else:
value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore')\
.decode('ascii')
value = re.sub(r'[^\w\s-]', '_', value).strip().lower()
return re.sub(r'[-\s]+', '-', value)
def get_function_source(func):
"""Gets the source code for the given function
Parameters
----------
func : callable
Function to get source code of.
Returns
-------
str
Function source code, properly formatted.
"""
raw_source = dedent(inspect.getsource(func))
return _v_autopep8.fix_code(raw_source)
def get_doc_string(obj):
"""Gets the documentation string for the given object
Parameters
----------
obj : object
Object to get docstring for.
Returns
-------
str
Docstring of the given object.
"""
return inspect.cleandoc(inspect.getdoc(obj))
def get_diff(a: [str, List[str]], b: [str, List[str]], n=3):
"""Gets the differences between text data
Parameters
----------
a : :obj:`str` or :obj:`list` of :obj:`str`
Text to compare from.
b : :obj:`str` or :obj:`list` of :obj:`str`
Text to compare with.
n : int, optional
Lines of context to show around differences.
Returns
-------
str
Differences between the texts.
"""
if not isinstance(a, list):
a = a.splitlines()
if not isinstance(b, list):
b = b.splitlines()
return ''.join(
difflib.context_diff(a, b, fromfile='Current', tofile='New', n=n)
)
def get_changes(a: [str, List[str]], b: [str, List[str]]) -> List[tuple]:
"""Gets the full set of changes required to go from a to b
Parameters
----------
a : :obj:`str` or :obj:`list` of :obj:`str`
Text to start from.
b : :obj:`str` or :obj:`list` of :obj:`str`
Text to get changes to get to.
Returns
-------
:obj:`list` of :obj:`tuple`
List of five-tuples of operation, from start index, from end
index, to start index and to end index.
"""
if not isinstance(a, str):
a = '\n'.join(a)
if not isinstance(b, list):
b = '\n'.join(b)
s = difflib.SequenceMatcher(None, a, b)
return s.get_opcodes()
def get_function_bytes(func: FunctionType):
"""Gets a byte-representation of a function object
Parameters
----------
func : callable
Function to get bytes for.
Returns
-------
bytes
Byte representation of the function.
"""
bytecode = func.__code__.co_code
consts = func.__code__.co_consts[1:]
dep_objs = func.__code__.co_names
all_vars = func.__code__.co_varnames
ret = []
for v in (consts, dep_objs, all_vars):
for i_v in v:
ret.append('str:%s' % i_v)
return bytecode + ','.join(ret).encode()
def get_function_parameters(cls, obj: FunctionType) -> Dict[str, object]:
"""Gets the parameters for a function object
Parameters
----------
func : callable
Function to get parameters for.
Returns
-------
:obj:`dict` of :obj:`str`, :obj:`object`
Dictionary of parameter name to default value (if any, otherwise
:obj:`None`).
"""
fn_sig = inspect.signature(obj)
return {
k: None if v.default is fn_sig.empty else v.default
for k, v in fn_sig.parameters.items()
}
|
[
"unicodedata.normalize",
"inspect.getdoc",
"difflib.SequenceMatcher",
"difflib.context_diff",
"inspect.signature",
"inspect.getsource",
"re.sub"
] |
[((1314, 1343), 're.sub', 're.sub', (['"""[-\\\\s]+"""', '"""-"""', 'value'], {}), "('[-\\\\s]+', '-', value)\n", (1320, 1343), False, 'import re\n'), ((3311, 3346), 'difflib.SequenceMatcher', 'difflib.SequenceMatcher', (['None', 'a', 'b'], {}), '(None, a, b)\n', (3334, 3346), False, 'import difflib\n'), ((4372, 4394), 'inspect.signature', 'inspect.signature', (['obj'], {}), '(obj)\n', (4389, 4394), False, 'import inspect\n'), ((1072, 1108), 'unicodedata.normalize', 'unicodedata.normalize', (['"""NFKC"""', 'value'], {}), "('NFKC', value)\n", (1093, 1108), False, 'import unicodedata\n'), ((1635, 1658), 'inspect.getsource', 'inspect.getsource', (['func'], {}), '(func)\n', (1652, 1658), False, 'import inspect\n'), ((1983, 2002), 'inspect.getdoc', 'inspect.getdoc', (['obj'], {}), '(obj)\n', (1997, 2002), False, 'import inspect\n'), ((2602, 2667), 'difflib.context_diff', 'difflib.context_diff', (['a', 'b'], {'fromfile': '"""Current"""', 'tofile': '"""New"""', 'n': 'n'}), "(a, b, fromfile='Current', tofile='New', n=n)\n", (2622, 2667), False, 'import difflib\n'), ((1255, 1287), 're.sub', 're.sub', (['"""[^\\\\w\\\\s-]"""', '"""_"""', 'value'], {}), "('[^\\\\w\\\\s-]', '_', value)\n", (1261, 1287), False, 'import re\n'), ((1135, 1171), 'unicodedata.normalize', 'unicodedata.normalize', (['"""NFKD"""', 'value'], {}), "('NFKD', value)\n", (1156, 1171), False, 'import unicodedata\n')]
|
import scrapy
from product_scraper.items import Product
from scrapy.spiders import SitemapSpider, Rule
class SitemapSpider(SitemapSpider):
name = "sitemap_spider"
sitemap_urls = ['https://www.allrecipes.com/sitemap.xml']
sitemap_rules = [
('/recipe/', 'parse_product')
]
def parse_product(self, response):
item = Product()
item['name'] = response.xpath("//h1[contains(@class,'headline') and contains(@class,'heading-content')]/text()").get()
ingredients = response.xpath("//input[@data-tracking-label='ingredient clicked']")
ingredients_obj = []
ingredients_list = ''
for ingredient in ingredients:
ingredient_obj = {
'name': ingredient.xpath(".//@data-ingredient").get(),
'qty': ingredient.xpath(".//@data-quantity").get(),
'unit': ingredient.xpath(".//@data-unit").get(),
}
ingredients_obj.append(ingredient_obj)
ingredients_list = ingredients_list + ingredient_obj['name'] + ','
item['ingredients'] = ingredients_obj
item['ingredients_list'] = ingredients_list
item['description'] = response.xpath("//li[contains(@class,'instructions-section-item')]//div//p/text()").getall()
item['img_url'] = response.xpath("//div[contains(@class,'primary-media-with-filmstrip')]//div//noscript//img/@src").get(0)
item['label'] = response.xpath("//li[@class='breadcrumbs__item'][3]//a/span/text()").get()
# item['price'] = response.xpath("//div[@class='my-4']/span/text()").get()
# item['title'] = response.xpath('//section[1]//h2/text()').get()
# item['img_url'] = response.xpath("//div[@class='product-slider']//img/@src").get(0)
return item
|
[
"product_scraper.items.Product"
] |
[((352, 361), 'product_scraper.items.Product', 'Product', ([], {}), '()\n', (359, 361), False, 'from product_scraper.items import Product\n')]
|
import numpy as np
from mltk.core.preprocess.audio.audio_feature_generator import AudioFeatureGenerator
from mltk.core.preprocess.audio.audio_feature_generator.tests.data import (
DEFAULT_SETTINGS,
YES_INPUT_AUDIO,
YES_OUTPUT_FEATURES_INT8,
NO_INPUT_AUDIO,
NO_OUTPUT_FEATURES_INT8
)
def test_yes_samples():
settings = DEFAULT_SETTINGS
mfe = AudioFeatureGenerator(settings)
sample = np.asarray(YES_INPUT_AUDIO, dtype=np.int16)
calculated = mfe.process_sample(sample, dtype=np.int8)
expected = np.reshape(np.array(YES_OUTPUT_FEATURES_INT8, dtype=np.int8), settings.spectrogram_shape)
assert np.allclose(calculated, expected)
def test_no_samples():
settings = DEFAULT_SETTINGS
mfe = AudioFeatureGenerator(settings)
sample = np.asarray(NO_INPUT_AUDIO, dtype=np.int16)
calculated = mfe.process_sample(sample, dtype=np.int8)
expected = np.reshape(np.array(NO_OUTPUT_FEATURES_INT8, dtype=np.int8), settings.spectrogram_shape)
assert np.allclose(calculated, expected)
|
[
"numpy.asarray",
"mltk.core.preprocess.audio.audio_feature_generator.AudioFeatureGenerator",
"numpy.array",
"numpy.allclose"
] |
[((374, 405), 'mltk.core.preprocess.audio.audio_feature_generator.AudioFeatureGenerator', 'AudioFeatureGenerator', (['settings'], {}), '(settings)\n', (395, 405), False, 'from mltk.core.preprocess.audio.audio_feature_generator import AudioFeatureGenerator\n'), ((419, 462), 'numpy.asarray', 'np.asarray', (['YES_INPUT_AUDIO'], {'dtype': 'np.int16'}), '(YES_INPUT_AUDIO, dtype=np.int16)\n', (429, 462), True, 'import numpy as np\n'), ((644, 677), 'numpy.allclose', 'np.allclose', (['calculated', 'expected'], {}), '(calculated, expected)\n', (655, 677), True, 'import numpy as np\n'), ((745, 776), 'mltk.core.preprocess.audio.audio_feature_generator.AudioFeatureGenerator', 'AudioFeatureGenerator', (['settings'], {}), '(settings)\n', (766, 776), False, 'from mltk.core.preprocess.audio.audio_feature_generator import AudioFeatureGenerator\n'), ((790, 832), 'numpy.asarray', 'np.asarray', (['NO_INPUT_AUDIO'], {'dtype': 'np.int16'}), '(NO_INPUT_AUDIO, dtype=np.int16)\n', (800, 832), True, 'import numpy as np\n'), ((1013, 1046), 'numpy.allclose', 'np.allclose', (['calculated', 'expected'], {}), '(calculated, expected)\n', (1024, 1046), True, 'import numpy as np\n'), ((553, 602), 'numpy.array', 'np.array', (['YES_OUTPUT_FEATURES_INT8'], {'dtype': 'np.int8'}), '(YES_OUTPUT_FEATURES_INT8, dtype=np.int8)\n', (561, 602), True, 'import numpy as np\n'), ((923, 971), 'numpy.array', 'np.array', (['NO_OUTPUT_FEATURES_INT8'], {'dtype': 'np.int8'}), '(NO_OUTPUT_FEATURES_INT8, dtype=np.int8)\n', (931, 971), True, 'import numpy as np\n')]
|
from nf_common_source.code.services.dataframe_service.dataframe_mergers import left_merge_dataframes
from uniclass_to_nf_ea_com_source.b_code.configurations.common_constants.uniclass_bclearer_constants import \
LINKED_TABLE_UNICLASS_ITEMS_TO_RANKS, UNICLASS_CLASSIFICATION_TYPE_OF_RELATION, OBJECT_NAME_COLUMN_NAME, \
UUID_COLUMN_NAME, RELATION_TYPE_UUIDS_COLUMN_NAME, RELATION_TYPE_NAMES_COLUMN_NAME
def add_uniclass_items_to_ranks_link_types_to_domain_tables(
evolve_8_domain_tables: dict) \
-> dict:
uniclass_top_level_core_objects = \
evolve_8_domain_tables[
'uniclass_top_level_core_objects']
linked_table_uniclass_items_to_ranks = \
evolve_8_domain_tables[
LINKED_TABLE_UNICLASS_ITEMS_TO_RANKS]
linked_table_uniclass_items_to_ranks[
RELATION_TYPE_NAMES_COLUMN_NAME] = \
UNICLASS_CLASSIFICATION_TYPE_OF_RELATION
linked_table_uniclass_items_to_ranks_with_link_type_uuids = \
left_merge_dataframes(
master_dataframe=linked_table_uniclass_items_to_ranks,
master_dataframe_key_columns=[RELATION_TYPE_NAMES_COLUMN_NAME],
merge_suffixes=['1', '2'],
foreign_key_dataframe=uniclass_top_level_core_objects,
foreign_key_dataframe_fk_columns=[OBJECT_NAME_COLUMN_NAME],
foreign_key_dataframe_other_column_rename_dictionary={
UUID_COLUMN_NAME: RELATION_TYPE_UUIDS_COLUMN_NAME})
evolve_8_domain_tables[
LINKED_TABLE_UNICLASS_ITEMS_TO_RANKS] = \
linked_table_uniclass_items_to_ranks_with_link_type_uuids
return \
evolve_8_domain_tables
|
[
"nf_common_source.code.services.dataframe_service.dataframe_mergers.left_merge_dataframes"
] |
[((988, 1402), 'nf_common_source.code.services.dataframe_service.dataframe_mergers.left_merge_dataframes', 'left_merge_dataframes', ([], {'master_dataframe': 'linked_table_uniclass_items_to_ranks', 'master_dataframe_key_columns': '[RELATION_TYPE_NAMES_COLUMN_NAME]', 'merge_suffixes': "['1', '2']", 'foreign_key_dataframe': 'uniclass_top_level_core_objects', 'foreign_key_dataframe_fk_columns': '[OBJECT_NAME_COLUMN_NAME]', 'foreign_key_dataframe_other_column_rename_dictionary': '{UUID_COLUMN_NAME: RELATION_TYPE_UUIDS_COLUMN_NAME}'}), "(master_dataframe=linked_table_uniclass_items_to_ranks,\n master_dataframe_key_columns=[RELATION_TYPE_NAMES_COLUMN_NAME],\n merge_suffixes=['1', '2'], foreign_key_dataframe=\n uniclass_top_level_core_objects, foreign_key_dataframe_fk_columns=[\n OBJECT_NAME_COLUMN_NAME],\n foreign_key_dataframe_other_column_rename_dictionary={UUID_COLUMN_NAME:\n RELATION_TYPE_UUIDS_COLUMN_NAME})\n", (1009, 1402), False, 'from nf_common_source.code.services.dataframe_service.dataframe_mergers import left_merge_dataframes\n')]
|
import torch
import torch.nn as nn
import torch.nn.functional as F
class kiunet3d(nn.Module): #
def __init__(self, c=4, n=16, num_classes=5):
super(kiunet3d, self).__init__()
# Entry flow
self.encoder1 = nn.Conv3d(c, n, kernel_size=3, padding=1, stride=1, bias=False) # H//2
self.encoder2 = nn.Conv3d(n, 2 * n, kernel_size=3, padding=1, stride=1, bias=False)
self.encoder3 = nn.Conv3d(2 * n, 4 * n, kernel_size=3, padding=1, stride=1, bias=False)
self.kencoder1 = nn.Conv3d(c, n, kernel_size=3, padding=1, stride=1, bias=False)
self.kencoder2 = nn.Conv3d(n, 2 * n, kernel_size=3, padding=1, stride=1, bias=False)
self.kencoder3 = nn.Conv3d(2 * n, 2 * n, kernel_size=3, padding=1, stride=1, bias=False)
self.downsample1 = nn.MaxPool3d(2, stride=2)
self.downsample2 = nn.MaxPool3d(2, stride=2)
self.downsample3 = nn.MaxPool3d(2, stride=2)
self.kdownsample1 = nn.MaxPool3d(2, stride=2)
self.kdownsample2 = nn.MaxPool3d(2, stride=2)
self.kdownsample3 = nn.MaxPool3d(2, stride=2)
self.upsample1 = nn.Upsample(scale_factor=2, mode='trilinear', align_corners=False) # H//8
self.upsample2 = nn.Upsample(scale_factor=2, mode='trilinear', align_corners=False) # H//4
self.upsample3 = nn.Upsample(scale_factor=2, mode='trilinear', align_corners=False) # H//2
self.kupsample1 = nn.Upsample(scale_factor=2, mode='trilinear', align_corners=False) # H//8
self.kupsample2 = nn.Upsample(scale_factor=2, mode='trilinear', align_corners=False) # H//4
self.kupsample3 = nn.Upsample(scale_factor=2, mode='trilinear', align_corners=False) # H//2
self.decoder1 = nn.Conv3d(4 * n, 2 * n, kernel_size=3, padding=1, stride=1, bias=False)
self.decoder2 = nn.Conv3d(2 * n, 2 * n, kernel_size=3, padding=1, stride=1, bias=False)
self.decoder3 = nn.Conv3d(2 * n, c, kernel_size=3, padding=1, stride=1, bias=False)
self.kdecoder1 = nn.Conv3d(2 * n, 2 * n, kernel_size=3, padding=1, stride=1, bias=False)
self.kdecoder2 = nn.Conv3d(2 * n, 2 * n, kernel_size=3, padding=1, stride=1, bias=False)
self.kdecoder3 = nn.Conv3d(2 * n, c, kernel_size=3, padding=1, stride=1, bias=False)
self.intere1_1 = nn.Conv3d(n, n, 3, stride=1, padding=1)
# self.inte1_1bn = nn.BatchNorm2d(16)
self.intere2_1 = nn.Conv3d(2 * n, 2 * n, 3, stride=1, padding=1)
# self.inte2_1bn = nn.BatchNorm2d(32)
self.intere3_1 = nn.Conv3d(2 * n, 4 * n, 3, stride=1, padding=1)
# self.inte3_1bn = nn.BatchNorm2d(64)
self.intere1_2 = nn.Conv3d(n, n, 3, stride=1, padding=1)
# self.inte1_2bn = nn.BatchNorm2d(16)
self.intere2_2 = nn.Conv3d(2 * n, 2 * n, 3, stride=1, padding=1)
# self.inte2_2bn = nn.BatchNorm2d(32)
self.intere3_2 = nn.Conv3d(4 * n, 2 * n, 3, stride=1, padding=1)
# self.inte3_2bn = nn.BatchNorm2d(64)
self.interd1_1 = nn.Conv3d(2 * n, 2 * n, 3, stride=1, padding=1)
# self.intd1_1bn = nn.BatchNorm2d(32)
self.interd2_1 = nn.Conv3d(2 * n, 2 * n, 3, stride=1, padding=1)
# self.intd2_1bn = nn.BatchNorm2d(16)
self.interd3_1 = nn.Conv3d(n, n, 3, stride=1, padding=1)
# self.intd3_1bn = nn.BatchNorm2d(64)
self.interd1_2 = nn.Conv3d(2 * n, 2 * n, 3, stride=1, padding=1)
# self.intd1_2bn = nn.BatchNorm2d(32)
self.interd2_2 = nn.Conv3d(2 * n, 2 * n, 3, stride=1, padding=1)
# self.intd2_2bn = nn.BatchNorm2d(16)
self.interd3_2 = nn.Conv3d(n, n, 3, stride=1, padding=1)
# self.intd3_2bn = nn.BatchNorm2d(64)
self.seg = nn.Conv3d(c, num_classes, kernel_size=1, padding=0, stride=1, bias=False)
self.softmax = nn.Softmax(dim=1)
# Initialization
for m in self.modules():
if isinstance(m, nn.Conv3d):
torch.nn.init.torch.nn.init.kaiming_normal_(m.weight) #
elif isinstance(m, nn.BatchNorm3d) or isinstance(m, nn.GroupNorm):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
    def forward(self, x):
        """Dual-branch KiU-Net 3D forward pass.

        Two parallel streams are run and repeatedly fused:
        - ``out``  (U-Net branch): max-pools on the way down, interpolates up.
        - ``out1`` (Ki-Net branch): interpolates up first, max-pools down.
        After every stage a Cross Residual Fusion Block (CRFB) exchanges
        features between branches through a conv + resampling cross-link.

        NOTE(review): the scale factors (0.25 ... 64) only line up if the
        spatial sides of ``x`` are divisible by 8 -- confirm with callers.
        Returns the raw logits of the final 1x1x1 ``seg`` conv; ``self.softmax``
        is defined in ``__init__`` but deliberately not applied here.
        """
        # Encoder
        out = F.relu(F.max_pool3d(self.encoder1(x), 2, 2))  # U-Net branch (downsample)
        out1 = F.relu(F.interpolate(self.kencoder1(x), scale_factor=2, mode='trilinear'))  # Ki-Net branch (upsample)
        tmp = out  # keep pre-fusion U-Net features for the reverse CRFB direction
        out = torch.add(out, F.interpolate(F.relu(self.intere1_1(out1)), scale_factor=0.25, mode='trilinear'))  # CRFB: Ki-Net -> U-Net
        out1 = torch.add(out1, F.interpolate(F.relu(self.intere1_2(tmp)), scale_factor=4, mode='trilinear'))  # CRFB: U-Net -> Ki-Net
        u1 = out  # skip conn
        o1 = out1  # skip conn
        out = F.relu(F.max_pool3d(self.encoder2(out), 2, 2))
        out1 = F.relu(F.interpolate(self.kencoder2(out1), scale_factor=2, mode='trilinear'))
        tmp = out
        out = torch.add(out, F.interpolate(F.relu(self.intere2_1(out1)), scale_factor=0.0625, mode='trilinear'))
        out1 = torch.add(out1, F.interpolate(F.relu(self.intere2_2(tmp)), scale_factor=16, mode='trilinear'))
        u2 = out  # skip conn
        o2 = out1  # skip conn
        out = F.relu(F.max_pool3d(self.encoder3(out), 2, 2))
        out1 = F.relu(F.interpolate(self.kencoder3(out1), scale_factor=2, mode='trilinear'))
        tmp = out
        out = torch.add(out, F.interpolate(F.relu(self.intere3_1(out1)), scale_factor=0.015625, mode='trilinear'))
        out1 = torch.add(out1, F.interpolate(F.relu(self.intere3_2(tmp)), scale_factor=64, mode='trilinear'))
        ### End of encoder block
        ### Start Decoder
        out = F.relu(F.interpolate(self.decoder1(out), scale_factor=2, mode='trilinear'))  # U-NET (upsample)
        out1 = F.relu(F.max_pool3d(self.kdecoder1(out1), 2, 2))  # Ki-NET (downsample)
        tmp = out
        out = torch.add(out, F.interpolate(F.relu(self.interd1_1(out1)), scale_factor=0.0625, mode='trilinear'))
        out1 = torch.add(out1, F.interpolate(F.relu(self.interd1_2(tmp)), scale_factor=16, mode='trilinear'))
        out = torch.add(out, u2)  # skip conn
        out1 = torch.add(out1, o2)  # skip conn
        out = F.relu(F.interpolate(self.decoder2(out), scale_factor=2, mode='trilinear'))
        out1 = F.relu(F.max_pool3d(self.kdecoder2(out1), 2, 2))
        tmp = out
        out = torch.add(out, F.interpolate(F.relu(self.interd2_1(out1)), scale_factor=0.25, mode='trilinear'))
        out1 = torch.add(out1, F.interpolate(F.relu(self.interd2_2(tmp)), scale_factor=4, mode='trilinear'))
        out = torch.add(out, u1)
        out1 = torch.add(out1, o1)
        out = F.relu(F.interpolate(self.decoder3(out), scale_factor=2, mode='trilinear'))
        out1 = F.relu(F.max_pool3d(self.kdecoder3(out1), 2, 2))
        out = torch.add(out, out1)  # fusion of both branches
        out = F.relu(self.seg(out))  # 1*1 conv
        # out = self.soft(out)
        return out
if __name__ == '__main__':
    # Smoke test: push one random single-channel 32^3 volume through the net
    # and report the output shape.
    sample = torch.rand(1, 1, 32, 32, 32)
    model = kiunet3d(c=1, n=1, num_classes=2)
    prediction = model(sample)
    print(prediction.shape)
|
[
"torch.nn.Conv3d",
"torch.add",
"torch.nn.Upsample",
"torch.nn.init.constant_",
"torch.nn.Softmax",
"torch.rand",
"torch.nn.MaxPool3d",
"torch.nn.init.torch.nn.init.kaiming_normal_"
] |
[((6915, 6943), 'torch.rand', 'torch.rand', (['(1)', '(1)', '(32)', '(32)', '(32)'], {}), '(1, 1, 32, 32, 32)\n', (6925, 6943), False, 'import torch\n'), ((236, 299), 'torch.nn.Conv3d', 'nn.Conv3d', (['c', 'n'], {'kernel_size': '(3)', 'padding': '(1)', 'stride': '(1)', 'bias': '(False)'}), '(c, n, kernel_size=3, padding=1, stride=1, bias=False)\n', (245, 299), True, 'import torch.nn as nn\n'), ((332, 399), 'torch.nn.Conv3d', 'nn.Conv3d', (['n', '(2 * n)'], {'kernel_size': '(3)', 'padding': '(1)', 'stride': '(1)', 'bias': '(False)'}), '(n, 2 * n, kernel_size=3, padding=1, stride=1, bias=False)\n', (341, 399), True, 'import torch.nn as nn\n'), ((424, 495), 'torch.nn.Conv3d', 'nn.Conv3d', (['(2 * n)', '(4 * n)'], {'kernel_size': '(3)', 'padding': '(1)', 'stride': '(1)', 'bias': '(False)'}), '(2 * n, 4 * n, kernel_size=3, padding=1, stride=1, bias=False)\n', (433, 495), True, 'import torch.nn as nn\n'), ((522, 585), 'torch.nn.Conv3d', 'nn.Conv3d', (['c', 'n'], {'kernel_size': '(3)', 'padding': '(1)', 'stride': '(1)', 'bias': '(False)'}), '(c, n, kernel_size=3, padding=1, stride=1, bias=False)\n', (531, 585), True, 'import torch.nn as nn\n'), ((611, 678), 'torch.nn.Conv3d', 'nn.Conv3d', (['n', '(2 * n)'], {'kernel_size': '(3)', 'padding': '(1)', 'stride': '(1)', 'bias': '(False)'}), '(n, 2 * n, kernel_size=3, padding=1, stride=1, bias=False)\n', (620, 678), True, 'import torch.nn as nn\n'), ((704, 775), 'torch.nn.Conv3d', 'nn.Conv3d', (['(2 * n)', '(2 * n)'], {'kernel_size': '(3)', 'padding': '(1)', 'stride': '(1)', 'bias': '(False)'}), '(2 * n, 2 * n, kernel_size=3, padding=1, stride=1, bias=False)\n', (713, 775), True, 'import torch.nn as nn\n'), ((804, 829), 'torch.nn.MaxPool3d', 'nn.MaxPool3d', (['(2)'], {'stride': '(2)'}), '(2, stride=2)\n', (816, 829), True, 'import torch.nn as nn\n'), ((857, 882), 'torch.nn.MaxPool3d', 'nn.MaxPool3d', (['(2)'], {'stride': '(2)'}), '(2, stride=2)\n', (869, 882), True, 'import torch.nn as nn\n'), ((910, 935), 'torch.nn.MaxPool3d', 
'nn.MaxPool3d', (['(2)'], {'stride': '(2)'}), '(2, stride=2)\n', (922, 935), True, 'import torch.nn as nn\n'), ((964, 989), 'torch.nn.MaxPool3d', 'nn.MaxPool3d', (['(2)'], {'stride': '(2)'}), '(2, stride=2)\n', (976, 989), True, 'import torch.nn as nn\n'), ((1018, 1043), 'torch.nn.MaxPool3d', 'nn.MaxPool3d', (['(2)'], {'stride': '(2)'}), '(2, stride=2)\n', (1030, 1043), True, 'import torch.nn as nn\n'), ((1072, 1097), 'torch.nn.MaxPool3d', 'nn.MaxPool3d', (['(2)'], {'stride': '(2)'}), '(2, stride=2)\n', (1084, 1097), True, 'import torch.nn as nn\n'), ((1124, 1190), 'torch.nn.Upsample', 'nn.Upsample', ([], {'scale_factor': '(2)', 'mode': '"""trilinear"""', 'align_corners': '(False)'}), "(scale_factor=2, mode='trilinear', align_corners=False)\n", (1135, 1190), True, 'import torch.nn as nn\n'), ((1224, 1290), 'torch.nn.Upsample', 'nn.Upsample', ([], {'scale_factor': '(2)', 'mode': '"""trilinear"""', 'align_corners': '(False)'}), "(scale_factor=2, mode='trilinear', align_corners=False)\n", (1235, 1290), True, 'import torch.nn as nn\n'), ((1324, 1390), 'torch.nn.Upsample', 'nn.Upsample', ([], {'scale_factor': '(2)', 'mode': '"""trilinear"""', 'align_corners': '(False)'}), "(scale_factor=2, mode='trilinear', align_corners=False)\n", (1335, 1390), True, 'import torch.nn as nn\n'), ((1425, 1491), 'torch.nn.Upsample', 'nn.Upsample', ([], {'scale_factor': '(2)', 'mode': '"""trilinear"""', 'align_corners': '(False)'}), "(scale_factor=2, mode='trilinear', align_corners=False)\n", (1436, 1491), True, 'import torch.nn as nn\n'), ((1526, 1592), 'torch.nn.Upsample', 'nn.Upsample', ([], {'scale_factor': '(2)', 'mode': '"""trilinear"""', 'align_corners': '(False)'}), "(scale_factor=2, mode='trilinear', align_corners=False)\n", (1537, 1592), True, 'import torch.nn as nn\n'), ((1627, 1693), 'torch.nn.Upsample', 'nn.Upsample', ([], {'scale_factor': '(2)', 'mode': '"""trilinear"""', 'align_corners': '(False)'}), "(scale_factor=2, mode='trilinear', align_corners=False)\n", (1638, 1693), 
True, 'import torch.nn as nn\n'), ((1727, 1798), 'torch.nn.Conv3d', 'nn.Conv3d', (['(4 * n)', '(2 * n)'], {'kernel_size': '(3)', 'padding': '(1)', 'stride': '(1)', 'bias': '(False)'}), '(4 * n, 2 * n, kernel_size=3, padding=1, stride=1, bias=False)\n', (1736, 1798), True, 'import torch.nn as nn\n'), ((1823, 1894), 'torch.nn.Conv3d', 'nn.Conv3d', (['(2 * n)', '(2 * n)'], {'kernel_size': '(3)', 'padding': '(1)', 'stride': '(1)', 'bias': '(False)'}), '(2 * n, 2 * n, kernel_size=3, padding=1, stride=1, bias=False)\n', (1832, 1894), True, 'import torch.nn as nn\n'), ((1919, 1986), 'torch.nn.Conv3d', 'nn.Conv3d', (['(2 * n)', 'c'], {'kernel_size': '(3)', 'padding': '(1)', 'stride': '(1)', 'bias': '(False)'}), '(2 * n, c, kernel_size=3, padding=1, stride=1, bias=False)\n', (1928, 1986), True, 'import torch.nn as nn\n'), ((2012, 2083), 'torch.nn.Conv3d', 'nn.Conv3d', (['(2 * n)', '(2 * n)'], {'kernel_size': '(3)', 'padding': '(1)', 'stride': '(1)', 'bias': '(False)'}), '(2 * n, 2 * n, kernel_size=3, padding=1, stride=1, bias=False)\n', (2021, 2083), True, 'import torch.nn as nn\n'), ((2109, 2180), 'torch.nn.Conv3d', 'nn.Conv3d', (['(2 * n)', '(2 * n)'], {'kernel_size': '(3)', 'padding': '(1)', 'stride': '(1)', 'bias': '(False)'}), '(2 * n, 2 * n, kernel_size=3, padding=1, stride=1, bias=False)\n', (2118, 2180), True, 'import torch.nn as nn\n'), ((2206, 2273), 'torch.nn.Conv3d', 'nn.Conv3d', (['(2 * n)', 'c'], {'kernel_size': '(3)', 'padding': '(1)', 'stride': '(1)', 'bias': '(False)'}), '(2 * n, c, kernel_size=3, padding=1, stride=1, bias=False)\n', (2215, 2273), True, 'import torch.nn as nn\n'), ((2300, 2339), 'torch.nn.Conv3d', 'nn.Conv3d', (['n', 'n', '(3)'], {'stride': '(1)', 'padding': '(1)'}), '(n, n, 3, stride=1, padding=1)\n', (2309, 2339), True, 'import torch.nn as nn\n'), ((2411, 2458), 'torch.nn.Conv3d', 'nn.Conv3d', (['(2 * n)', '(2 * n)', '(3)'], {'stride': '(1)', 'padding': '(1)'}), '(2 * n, 2 * n, 3, stride=1, padding=1)\n', (2420, 2458), True, 'import 
torch.nn as nn\n'), ((2530, 2577), 'torch.nn.Conv3d', 'nn.Conv3d', (['(2 * n)', '(4 * n)', '(3)'], {'stride': '(1)', 'padding': '(1)'}), '(2 * n, 4 * n, 3, stride=1, padding=1)\n', (2539, 2577), True, 'import torch.nn as nn\n'), ((2650, 2689), 'torch.nn.Conv3d', 'nn.Conv3d', (['n', 'n', '(3)'], {'stride': '(1)', 'padding': '(1)'}), '(n, n, 3, stride=1, padding=1)\n', (2659, 2689), True, 'import torch.nn as nn\n'), ((2761, 2808), 'torch.nn.Conv3d', 'nn.Conv3d', (['(2 * n)', '(2 * n)', '(3)'], {'stride': '(1)', 'padding': '(1)'}), '(2 * n, 2 * n, 3, stride=1, padding=1)\n', (2770, 2808), True, 'import torch.nn as nn\n'), ((2880, 2927), 'torch.nn.Conv3d', 'nn.Conv3d', (['(4 * n)', '(2 * n)', '(3)'], {'stride': '(1)', 'padding': '(1)'}), '(4 * n, 2 * n, 3, stride=1, padding=1)\n', (2889, 2927), True, 'import torch.nn as nn\n'), ((3000, 3047), 'torch.nn.Conv3d', 'nn.Conv3d', (['(2 * n)', '(2 * n)', '(3)'], {'stride': '(1)', 'padding': '(1)'}), '(2 * n, 2 * n, 3, stride=1, padding=1)\n', (3009, 3047), True, 'import torch.nn as nn\n'), ((3119, 3166), 'torch.nn.Conv3d', 'nn.Conv3d', (['(2 * n)', '(2 * n)', '(3)'], {'stride': '(1)', 'padding': '(1)'}), '(2 * n, 2 * n, 3, stride=1, padding=1)\n', (3128, 3166), True, 'import torch.nn as nn\n'), ((3238, 3277), 'torch.nn.Conv3d', 'nn.Conv3d', (['n', 'n', '(3)'], {'stride': '(1)', 'padding': '(1)'}), '(n, n, 3, stride=1, padding=1)\n', (3247, 3277), True, 'import torch.nn as nn\n'), ((3350, 3397), 'torch.nn.Conv3d', 'nn.Conv3d', (['(2 * n)', '(2 * n)', '(3)'], {'stride': '(1)', 'padding': '(1)'}), '(2 * n, 2 * n, 3, stride=1, padding=1)\n', (3359, 3397), True, 'import torch.nn as nn\n'), ((3469, 3516), 'torch.nn.Conv3d', 'nn.Conv3d', (['(2 * n)', '(2 * n)', '(3)'], {'stride': '(1)', 'padding': '(1)'}), '(2 * n, 2 * n, 3, stride=1, padding=1)\n', (3478, 3516), True, 'import torch.nn as nn\n'), ((3588, 3627), 'torch.nn.Conv3d', 'nn.Conv3d', (['n', 'n', '(3)'], {'stride': '(1)', 'padding': '(1)'}), '(n, n, 3, stride=1, 
padding=1)\n', (3597, 3627), True, 'import torch.nn as nn\n'), ((3694, 3767), 'torch.nn.Conv3d', 'nn.Conv3d', (['c', 'num_classes'], {'kernel_size': '(1)', 'padding': '(0)', 'stride': '(1)', 'bias': '(False)'}), '(c, num_classes, kernel_size=1, padding=0, stride=1, bias=False)\n', (3703, 3767), True, 'import torch.nn as nn\n'), ((3792, 3809), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(1)'}), '(dim=1)\n', (3802, 3809), True, 'import torch.nn as nn\n'), ((6018, 6036), 'torch.add', 'torch.add', (['out', 'u2'], {}), '(out, u2)\n', (6027, 6036), False, 'import torch\n'), ((6065, 6084), 'torch.add', 'torch.add', (['out1', 'o2'], {}), '(out1, o2)\n', (6074, 6084), False, 'import torch\n'), ((6506, 6524), 'torch.add', 'torch.add', (['out', 'u1'], {}), '(out, u1)\n', (6515, 6524), False, 'import torch\n'), ((6540, 6559), 'torch.add', 'torch.add', (['out1', 'o1'], {}), '(out1, o1)\n', (6549, 6559), False, 'import torch\n'), ((6730, 6750), 'torch.add', 'torch.add', (['out', 'out1'], {}), '(out, out1)\n', (6739, 6750), False, 'import torch\n'), ((3926, 3979), 'torch.nn.init.torch.nn.init.kaiming_normal_', 'torch.nn.init.torch.nn.init.kaiming_normal_', (['m.weight'], {}), '(m.weight)\n', (3969, 3979), False, 'import torch\n'), ((4078, 4108), 'torch.nn.init.constant_', 'nn.init.constant_', (['m.weight', '(1)'], {}), '(m.weight, 1)\n', (4095, 4108), True, 'import torch.nn as nn\n'), ((4125, 4153), 'torch.nn.init.constant_', 'nn.init.constant_', (['m.bias', '(0)'], {}), '(m.bias, 0)\n', (4142, 4153), True, 'import torch.nn as nn\n')]
|
import os
import subprocess
from pyramid.static import PathSegmentMd5CacheBuster
class GitCacheBuster(PathSegmentMd5CacheBuster):
    """
    Assuming your code is installed as a Git checkout, as opposed to as an
    egg from an egg repository like PYPI, you can use this cachebuster to
    get the current commit's SHA1 to use as the cache bust token.
    """
    def __init__(self):
        repo_dir = os.path.dirname(os.path.abspath(__file__))
        raw = subprocess.check_output(['git', 'rev-parse', 'HEAD'], cwd=repo_dir)
        # HEAD commit hash as returned by git (bytes), trailing newline removed.
        self.sha1 = raw.strip()

    def token(self, pathspec):
        # Same token for every asset: the current commit identifies the build.
        return self.sha1
|
[
"os.path.abspath",
"subprocess.check_output"
] |
[((418, 443), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (433, 443), False, 'import os\n'), ((465, 528), 'subprocess.check_output', 'subprocess.check_output', (["['git', 'rev-parse', 'HEAD']"], {'cwd': 'here'}), "(['git', 'rev-parse', 'HEAD'], cwd=here)\n", (488, 528), False, 'import subprocess\n')]
|
#!/usr/bin/env python3
"""
Implementation of building LSTM model
"""
# 3rd party imports
import pandas as pd
import numpy as np
import random as rn
import datetime as datetime
# model
import tensorflow as tf
from keras.models import Sequential
from keras.layers import LSTM
from keras.layers import Dense
from keras.losses import MeanSquaredLogarithmicError
from sklearn.model_selection import train_test_split
# local imports
from db.model import db, Stat
class Lstm:
    """ETL + training pipeline: extract stats from the DB, engineer features,
    and train an LSTM on sliding windows of a chosen metric."""

    def extract(self) -> pd.DataFrame:
        """
        Extract data from the database and return it as a dataframe.
        """
        # grab all records from the stat table
        return pd.read_sql_table("stat", "sqlite:///db/site.db")

    def transform(self, df: pd.DataFrame) -> pd.DataFrame:
        """
        Transforms done to the dataframe:
        - calculate the day-over-day difference of each metric, per country
        - one-hot encode countries
        """
        # Reset the diff to 0 at every country boundary so a country's first
        # row does not diff against the previous country's last row.
        for metric in ("confirmed", "recovered", "deaths"):
            same_country = df.country == df.country.shift()
            df[f"{metric}_diff"] = np.where(
                same_country, df[metric] - df[metric].shift(), 0
            )
        # encode country with pd.get_dummies and merge back on id
        dummies = pd.get_dummies(df.country)
        dummies["id"] = df.id
        df = pd.merge(df, dummies, on=["id"])
        return df

    def load(
        self,
        df: pd.DataFrame,
        metric="confirmed",
        win_size=7,
        epochs=5,
        batch_size=32,
        save=False,
    ) -> Sequential:
        """
        Build sliding windows of `metric` per country, train an LSTM on them,
        optionally save it to disk, and return the fitted model.
        """
        x, y = [], []
        countries = db.session.query(Stat.country).distinct().all()
        # countries come in the form of [('Afghanistan',), ('Albania',), ... ]
        for (country,) in countries:
            series = list(df[df.country == country][metric])
            for start in range(0, len(series) - win_size):
                end = start + win_size
                window, target = series[start:end], series[end]
                # skip zero targets (log-based loss is degenerate at 0)
                if target:
                    x.append(window)
                    y.append(target)
        X, y = np.array(x), np.array(y)
        X_train, X_val, y_train, y_val = train_test_split(
            X, y, test_size=0.2, random_state=42
        )
        # LSTM expects (samples, timesteps, features)
        X_train = X_train.reshape(X_train.shape[0], 1, X_train.shape[1])
        X_val = X_val.reshape(X_val.shape[0], 1, X_val.shape[1])
        # build model
        model = Sequential()
        model.add(
            LSTM(
                100,
                activation="relu",
                input_shape=(1, win_size),
                return_sequences=True,
            )
        )
        model.add(LSTM(150, activation="relu"))
        model.add(Dense(1, activation="relu"))
        model.compile(optimizer="adam", loss=MeanSquaredLogarithmicError())
        model.fit(
            X_train,
            y_train,
            epochs=epochs,
            batch_size=batch_size,
            validation_data=(X_val, y_val),
            verbose=2,
            shuffle=True,
        )
        if save:
            model.save("lstm_model.h5")
        # BUG FIX: this method previously returned None despite its
        # `-> Sequential` annotation (and main() binding its result).
        return model
def main():
    """
    Seed the RNGs for reproducibility, then run the full pipeline:
    extract -> transform -> train/save the LSTM model.
    """
    # Set random state for Keras
    np.random.seed(42)
    rn.seed(12345)
    # build model and save it
    model = Lstm()
    df = model.extract()
    df = model.transform(df)
    # FIX: the return value was previously bound to an unused local (`lstm`).
    model.load(df, save=True)


if __name__ == "__main__":
    main()
|
[
"numpy.random.seed",
"pandas.get_dummies",
"pandas.merge",
"sklearn.model_selection.train_test_split",
"keras.layers.LSTM",
"keras.layers.Dense",
"random.seed",
"numpy.array",
"keras.losses.MeanSquaredLogarithmicError",
"db.model.db.session.query",
"pandas.read_sql_table",
"keras.models.Sequential"
] |
[((3581, 3599), 'numpy.random.seed', 'np.random.seed', (['(42)'], {}), '(42)\n', (3595, 3599), True, 'import numpy as np\n'), ((3604, 3618), 'random.seed', 'rn.seed', (['(12345)'], {}), '(12345)\n', (3611, 3618), True, 'import random as rn\n'), ((652, 701), 'pandas.read_sql_table', 'pd.read_sql_table', (['"""stat"""', '"""sqlite:///db/site.db"""'], {}), "('stat', 'sqlite:///db/site.db')\n", (669, 701), True, 'import pandas as pd\n'), ((1421, 1447), 'pandas.get_dummies', 'pd.get_dummies', (['df.country'], {}), '(df.country)\n', (1435, 1447), True, 'import pandas as pd\n'), ((1491, 1523), 'pandas.merge', 'pd.merge', (['df', 'dummies'], {'on': "['id']"}), "(df, dummies, on=['id'])\n", (1499, 1523), True, 'import pandas as pd\n'), ((2489, 2543), 'sklearn.model_selection.train_test_split', 'train_test_split', (['X', 'y'], {'test_size': '(0.2)', 'random_state': '(42)'}), '(X, y, test_size=0.2, random_state=42)\n', (2505, 2543), False, 'from sklearn.model_selection import train_test_split\n'), ((2765, 2777), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (2775, 2777), False, 'from keras.models import Sequential\n'), ((2408, 2419), 'numpy.array', 'np.array', (['x'], {}), '(x)\n', (2416, 2419), True, 'import numpy as np\n'), ((2421, 2432), 'numpy.array', 'np.array', (['y'], {}), '(y)\n', (2429, 2432), True, 'import numpy as np\n'), ((2809, 2887), 'keras.layers.LSTM', 'LSTM', (['(100)'], {'activation': '"""relu"""', 'input_shape': '(1, win_size)', 'return_sequences': '(True)'}), "(100, activation='relu', input_shape=(1, win_size), return_sequences=True)\n", (2813, 2887), False, 'from keras.layers import LSTM\n'), ((2995, 3023), 'keras.layers.LSTM', 'LSTM', (['(150)'], {'activation': '"""relu"""'}), "(150, activation='relu')\n", (2999, 3023), False, 'from keras.layers import LSTM\n'), ((3043, 3070), 'keras.layers.Dense', 'Dense', (['(1)'], {'activation': '"""relu"""'}), "(1, activation='relu')\n", (3048, 3070), False, 'from keras.layers import Dense\n'), ((3142, 
3171), 'keras.losses.MeanSquaredLogarithmicError', 'MeanSquaredLogarithmicError', ([], {}), '()\n', (3169, 3171), False, 'from keras.losses import MeanSquaredLogarithmicError\n'), ((1870, 1900), 'db.model.db.session.query', 'db.session.query', (['Stat.country'], {}), '(Stat.country)\n', (1886, 1900), False, 'from db.model import db, Stat\n')]
|
#!/usr/bin/python3
import subprocess
from re import search as re_compile
from sys import stderr
class DHCP():
    """Reader for the dnsmasq DHCP lease file at /tmp/dhcp.leases."""

    def __init__(self):
        # List of {'mac': ..., 'ip': ..., 'name': ...} dicts, filled by fetch().
        self.leases = []

    def fetch(self):
        """Reload self.leases from /tmp/dhcp.leases.

        TODO: This relies on dnsmasq, while there are multiple DHCP server
        implementations within OpenWrt (mainly odhcpd).
        """
        self.leases = []
        # BUG FIX: the file handle was previously leaked (open() without close).
        with open("/tmp/dhcp.leases", "r") as lease_file:
            lines = lease_file.readlines()
        for line in lines:
            if not line.strip():
                continue
            # dnsmasq lease line: "<expiry> <mac> <ip> <hostname> <client-id>"
            split = line.split(' ')
            self.leases.append({'mac': split[1], 'ip': split[2], 'name': split[3]})
class FStools():
    """Thin wrapper around OpenWrt's `/sbin/block info` for mount queries."""

    def __init__(self):
        # Raw `block info` output; populated by fetch().
        self.result = ''

    def fetch(self):
        """Capture the output of `/sbin/block info` into self.result."""
        proc = subprocess.Popen("/sbin/block info", stdout=subprocess.PIPE, shell=True)
        self.result = proc.communicate()[0].decode("utf-8")

    def mounted(self, fslabel=None, uuid=None, device=None):
        """Return True if the filesystem identified by label, UUID or device
        node has a MOUNT entry in the captured `block info` output.

        Raises:
            Exception: if none of the three selectors is provided.
        """
        if not (fslabel or uuid or device):
            raise Exception("must provide either fslabel or uuid or device")
        if fslabel:
            return re_compile('LABEL="(%s) *.?".*MOUNT="(.*)"' % fslabel, self.result) is not None
        if uuid:
            return re_compile('UUID="(%s)".*MOUNT="(.*)"' % uuid, self.result) is not None
        if device:
            # BUG FIX: this branch previously interpolated `uuid` (always None
            # here) instead of `device`, so device lookups never matched.
            return re_compile('^(%s): .*MOUNT="(.*)"' % device, self.result) is not None
|
[
"subprocess.Popen",
"re.search"
] |
[((697, 769), 'subprocess.Popen', 'subprocess.Popen', (['"""/sbin/block info"""'], {'stdout': 'subprocess.PIPE', 'shell': '(True)'}), "('/sbin/block info', stdout=subprocess.PIPE, shell=True)\n", (713, 769), False, 'import subprocess\n'), ((1152, 1219), 're.search', 're_compile', (['(\'LABEL="(%s) *.?".*MOUNT="(.*)"\' % fslabel)', 'self.result'], {}), '(\'LABEL="(%s) *.?".*MOUNT="(.*)"\' % fslabel, self.result)\n', (1162, 1219), True, 'from re import search as re_compile\n'), ((1276, 1335), 're.search', 're_compile', (['(\'UUID="(%s)".*MOUNT="(.*)"\' % uuid)', 'self.result'], {}), '(\'UUID="(%s)".*MOUNT="(.*)"\' % uuid, self.result)\n', (1286, 1335), True, 'from re import search as re_compile\n'), ((1398, 1453), 're.search', 're_compile', (['(\'^(%s): .*MOUNT="(.*)"\' % uuid)', 'self.result'], {}), '(\'^(%s): .*MOUNT="(.*)"\' % uuid, self.result)\n', (1408, 1453), True, 'from re import search as re_compile\n')]
|
# -*- coding: utf-8 -*-
"""
Created on Thu May 9 13:46:08 2019
@author: <NAME>
"""
from binomialTreePricer import asianOptionBinomialTree
import pandas as pd
import numpy as np
from datetime import datetime, timedelta
# NOTE(review): df_uly and df_opt are not defined in this file; they are
# presumably DataFrames loaded in an earlier step (notebook context) -- confirm.
uly_names = ['Crude Oil WTI', 'Ethanol', 'Gold', 'Silver', 'Natural Gas']
# Latest observed price per underlying (last row of the price history).
uly_init = df_uly[uly_names].tail(1)
# Business days between start and maturity, inclusive of the start day.
df_opt['bdays'] = 1 + np.busday_count(df_opt['Start Date'].values.astype('datetime64[D]'), df_opt['Maturity Date'].values.astype('datetime64[D]'))
# Per-underlying volatility: std-dev of the raw price series (NaNs skipped).
df_uly_vol = df_uly[uly_names].std(skipna=True)
oneOverRho = 3
# NOTE(review): df_vols appears unused in the rest of this script.
df_vols = pd.DataFrame([[0.3, 0.01, 0.4, 0.1, 0.001]], columns = uly_names)
# Contract-size multipliers used to scale quoted strikes to price units.
df_units = pd.DataFrame([[0.01, 0.0001, 1, 0.001, 0.01]], columns = uly_names)
bdays_year = 252  # trading days per year
# =============================================================================
# Define risk free rate, reference to US treasury yield curve as of 20190322
# https://www.treasury.gov/resource-center/data-chart-center/interest-rates/pages/TextView.aspx?data=yieldYear&year=2019
# 1m, 2m, 3m, 6m, 1y, 2y, 3y, 5y, 7y, 10y, 20y, 30y
# =============================================================================
# Spot (zero) yields in percent, keyed by each tenor's end date.
yieldCurveDict = {
    '2019-04-22': 2.49,
    '2019-05-22': 2.48,
    '2019-06-22': 2.46,
    '2019-09-22': 2.48,
    '2020-03-22': 2.45,
    '2021-03-22': 2.31,
    '2022-03-22': 2.24,
    '2024-03-22': 2.24,
    '2026-03-22': 2.34,
    '2029-03-22': 2.44,
    '2039-03-22': 2.69,
    '2049-03-22': 2.88
}
# Derive forward rates from the US treasury yield curve: piecewise-constant
# forward rate implied by consecutive curve points, weighted by business-day
# counts from the curve date '2019-03-22'.
curvePoints = ['2019-03-22'] + list(yieldCurveDict.keys())
forwardCurveDict = {}
for i in range(len(yieldCurveDict)):
    datePoint1 = curvePoints[i]
    datePoint2 = curvePoints[i + 1]
    if (datePoint1 == curvePoints[0]):
        # First bucket: the forward rate equals the spot yield.
        forwardCurveDict[datePoint2] = yieldCurveDict[datePoint2]
    else:
        yieldAtDate1 = yieldCurveDict[datePoint1]
        yieldAtDate2 = yieldCurveDict[datePoint2]
        busDateDiff1 = np.busday_count(curvePoints[0], datePoint1)
        busDateDiff2 = np.busday_count(curvePoints[0], datePoint2)
        # Forward rate over (datePoint1, datePoint2] implied by the two spot yields.
        forwardCurveDict[datePoint2] = float((yieldAtDate2 * busDateDiff2 - yieldAtDate1 * busDateDiff1) / (busDateDiff2 - busDateDiff1))
# Look up the forward risk-free rate for a datetime: find the curve bucket
# [curvePoints[i], curvePoints[i+1]) that contains the date.
def getRiskFreeRate(inputDate):
    target = inputDate.date()
    boundaries = [datetime.strptime(point, '%Y-%m-%d').date() for point in curvePoints]
    for lower, upper, key in zip(boundaries[:-1], boundaries[1:], curvePoints[1:]):
        if lower <= target < upper:
            return forwardCurveDict[key]
    # Dates outside the curve fall back to 0, matching the original contract.
    return 0
# Price every option row with the Asian-option binomial tree and print the
# simulated price next to the quoted call price.
for row in df_opt.index:
    # Retrieve the name of the underlying (strip the contract suffix).
    tmp_uly = df_opt['Underlying'][row][:-8]
    tmp_strike = df_opt['Strike'][row]
    tmp_maturity = df_opt['Maturity Date'][row]
    # Tree steps = business days to maturity, capped at one trading year.
    tmp_steps = min(df_opt['bdays'][row], bdays_year)
    tmp_init = uly_init[tmp_uly][0]
    tmp_time_period = 1 / bdays_year
    tmp_vol = df_uly_vol[tmp_uly]
    # BUG FIX: removed `tmp_ir = get_interest_rate(tmp_steps)` -- the function
    # is undefined in this script (NameError at runtime) and its result was
    # never used; per-step rates come from getRiskFreeRate below.
    # Forward risk-free rate for each step, walking back from maturity.
    tmp_rates = [getRiskFreeRate(tmp_maturity - timedelta(d)) for d in range(tmp_steps)]
    tmp_call = df_opt['Call'][row]
    tmp_unit = df_units[tmp_uly][0]
    pricer = asianOptionBinomialTree(tmp_steps, tmp_vol, tmp_time_period, oneOverRho, tmp_rates)
    sim = pricer.getOptionPrice(tmp_init, tmp_strike * tmp_unit)
    print('undeylying: %s; bdays: %d, strile: %6.3f, init: %6.3f --> simulate: %6.3f; actual call: %6.3f' \
          % (tmp_uly, tmp_steps, tmp_strike* tmp_unit, tmp_init, sim, tmp_call))
|
[
"pandas.DataFrame",
"binomialTreePricer.asianOptionBinomialTree",
"datetime.datetime.strptime",
"datetime.timedelta",
"numpy.busday_count"
] |
[((555, 618), 'pandas.DataFrame', 'pd.DataFrame', (['[[0.3, 0.01, 0.4, 0.1, 0.001]]'], {'columns': 'uly_names'}), '([[0.3, 0.01, 0.4, 0.1, 0.001]], columns=uly_names)\n', (567, 618), True, 'import pandas as pd\n'), ((632, 697), 'pandas.DataFrame', 'pd.DataFrame', (['[[0.01, 0.0001, 1, 0.001, 0.01]]'], {'columns': 'uly_names'}), '([[0.01, 0.0001, 1, 0.001, 0.01]], columns=uly_names)\n', (644, 697), True, 'import pandas as pd\n'), ((3360, 3447), 'binomialTreePricer.asianOptionBinomialTree', 'asianOptionBinomialTree', (['tmp_steps', 'tmp_vol', 'tmp_time_period', 'oneOverRho', 'tmp_rates'], {}), '(tmp_steps, tmp_vol, tmp_time_period, oneOverRho,\n tmp_rates)\n', (3383, 3447), False, 'from binomialTreePricer import asianOptionBinomialTree\n'), ((2014, 2057), 'numpy.busday_count', 'np.busday_count', (['curvePoints[0]', 'datePoint1'], {}), '(curvePoints[0], datePoint1)\n', (2029, 2057), True, 'import numpy as np\n'), ((2081, 2124), 'numpy.busday_count', 'np.busday_count', (['curvePoints[0]', 'datePoint2'], {}), '(curvePoints[0], datePoint2)\n', (2096, 2124), True, 'import numpy as np\n'), ((2463, 2508), 'datetime.datetime.strptime', 'datetime.strptime', (['curvePoints[i]', '"""%Y-%m-%d"""'], {}), "(curvePoints[i], '%Y-%m-%d')\n", (2480, 2508), False, 'from datetime import datetime, timedelta\n'), ((2536, 2585), 'datetime.datetime.strptime', 'datetime.strptime', (['curvePoints[i + 1]', '"""%Y-%m-%d"""'], {}), "(curvePoints[i + 1], '%Y-%m-%d')\n", (2553, 2585), False, 'from datetime import datetime, timedelta\n'), ((3225, 3237), 'datetime.timedelta', 'timedelta', (['d'], {}), '(d)\n', (3234, 3237), False, 'from datetime import datetime, timedelta\n')]
|
from django.conf import settings
from django.core.exceptions import ValidationError
from mighty.applications.shop.apps import cards_test, ShopConfig
import re
class CBModel:
    """Mixin providing card-number (CB) formatting and validation helpers.

    Expects the host model to supply ``cb``, ``cvc``, ``date_valid`` and the
    usual Django model attributes (``pk``, ``objects``, ``user``/``group``).
    """

    @property
    def readable_cb(self):
        # Card number grouped into blocks of four digits.
        groups = [self.cb[pos:pos + 4] for pos in range(0, len(self.cb), 4)]
        return ' '.join(groups)

    @property
    def str_cb(self):
        # Full, unmasked representation with four-digit year.
        return "%s %s %s/%s" % (self.readable_cb, self.cvc, self.date_valid.month, self.date_valid.year)

    @property
    def mask_cb(self):
        # Mask every digit except the first and last four; two-digit year.
        spaced = self.readable_cb
        hidden = re.sub(r"\d", '*', spaced[4:-4])
        cb = spaced[0:4] + hidden + spaced[-4:]
        return "%s %s %s/%s" % (cb, self.cvc, self.date_valid.month, str(self.date_valid.year)[-2:])

    def sum_digits(self, digit):
        # Digit-sum step of the Luhn algorithm (inputs here are < 100).
        return digit if digit < 10 else (digit % 10) + (digit // 10)

    def validate_luhn(self, cc_num):
        # Test card numbers bypass the checksum in DEBUG mode.
        if settings.DEBUG and cc_num in cards_test():
            return True
        reversed_digits = [int(ch) for ch in cc_num[::-1]]
        # Double every second digit (positions 2, 4, ... from the right).
        doubled = [
            value * 2 if position % 2 == 0 else value
            for position, value in enumerate(reversed_digits, start=1)
        ]
        total = sum(self.sum_digits(value) for value in doubled)
        return total % 10 == 0

    @property
    def is_valid_cvc(self):
        return self.cvc

    @property
    def is_exist_cb(self):
        # True when no OTHER record shares the same card details.
        lookup = {"cvc": self.cvc, "cb": self.cb, "date_valid": self.date_valid}
        if ShopConfig.subscription_for == 'group':
            lookup["group"] = self.group
        else:
            lookup["user"] = self.user
        qs = type(self).objects.filter(**lookup)
        if self.pk:
            qs = qs.exclude(pk=self.pk)
        return not qs.exists()

    @property
    def is_valid_cb(self):
        # Runs the full validation chain; raises on the first failure.
        if not self.cb:
            raise ValidationError(code='invalid_iban', message='invalid CB')
        self.cb = re.sub(r"\s+", "", self.cb, flags=re.UNICODE)
        if not self.is_valid_date:
            raise ValidationError(code='invalid_date', message='invalid date')
        if not self.cb or not self.validate_luhn(self.cb):
            raise ValidationError(code='invalid_number', message='invalid number')
        if not self.is_valid_cvc:
            raise ValidationError(code='invalid_cvc', message='invalid cvc')
        if not self.is_exist_cb:
            raise ValidationError(code='already_cb', message='CB already exist')
|
[
"re.sub",
"django.core.exceptions.ValidationError",
"mighty.applications.shop.apps.cards_test"
] |
[((2165, 2210), 're.sub', 're.sub', (['"""\\\\s+"""', '""""""', 'self.cb'], {'flags': 're.UNICODE'}), "('\\\\s+', '', self.cb, flags=re.UNICODE)\n", (2171, 2210), False, 'import re\n'), ((2088, 2146), 'django.core.exceptions.ValidationError', 'ValidationError', ([], {'code': '"""invalid_iban"""', 'message': '"""invalid CB"""'}), "(code='invalid_iban', message='invalid CB')\n", (2103, 2146), False, 'from django.core.exceptions import ValidationError\n'), ((2264, 2324), 'django.core.exceptions.ValidationError', 'ValidationError', ([], {'code': '"""invalid_date"""', 'message': '"""invalid date"""'}), "(code='invalid_date', message='invalid date')\n", (2279, 2324), False, 'from django.core.exceptions import ValidationError\n'), ((2402, 2466), 'django.core.exceptions.ValidationError', 'ValidationError', ([], {'code': '"""invalid_number"""', 'message': '"""invalid number"""'}), "(code='invalid_number', message='invalid number')\n", (2417, 2466), False, 'from django.core.exceptions import ValidationError\n'), ((2519, 2577), 'django.core.exceptions.ValidationError', 'ValidationError', ([], {'code': '"""invalid_cvc"""', 'message': '"""invalid cvc"""'}), "(code='invalid_cvc', message='invalid cvc')\n", (2534, 2577), False, 'from django.core.exceptions import ValidationError\n'), ((2629, 2691), 'django.core.exceptions.ValidationError', 'ValidationError', ([], {'code': '"""already_cb"""', 'message': '"""CB already exist"""'}), "(code='already_cb', message='CB already exist')\n", (2644, 2691), False, 'from django.core.exceptions import ValidationError\n'), ((508, 550), 're.sub', 're.sub', (['"""\\\\d"""', '"""*"""', 'self.readable_cb[4:-4]'], {}), "('\\\\d', '*', self.readable_cb[4:-4])\n", (514, 550), False, 'import re\n'), ((918, 930), 'mighty.applications.shop.apps.cards_test', 'cards_test', ([], {}), '()\n', (928, 930), False, 'from mighty.applications.shop.apps import cards_test, ShopConfig\n')]
|
import numpy as np
import pytest
import xarray as xr
from pyomeca import Analogs, Markers, Angles, Rototrans
from ._constants import ANALOGS_DATA, MARKERS_DATA, EXPECTED_VALUES
from .utils import is_expected_array
def test_analogs_creation():
    """Analogs: default, from-values, random and invalid construction."""
    expected_dims = ("channel", "time")

    empty = Analogs()
    np.testing.assert_array_equal(x=empty, y=xr.DataArray())
    assert empty.dims == expected_dims

    from_values = Analogs(ANALOGS_DATA.values)
    is_expected_array(from_values, **EXPECTED_VALUES[56])

    shape = 10, 100
    randomized = Analogs.from_random_data(size=shape)
    assert randomized.shape == shape
    assert randomized.dims == expected_dims

    # Markers-shaped (3D) input is rejected.
    with pytest.raises(ValueError):
        Analogs(MARKERS_DATA)
def test_markers_creation():
    """Markers: default, from-values, random and invalid construction."""
    expected_dims = ("axis", "channel", "time")

    empty = Markers()
    np.testing.assert_array_equal(x=empty, y=xr.DataArray())
    assert empty.dims == expected_dims

    from_values = Markers(MARKERS_DATA.values)
    is_expected_array(from_values, **EXPECTED_VALUES[57])

    shape = 3, 10, 100
    randomized = Markers.from_random_data(size=shape)
    # The axis dimension is expanded from 3 to 4 on construction.
    assert randomized.shape == (4, shape[1], shape[2])
    assert randomized.dims == expected_dims

    # Analogs-shaped (2D) input is rejected.
    with pytest.raises(ValueError):
        Markers(ANALOGS_DATA)
def test_angles_creation():
    """Angles: default, from-values, random and invalid construction."""
    expected_dims = ("axis", "channel", "time")

    empty = Angles()
    np.testing.assert_array_equal(x=empty, y=xr.DataArray())
    assert empty.dims == expected_dims

    from_values = Angles(MARKERS_DATA.values, time=MARKERS_DATA.time)
    is_expected_array(from_values, **EXPECTED_VALUES[57])

    shape = 10, 10, 100
    randomized = Angles.from_random_data(size=shape)
    assert randomized.shape == shape
    assert randomized.dims == expected_dims

    # Analogs-shaped (2D) input is rejected.
    with pytest.raises(ValueError):
        Angles(ANALOGS_DATA)
def test_rototrans_creation():
dims = ("row", "col", "time")
array = Rototrans()
np.testing.assert_array_equal(x=array, y=xr.DataArray(np.eye(4)[..., np.newaxis]))
assert array.dims == dims
array = Rototrans(MARKERS_DATA.values, time=MARKERS_DATA.time)
is_expected_array(array, **EXPECTED_VALUES[67])
size = 4, 4, 100
array = Rototrans.from_random_data(size=size)
assert array.shape == size
assert array.dims == dims
with pytest.raises(ValueError):
Angles(ANALOGS_DATA)
|
[
"pyomeca.Markers.from_random_data",
"pyomeca.Markers",
"pyomeca.Rototrans",
"pyomeca.Angles",
"pyomeca.Rototrans.from_random_data",
"pytest.raises",
"pyomeca.Angles.from_random_data",
"xarray.DataArray",
"numpy.eye",
"pyomeca.Analogs.from_random_data",
"pyomeca.Analogs"
] |
[((289, 298), 'pyomeca.Analogs', 'Analogs', ([], {}), '()\n', (296, 298), False, 'from pyomeca import Analogs, Markers, Angles, Rototrans\n'), ((403, 431), 'pyomeca.Analogs', 'Analogs', (['ANALOGS_DATA.values'], {}), '(ANALOGS_DATA.values)\n', (410, 431), False, 'from pyomeca import Analogs, Markers, Angles, Rototrans\n'), ((516, 551), 'pyomeca.Analogs.from_random_data', 'Analogs.from_random_data', ([], {'size': 'size'}), '(size=size)\n', (540, 551), False, 'from pyomeca import Analogs, Markers, Angles, Rototrans\n'), ((762, 771), 'pyomeca.Markers', 'Markers', ([], {}), '()\n', (769, 771), False, 'from pyomeca import Analogs, Markers, Angles, Rototrans\n'), ((876, 904), 'pyomeca.Markers', 'Markers', (['MARKERS_DATA.values'], {}), '(MARKERS_DATA.values)\n', (883, 904), False, 'from pyomeca import Analogs, Markers, Angles, Rototrans\n'), ((992, 1027), 'pyomeca.Markers.from_random_data', 'Markers.from_random_data', ([], {'size': 'size'}), '(size=size)\n', (1016, 1027), False, 'from pyomeca import Analogs, Markers, Angles, Rototrans\n'), ((1254, 1262), 'pyomeca.Angles', 'Angles', ([], {}), '()\n', (1260, 1262), False, 'from pyomeca import Analogs, Markers, Angles, Rototrans\n'), ((1367, 1418), 'pyomeca.Angles', 'Angles', (['MARKERS_DATA.values'], {'time': 'MARKERS_DATA.time'}), '(MARKERS_DATA.values, time=MARKERS_DATA.time)\n', (1373, 1418), False, 'from pyomeca import Analogs, Markers, Angles, Rototrans\n'), ((1507, 1541), 'pyomeca.Angles.from_random_data', 'Angles.from_random_data', ([], {'size': 'size'}), '(size=size)\n', (1530, 1541), False, 'from pyomeca import Analogs, Markers, Angles, Rototrans\n'), ((1748, 1759), 'pyomeca.Rototrans', 'Rototrans', ([], {}), '()\n', (1757, 1759), False, 'from pyomeca import Analogs, Markers, Angles, Rototrans\n'), ((1890, 1944), 'pyomeca.Rototrans', 'Rototrans', (['MARKERS_DATA.values'], {'time': 'MARKERS_DATA.time'}), '(MARKERS_DATA.values, time=MARKERS_DATA.time)\n', (1899, 1944), False, 'from pyomeca import Analogs, Markers, 
Angles, Rototrans\n'), ((2031, 2068), 'pyomeca.Rototrans.from_random_data', 'Rototrans.from_random_data', ([], {'size': 'size'}), '(size=size)\n', (2057, 2068), False, 'from pyomeca import Analogs, Markers, Angles, Rototrans\n'), ((623, 648), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (636, 648), False, 'import pytest\n'), ((658, 679), 'pyomeca.Analogs', 'Analogs', (['MARKERS_DATA'], {}), '(MARKERS_DATA)\n', (665, 679), False, 'from pyomeca import Analogs, Markers, Angles, Rototrans\n'), ((1116, 1141), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1129, 1141), False, 'import pytest\n'), ((1151, 1172), 'pyomeca.Markers', 'Markers', (['ANALOGS_DATA'], {}), '(ANALOGS_DATA)\n', (1158, 1172), False, 'from pyomeca import Analogs, Markers, Angles, Rototrans\n'), ((1613, 1638), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1626, 1638), False, 'import pytest\n'), ((1648, 1668), 'pyomeca.Angles', 'Angles', (['ANALOGS_DATA'], {}), '(ANALOGS_DATA)\n', (1654, 1668), False, 'from pyomeca import Analogs, Markers, Angles, Rototrans\n'), ((2140, 2165), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2153, 2165), False, 'import pytest\n'), ((2175, 2195), 'pyomeca.Angles', 'Angles', (['ANALOGS_DATA'], {}), '(ANALOGS_DATA)\n', (2181, 2195), False, 'from pyomeca import Analogs, Markers, Angles, Rototrans\n'), ((344, 358), 'xarray.DataArray', 'xr.DataArray', ([], {}), '()\n', (356, 358), True, 'import xarray as xr\n'), ((817, 831), 'xarray.DataArray', 'xr.DataArray', ([], {}), '()\n', (829, 831), True, 'import xarray as xr\n'), ((1308, 1322), 'xarray.DataArray', 'xr.DataArray', ([], {}), '()\n', (1320, 1322), True, 'import xarray as xr\n'), ((1818, 1827), 'numpy.eye', 'np.eye', (['(4)'], {}), '(4)\n', (1824, 1827), True, 'import numpy as np\n')]
|
import ast
expr = """\
class Asdf(object):
def meme(self, x):
return 15
a = Asdf()
x = a.meme(a)
if x:
print x
"""
a1 = ast.parse(expr)
0
|
[
"ast.parse"
] |
[((141, 156), 'ast.parse', 'ast.parse', (['expr'], {}), '(expr)\n', (150, 156), False, 'import ast\n')]
|
import socket
import random
def find_listening_port(port_range=None, host='localhost', socket_type='tcp', default_port=None):
"""Find an open listening port"""
if port_range is None:
port_range = (6000,65534)
if socket_type == 'tcp':
socket_protocol = socket.SOCK_STREAM
elif socket_type == 'udp':
socket_protocol = socket.SOCK_DGRAM
else:
raise Exception('Invalid socket_type argument, must be: tcp or udp')
if default_port is not None:
port = test_port(host, default_port, socket_protocol)
if port != -1:
return port
searched_ports = [8080]
for _ in range(100):
port = random.randint(port_range[0], port_range[1])
if port in searched_ports:
continue
port = test_port(host, port, socket_protocol)
if port != -1:
return port
searched_ports.append(port)
raise Exception(f'Failed to find {socket_type} listening port for host={host}')
def test_port(host, port, socket_protocol):
with socket.socket(socket.AF_INET, socket_protocol) as sock:
try:
sock.bind((host, port))
return port
except:
pass
return -1
|
[
"socket.socket",
"random.randint"
] |
[((676, 720), 'random.randint', 'random.randint', (['port_range[0]', 'port_range[1]'], {}), '(port_range[0], port_range[1])\n', (690, 720), False, 'import random\n'), ((1058, 1104), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket_protocol'], {}), '(socket.AF_INET, socket_protocol)\n', (1071, 1104), False, 'import socket\n')]
|
# Generated by Django 3.0.4 on 2020-03-11 19:35
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Book',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50)),
('detail', models.CharField(blank=True, max_length=500, null=True)),
('upload_date', models.DateTimeField(auto_now_add=True)),
('ISBN_Image', models.FileField(blank=True, null=True, upload_to='', verbose_name='ISBN_Image')),
('ISBN_Data', models.IntegerField(blank=True, null=True)),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
|
[
"django.db.models.FileField",
"django.db.migrations.swappable_dependency",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.models.AutoField",
"django.db.models.IntegerField",
"django.db.models.DateTimeField"
] |
[((247, 304), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (278, 304), False, 'from django.db import migrations, models\n'), ((433, 526), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (449, 526), False, 'from django.db import migrations, models\n'), ((550, 581), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (566, 581), False, 'from django.db import migrations, models\n'), ((611, 666), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(500)', 'null': '(True)'}), '(blank=True, max_length=500, null=True)\n', (627, 666), False, 'from django.db import migrations, models\n'), ((701, 740), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (721, 740), False, 'from django.db import migrations, models\n'), ((774, 859), 'django.db.models.FileField', 'models.FileField', ([], {'blank': '(True)', 'null': '(True)', 'upload_to': '""""""', 'verbose_name': '"""ISBN_Image"""'}), "(blank=True, null=True, upload_to='', verbose_name='ISBN_Image'\n )\n", (790, 859), False, 'from django.db import migrations, models\n'), ((887, 929), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (906, 929), False, 'from django.db import migrations, models\n'), ((959, 1055), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL'}), '(on_delete=django.db.models.deletion.CASCADE, to=settings.\n AUTH_USER_MODEL)\n', (976, 1055), False, 'from django.db import migrations, models\n')]
|
# -*- coding: utf-8 -*-
"""
Created on Fri Jun 29 15:07:58 2018
@author: nce3xin
"""
import torch
import torch.nn as nn
#use_cuda = not hyperparams.no_cuda and torch.cuda.is_available()
#device = torch.device("cuda" if use_cuda else "cpu")
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
class BaseModel(nn.Module):
def __init__(self, inputDim, hiddenNum, outputDim, layerNum, cell):
super(BaseModel, self).__init__()
self.hiddenNum = hiddenNum
self.inputDim = inputDim
self.outputDim = outputDim
self.layerNum = layerNum
if cell == "RNN":
self.cell = nn.RNN(input_size=self.inputDim, hidden_size=self.hiddenNum,
num_layers=self.layerNum, dropout=0.0,
nonlinearity="tanh", batch_first=True,)
if cell == "LSTM":
self.cell = nn.LSTM(input_size=self.inputDim, hidden_size=self.hiddenNum,
num_layers=self.layerNum, dropout=0.0,
batch_first=True, )
if cell == "GRU":
self.cell = nn.GRU(input_size=self.inputDim, hidden_size=self.hiddenNum,
num_layers=self.layerNum, dropout=0.0,
batch_first=True, )
print(self.cell)
self.fc = nn.Linear(self.hiddenNum, self.outputDim)
# standard RNN model
class RNNModel(BaseModel):
def __init__(self, inputDim, hiddenNum, outputDim, layerNum, cell):
super(RNNModel, self).__init__(inputDim, hiddenNum, outputDim, layerNum, cell)
def forward(self, x, batchSize):
h0 = torch.zeros(self.layerNum * 1, batchSize, self.hiddenNum).to(device)
x=x.float()
h0=h0.float()
rnnOutput, hn = self.cell(x, h0) # rnnOutput 12,20,50 hn 1,20,50
hn = hn.view(batchSize, self.hiddenNum).to(device)
fcOutput = self.fc(hn)
fcOutput=fcOutput.to(device)
return fcOutput
# LSTM model
class LSTMModel(BaseModel):
def __init__(self, inputDim, hiddenNum, outputDim, layerNum, cell):
super(LSTMModel, self).__init__(inputDim, hiddenNum, outputDim, layerNum, cell)
# if batch_first is true, then the input and output tensors are provided as (batch,seq,feature)
# else input of shape (seq_len, batch, input_size)
# h_0,h_n of shape (num_layers*num_directions,batch,hidden_size)
# c_0,c_n of shape (num_layers*num_directions,batch,hidden_size)
def forward(self, x, batchSize):
h0 = torch.zeros(self.layerNum * 1, batchSize, self.hiddenNum).to(device)
c0 = torch.zeros(self.layerNum * 1, batchSize, self.hiddenNum).to(device)
x=x.float()
h0=h0.float()
c0=c0.float()
rnnOutput, hn = self.cell(x, (h0, c0)) # rnnOutput 12,20,50 hn 1,20,50
hn = hn[0].view(batchSize, self.hiddenNum).to(device)
fcOutput = self.fc(hn)
return fcOutput
# GRU model
class GRUModel(BaseModel):
def __init__(self, inputDim, hiddenNum, outputDim, layerNum, cell):
super(GRUModel, self).__init__(inputDim, hiddenNum, outputDim, layerNum, cell)
# if batch_first is true, then the input and output tensors are provided as (batch,seq,feature)
# else input of shape (seq_len, batch, input_size)
# h_0,h_n of shape (num_layers*num_directions,batch,hidden_size)
def forward(self, x, batchSize):
h0 = torch.zeros(self.layerNum * 1, batchSize, self.hiddenNum).to(device)
x=x.float()
h0=h0.float()
rnnOutput, hn = self.cell(x, h0) # rnnOutput 12,20,50 hn 1,20,50
hn = hn.view(batchSize, self.hiddenNum).to(device)
fcOutput = self.fc(hn)
return fcOutput
|
[
"torch.nn.GRU",
"torch.nn.RNN",
"torch.cuda.is_available",
"torch.nn.Linear",
"torch.zeros",
"torch.nn.LSTM"
] |
[((275, 300), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (298, 300), False, 'import torch\n'), ((1346, 1387), 'torch.nn.Linear', 'nn.Linear', (['self.hiddenNum', 'self.outputDim'], {}), '(self.hiddenNum, self.outputDim)\n', (1355, 1387), True, 'import torch.nn as nn\n'), ((645, 788), 'torch.nn.RNN', 'nn.RNN', ([], {'input_size': 'self.inputDim', 'hidden_size': 'self.hiddenNum', 'num_layers': 'self.layerNum', 'dropout': '(0.0)', 'nonlinearity': '"""tanh"""', 'batch_first': '(True)'}), "(input_size=self.inputDim, hidden_size=self.hiddenNum, num_layers=\n self.layerNum, dropout=0.0, nonlinearity='tanh', batch_first=True)\n", (651, 788), True, 'import torch.nn as nn\n'), ((885, 1008), 'torch.nn.LSTM', 'nn.LSTM', ([], {'input_size': 'self.inputDim', 'hidden_size': 'self.hiddenNum', 'num_layers': 'self.layerNum', 'dropout': '(0.0)', 'batch_first': '(True)'}), '(input_size=self.inputDim, hidden_size=self.hiddenNum, num_layers=\n self.layerNum, dropout=0.0, batch_first=True)\n', (892, 1008), True, 'import torch.nn as nn\n'), ((1118, 1240), 'torch.nn.GRU', 'nn.GRU', ([], {'input_size': 'self.inputDim', 'hidden_size': 'self.hiddenNum', 'num_layers': 'self.layerNum', 'dropout': '(0.0)', 'batch_first': '(True)'}), '(input_size=self.inputDim, hidden_size=self.hiddenNum, num_layers=\n self.layerNum, dropout=0.0, batch_first=True)\n', (1124, 1240), True, 'import torch.nn as nn\n'), ((1650, 1707), 'torch.zeros', 'torch.zeros', (['(self.layerNum * 1)', 'batchSize', 'self.hiddenNum'], {}), '(self.layerNum * 1, batchSize, self.hiddenNum)\n', (1661, 1707), False, 'import torch\n'), ((2538, 2595), 'torch.zeros', 'torch.zeros', (['(self.layerNum * 1)', 'batchSize', 'self.hiddenNum'], {}), '(self.layerNum * 1, batchSize, self.hiddenNum)\n', (2549, 2595), False, 'import torch\n'), ((2620, 2677), 'torch.zeros', 'torch.zeros', (['(self.layerNum * 1)', 'batchSize', 'self.hiddenNum'], {}), '(self.layerNum * 1, batchSize, self.hiddenNum)\n', (2631, 2677), False, 
'import torch\n'), ((3458, 3515), 'torch.zeros', 'torch.zeros', (['(self.layerNum * 1)', 'batchSize', 'self.hiddenNum'], {}), '(self.layerNum * 1, batchSize, self.hiddenNum)\n', (3469, 3515), False, 'import torch\n')]
|
# Generated by Django 3.1.3 on 2020-11-23 16:50
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('buying', '0004_auto_20201123_1556'),
]
operations = [
migrations.RemoveField(
model_name='item',
name='uuid',
),
migrations.AddField(
model_name='item',
name='product_nr',
field=models.PositiveSmallIntegerField(default=11, unique=True),
preserve_default=False,
),
]
|
[
"django.db.migrations.RemoveField",
"django.db.models.PositiveSmallIntegerField"
] |
[((234, 288), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""item"""', 'name': '"""uuid"""'}), "(model_name='item', name='uuid')\n", (256, 288), False, 'from django.db import migrations, models\n'), ((434, 491), 'django.db.models.PositiveSmallIntegerField', 'models.PositiveSmallIntegerField', ([], {'default': '(11)', 'unique': '(True)'}), '(default=11, unique=True)\n', (466, 491), False, 'from django.db import migrations, models\n')]
|
import cv2 as cv
import numpy as np
class FaceMaskAppEngine:
"""
Perform detector which detects faces from input video,
and classifier to classify croped faces to face or mask class
:param config: Is a Config instance which provides necessary parameters.
"""
def __init__(self, config):
self.config = config
self.detector = None
self.classifier_model = None
self.running_video = False
self.device = self.config.DEVICE
if self.device == "x86":
from libs.detectors.x86.detector import Detector
from libs.classifiers.x86.classifier import Classifier
self.detector = Detector(self.config)
self.classifier_model = Classifier(self.config)
elif self.device == "EdgeTPU":
from libs.detectors.edgetpu.detector import Detector
from libs.classifiers.edgetpu.classifier import Classifier
self.detector = Detector(self.config)
self.classifier_model = Classifier(self.config)
else:
raise ValueError('Not supported device named: ', self.device)
self.image_size = (self.config.DETECTOR_INPUT_SIZE[0], self.config.DETECTOR_INPUT_SIZE[1], 3)
self.classifier_img_size = (self.config.CLASSIFIER_INPUT_SIZE, self.config.CLASSIFIER_INPUT_SIZE, 3)
def set_ui(self, ui):
self.ui = ui
def __process(self, cv_image):
# Resize input image to resolution
self.resolution = self.config.APP_VIDEO_RESOLUTION
cv_image = cv.resize(cv_image, tuple(self.resolution))
resized_image = cv.resize(cv_image, tuple(self.image_size[:2]))
rgb_resized_image = cv.cvtColor(resized_image, cv.COLOR_BGR2RGB)
objects_list = self.detector.inference(rgb_resized_image)
[w, h] = self.resolution
#objects_list = [{'id': '1-0', 'bbox': [.1, .2, .5, .5]}, {'id': '1-1', 'bbox': [.3, .1, .5, .5]}]
faces = []
for obj in objects_list:
if 'bbox' in obj.keys():
face_bbox = obj['bbox'] # [ymin, xmin, ymax, xmax]
xmin, xmax = np.multiply([face_bbox[1], face_bbox[3]], self.resolution[0])
ymin, ymax = np.multiply([face_bbox[0], face_bbox[2]], self.resolution[1])
croped_face = cv_image[int(ymin):int(ymin) + (int(ymax) - int(ymin)),
int(xmin):int(xmin) + (int(xmax) - int(xmin))]
# Resizing input image
croped_face = cv.resize(croped_face, tuple(self.classifier_img_size[:2]))
croped_face = cv.cvtColor(croped_face, cv.COLOR_BGR2RGB)
# Normalizing input image to [0.0-1.0]
croped_face = croped_face / 255.0
faces.append(croped_face)
faces = np.array(faces)
face_mask_results, scores = self.classifier_model.inference(faces)
# TODO: it could be optimized by the returned dictionary from openpifpaf (returining List instead dict)
[w, h] = self.resolution
idx = 0
for obj in objects_list:
if 'bbox' in obj.keys():
obj['face_label'] = face_mask_results[idx]
obj['score'] = scores[idx]
idx = idx + 1
box = obj["bbox"]
x0 = box[1]
y0 = box[0]
x1 = box[3]
y1 = box[2]
obj["bbox"] = [x0, y0, x1, y1]
return cv_image, objects_list
def process_video(self, video_uri):
input_cap = cv.VideoCapture(video_uri)
if (input_cap.isOpened()):
print('opened video ', video_uri)
else:
print('failed to load video ', video_uri)
return
self.running_video = True
while input_cap.isOpened() and self.running_video:
_, cv_image = input_cap.read()
if np.shape(cv_image) != ():
cv_image, objects = self.__process(cv_image)
else:
continue
self.ui.update(cv_image, objects)
input_cap.release()
self.running_video = False
# def process_image(self, image_path):
# # Process and pass the image to ui modules
# cv_image = cv.imread(image_path)
# cv_image, objects, distancings = self.__process(cv_image)
# self.ui.update(cv_image, objects, distancings)
|
[
"numpy.multiply",
"libs.classifiers.edgetpu.classifier.Classifier",
"cv2.cvtColor",
"cv2.VideoCapture",
"numpy.shape",
"numpy.array",
"libs.detectors.edgetpu.detector.Detector"
] |
[((1691, 1735), 'cv2.cvtColor', 'cv.cvtColor', (['resized_image', 'cv.COLOR_BGR2RGB'], {}), '(resized_image, cv.COLOR_BGR2RGB)\n', (1702, 1735), True, 'import cv2 as cv\n'), ((2826, 2841), 'numpy.array', 'np.array', (['faces'], {}), '(faces)\n', (2834, 2841), True, 'import numpy as np\n'), ((3585, 3611), 'cv2.VideoCapture', 'cv.VideoCapture', (['video_uri'], {}), '(video_uri)\n', (3600, 3611), True, 'import cv2 as cv\n'), ((674, 695), 'libs.detectors.edgetpu.detector.Detector', 'Detector', (['self.config'], {}), '(self.config)\n', (682, 695), False, 'from libs.detectors.edgetpu.detector import Detector\n'), ((732, 755), 'libs.classifiers.edgetpu.classifier.Classifier', 'Classifier', (['self.config'], {}), '(self.config)\n', (742, 755), False, 'from libs.classifiers.edgetpu.classifier import Classifier\n'), ((959, 980), 'libs.detectors.edgetpu.detector.Detector', 'Detector', (['self.config'], {}), '(self.config)\n', (967, 980), False, 'from libs.detectors.edgetpu.detector import Detector\n'), ((1017, 1040), 'libs.classifiers.edgetpu.classifier.Classifier', 'Classifier', (['self.config'], {}), '(self.config)\n', (1027, 1040), False, 'from libs.classifiers.edgetpu.classifier import Classifier\n'), ((2128, 2189), 'numpy.multiply', 'np.multiply', (['[face_bbox[1], face_bbox[3]]', 'self.resolution[0]'], {}), '([face_bbox[1], face_bbox[3]], self.resolution[0])\n', (2139, 2189), True, 'import numpy as np\n'), ((2219, 2280), 'numpy.multiply', 'np.multiply', (['[face_bbox[0], face_bbox[2]]', 'self.resolution[1]'], {}), '([face_bbox[0], face_bbox[2]], self.resolution[1])\n', (2230, 2280), True, 'import numpy as np\n'), ((2611, 2653), 'cv2.cvtColor', 'cv.cvtColor', (['croped_face', 'cv.COLOR_BGR2RGB'], {}), '(croped_face, cv.COLOR_BGR2RGB)\n', (2622, 2653), True, 'import cv2 as cv\n'), ((3933, 3951), 'numpy.shape', 'np.shape', (['cv_image'], {}), '(cv_image)\n', (3941, 3951), True, 'import numpy as np\n')]
|
# Generated by Django 2.1.1 on 2019-03-28 18:37
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('variants', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='variant',
name='snpeff_func_class',
field=models.TextField(blank=True, db_index=True, null=True),
),
]
|
[
"django.db.models.TextField"
] |
[((336, 390), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'db_index': '(True)', 'null': '(True)'}), '(blank=True, db_index=True, null=True)\n', (352, 390), False, 'from django.db import migrations, models\n')]
|
#!/usr/bin/env python
import itertools
from heapq import heappush, heappop
class PriorityQueue(object):
def __len__(self):
return len(self._pq)
def __iter__(self):
return iter(task for priority, count, task in self._pq)
def __init__(self):
self._pq = []
self.counter = itertools.count()
def push(self, task, priority=0):
"""
Add one task to priority queue
When priority is the same, count ensures the earlier added tasks first
"""
count = next(self.counter)
entry = [priority, count, task]
heappush(self._pq, entry)
def pop(self):
"""
Pop the task that was pushed with highest priority value
"""
if len(self._pq) == 0:
raise IndexError("pop from an empty priority queue")
_, _, task = heappop(self._pq)
return task
def push_many(self, tasks, priority=0):
for task in tasks:
self.push(task, priority)
|
[
"heapq.heappush",
"itertools.count",
"heapq.heappop"
] |
[((319, 336), 'itertools.count', 'itertools.count', ([], {}), '()\n', (334, 336), False, 'import itertools\n'), ((601, 626), 'heapq.heappush', 'heappush', (['self._pq', 'entry'], {}), '(self._pq, entry)\n', (609, 626), False, 'from heapq import heappush, heappop\n'), ((853, 870), 'heapq.heappop', 'heappop', (['self._pq'], {}), '(self._pq)\n', (860, 870), False, 'from heapq import heappush, heappop\n')]
|
import unittest2 as unittest
import numpy as np
from vsm.corpus.util.corpusbuilders import random_corpus
from vsm.model.base import BaseModel
class TestBaseModel(unittest.TestCase):
def setUp(self):
self.c = random_corpus(1000, 50, 6, 100)
self.m = BaseModel(self.c, 'context')
def test_BaseModel_IO(self):
from tempfile import NamedTemporaryFile as NTF
import os
c = random_corpus(1000, 50, 6, 100)
with NTF(delete=False, suffix='.npz') as tmp:
m0 = BaseModel(c.corpus, 'context')
m0.save(tmp.name)
m1 = BaseModel.load(tmp.name)
self.assertEqual(m0.context_type, m1.context_type)
self.assertTrue((m0.matrix == m1.matrix).all())
os.remove(tmp.name)
suite = unittest.TestLoader().loadTestsFromTestCase(TestBaseModel)
unittest.TextTestRunner(verbosity=2).run(suite)
|
[
"tempfile.NamedTemporaryFile",
"os.remove",
"unittest2.TextTestRunner",
"vsm.model.base.BaseModel.load",
"vsm.model.base.BaseModel",
"unittest2.TestLoader",
"vsm.corpus.util.corpusbuilders.random_corpus"
] |
[((224, 255), 'vsm.corpus.util.corpusbuilders.random_corpus', 'random_corpus', (['(1000)', '(50)', '(6)', '(100)'], {}), '(1000, 50, 6, 100)\n', (237, 255), False, 'from vsm.corpus.util.corpusbuilders import random_corpus\n'), ((273, 301), 'vsm.model.base.BaseModel', 'BaseModel', (['self.c', '"""context"""'], {}), "(self.c, 'context')\n", (282, 301), False, 'from vsm.model.base import BaseModel\n'), ((435, 466), 'vsm.corpus.util.corpusbuilders.random_corpus', 'random_corpus', (['(1000)', '(50)', '(6)', '(100)'], {}), '(1000, 50, 6, 100)\n', (448, 466), False, 'from vsm.corpus.util.corpusbuilders import random_corpus\n'), ((774, 793), 'os.remove', 'os.remove', (['tmp.name'], {}), '(tmp.name)\n', (783, 793), False, 'import os\n'), ((803, 824), 'unittest2.TestLoader', 'unittest.TestLoader', ([], {}), '()\n', (822, 824), True, 'import unittest2 as unittest\n'), ((862, 898), 'unittest2.TextTestRunner', 'unittest.TextTestRunner', ([], {'verbosity': '(2)'}), '(verbosity=2)\n', (885, 898), True, 'import unittest2 as unittest\n'), ((480, 512), 'tempfile.NamedTemporaryFile', 'NTF', ([], {'delete': '(False)', 'suffix': '""".npz"""'}), "(delete=False, suffix='.npz')\n", (483, 512), True, 'from tempfile import NamedTemporaryFile as NTF\n'), ((538, 568), 'vsm.model.base.BaseModel', 'BaseModel', (['c.corpus', '"""context"""'], {}), "(c.corpus, 'context')\n", (547, 568), False, 'from vsm.model.base import BaseModel\n'), ((616, 640), 'vsm.model.base.BaseModel.load', 'BaseModel.load', (['tmp.name'], {}), '(tmp.name)\n', (630, 640), False, 'from vsm.model.base import BaseModel\n')]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
"""This module contains the base classes that are used to implement the more specific behaviour."""
import inspect
import six
from cached_property import cached_property
from collections import defaultdict
from copy import copy
from smartloc import Locator
from wait_for import wait_for
from .browser import Browser
from .exceptions import (
NoSuchElementException, LocatorNotImplemented, WidgetOperationFailed, DoNotReadThisWidget)
from .log import PrependParentsAdapter, create_widget_logger, logged
from .utils import Widgetable, Fillable, attributize_string
from .xpath import quote
def do_not_read_this_widget():
"""Call inside widget's read method in case you don't want it to appear in the data."""
raise DoNotReadThisWidget('Do not read this widget.')
def wrap_fill_method(method):
"""Generates a method that automatically coerces the first argument as Fillable."""
@six.wraps(method)
def wrapped(self, value, *args, **kwargs):
return method(self, Fillable.coerce(value), *args, **kwargs)
return wrapped
class WidgetDescriptor(Widgetable):
"""This class handles instantiating and caching of the widgets on view.
It stores the class and the parameters it should be instantiated with. Once it is accessed from
the instance of the class where it was defined on, it passes the instance to the widget class
followed by args and then kwargs.
It also acts as a counter, so you can then order the widgets by their "creation" stamp.
"""
def __init__(self, klass, *args, **kwargs):
self.klass = klass
self.args = args
self.kwargs = kwargs
def __get__(self, obj, type=None):
if obj is None: # class access
return self
# Cache on WidgetDescriptor
if self not in obj._widget_cache:
kwargs = copy(self.kwargs)
try:
parent_logger = obj.logger
current_name = obj._desc_name_mapping[self]
if isinstance(parent_logger, PrependParentsAdapter):
# If it already is adapter, then pull the logger itself out and append
# the widget name
widget_path = '{}/{}'.format(parent_logger.extra['widget_path'], current_name)
parent_logger = parent_logger.logger
else:
# Seems like first in the line.
widget_path = current_name
kwargs['logger'] = create_widget_logger(widget_path, parent_logger)
except AttributeError:
pass
obj._widget_cache[self] = self.klass(obj, *self.args, **kwargs)
widget = obj._widget_cache[self]
obj.child_widget_accessed(widget)
return widget
def __repr__(self):
return '<Descriptor: {}, {!r}, {!r}>'.format(self.klass.__name__, self.args, self.kwargs)
class ExtraData(object):
"""This class implements a simple access to the extra data passed through
:py:class:`widgetastic.browser.Browser` object.
.. code-block:: python
widget.extra.foo
# is equivalent to
widget.browser.extra_objects['foo']
"""
# TODO: Possibly replace it with a descriptor of some sort?
def __init__(self, widget):
self._widget = widget
@property
def _extra_objects_list(self):
return list(six.iterkeys(self._widget.browser.extra_objects))
def __dir__(self):
return self._extra_objects_list
def __getattr__(self, attr):
try:
return self._widget.browser.extra_objects[attr]
except KeyError:
raise AttributeError('Extra object {!r} was not found ({} are available)'.format(
attr, ', '.join(self._extra_objects_list)))
class WidgetMetaclass(type):
"""Metaclass that ensures that ``fill`` and ``read`` methods are logged and coerce Fillable
properly.
For ``fill`` methods placed in :py:class:`Widget` descendants it first wraps them using
:py:func:`wrap_fill_method` that ensures that :py:class:`widgetastic.utils.Fillable` can be
passed and then it wraps them in the :py:func:`widgetastic.log.logged`.
The same happens for ``read`` except the ``wrap_fill_method`` which is only useful for ``fill``.
Therefore, you shall not wrap any ``read`` or ``fill`` methods in
:py:func:`widgetastic.log.logged`.
"""
def __new__(cls, name, bases, attrs):
new_attrs = {}
for key, value in six.iteritems(attrs):
if key == 'fill':
# handle fill() specifics
new_attrs[key] = logged(log_args=True, log_result=True)(wrap_fill_method(value))
elif key == 'read':
# handle read() specifics
new_attrs[key] = logged(log_result=True)(value)
else:
# Do nothing
new_attrs[key] = value
return super(WidgetMetaclass, cls).__new__(cls, name, bases, new_attrs)
class Widget(six.with_metaclass(WidgetMetaclass, object)):
    """Base class for all UI objects.
    Does couple of things:
        * Ensures it gets instantiated with a browser or another widget as parent. If you create an
          instance in a class, it then creates a WidgetDescriptor which is then invoked on the
          instance and instantiates the widget with underlying browser.
        * Implements some basic interface for all widgets.
    """
    def __new__(cls, *args, **kwargs):
        """Implement some typing saving magic.
        Unless you are passing a :py:class:`Widget` or :py:class:`widgetastic.browser.Browser`
        as a first argument which implies the instantiation of an actual widget, it will return
        :py:class:`WidgetDescriptor` instead which will resolve automatically inside of
        :py:class:`View` instance.
        This allows you a sort of Django-ish access to the defined widgets then.
        """
        if args and isinstance(args[0], (Widget, Browser)):
            # Real instantiation: the first positional argument is the parent.
            return super(Widget, cls).__new__(cls)
        else:
            # Declaration inside a class body: defer construction to a descriptor
            # that the owning View resolves on attribute access.
            return WidgetDescriptor(cls, *args, **kwargs)
    def __init__(self, parent, logger=None):
        """If you are inheriting from this class, you **MUST ALWAYS** ensure that the inherited class
        has an init that always takes the ``parent`` as the first argument. You can do that on your
        own, setting the parent as ``self.parent`` or you can do something like this:
        .. code-block:: python
            def __init__(self, parent, arg1, arg2, logger=None):
                super(MyClass, self).__init__(parent, logger=logger)
                # or if you have somehow complex inheritance ...
                Widget.__init__(self, parent, logger=logger)
        """
        self.parent = parent
        if isinstance(logger, PrependParentsAdapter):
            # The logger is already prepared
            self.logger = logger
        else:
            # We need a PrependParentsAdapter here.
            self.logger = create_widget_logger(type(self).__name__, logger)
        self.extra = ExtraData(self)
    @property
    def browser(self):
        """Returns the instance of parent browser.
        Returns:
            :py:class:`widgetastic.browser.Browser` instance
        Raises:
            :py:class:`ValueError` when the browser is not defined, which is an error.
        """
        try:
            # Walks up the parent chain; the root parent is the Browser itself.
            return self.parent.browser
        except AttributeError:
            raise ValueError('Unknown value {!r} specified as parent.'.format(self.parent))
    @property
    def parent_view(self):
        """Returns a parent view, if the widget lives inside one.
        Returns:
            :py:class:`View` instance if the widget is defined in one, otherwise ``None``.
        """
        if isinstance(self.parent, View):
            return self.parent
        else:
            return None
    @property
    def is_displayed(self):
        """Shortcut allowing you to detect if the widget is displayed.
        If the logic behind is_displayed is more complex, you can always override this.
        Returns:
            :py:class:`bool`
        """
        return self.browser.is_displayed(self)
    @logged()
    def wait_displayed(self, timeout='10s'):
        """Wait for the element to be displayed. Uses the :py:meth:`is_displayed`
        Args:
            timeout: If you want, you can override the default timeout here
        """
        wait_for(lambda: self.is_displayed, timeout=timeout, delay=0.2)
    @logged()
    def move_to(self):
        """Moves the mouse to the Selenium WebElement that is resolved by this widget.
        Returns:
            :py:class:`selenium.webdriver.remote.webelement.WebElement` instance
        """
        return self.browser.move_to_element(self)
    def child_widget_accessed(self, widget):
        """Called when a child widget of this widget gets accessed.
        Useful when eg. the containing widget needs to open for the child widget to become visible.
        Args:
            widget: The widget being accessed.
        """
        # Intentionally a no-op hook; containers override it when needed.
        pass
    def fill(self, *args, **kwargs):
        """Interactive objects like inputs, selects, checkboxes, et cetera should implement fill.
        When you implement this method, it *MUST ALWAYS* return a boolean whether the value
        *was changed*. Otherwise it can break.
        Returns:
            A boolean whether it changed the value or not.
        """
        raise NotImplementedError(
            'Widget {} does not implement fill()!'.format(type(self).__name__))
    def read(self, *args, **kwargs):
        """Each object should implement read so it is easy to get the value of such object.
        When you implement this method, the exact return value is up to you but it *MUST* be
        consistent with what :py:meth:`fill` takes.
        """
        raise NotImplementedError(
            'Widget {} does not implement read()!'.format(type(self).__name__))
def _gen_locator_meth(loc):
def __locator__(self): # noqa
return loc
return __locator__
class ViewMetaclass(WidgetMetaclass):
    """metaclass that ensures nested widgets' functionality from the declaration point of view.
    When you pass a ``ROOT`` class attribute, it is used to generate a ``__locator__`` method on
    the view that ensures the view is resolvable.
    """
    def __new__(cls, name, bases, attrs):
        new_attrs = {}
        # Maps each widget/descriptor object back to the attribute name it was
        # declared under, so log messages etc. can identify widgets by name.
        desc_name_mapping = {}
        for base in bases:
            # Inherit the name mapping accumulated by base views first, so this
            # class's own declarations (below) can shadow them.
            for key, value in six.iteritems(getattr(base, '_desc_name_mapping', {})):
                desc_name_mapping[key] = value
        for key, value in six.iteritems(attrs):
            if inspect.isclass(value) and issubclass(value, View):
                # A nested View class declared inline gets wrapped in a descriptor.
                new_attrs[key] = WidgetDescriptor(value)
                desc_name_mapping[new_attrs[key]] = key
            elif isinstance(value, Widgetable):
                new_attrs[key] = value
                desc_name_mapping[value] = key
                # Also register the widget's children under the same attribute name.
                for widget in value.child_items:
                    if not isinstance(widget, (Widgetable, Widget)):
                        continue
                    desc_name_mapping[widget] = key
            else:
                new_attrs[key] = value
        if 'ROOT' in new_attrs:
            # For handling the root locator of the View
            rl = Locator(new_attrs['ROOT'])
            new_attrs['__locator__'] = _gen_locator_meth(rl)
        new_attrs['_desc_name_mapping'] = desc_name_mapping
        return super(ViewMetaclass, cls).__new__(cls, name, bases, new_attrs)
class View(six.with_metaclass(ViewMetaclass, Widget)):
    """View is a kind of abstract widget that can hold another widgets. Remembers the order,
    so therefore it can function like a form with defined filling order.
    It looks like this:
    .. code-block:: python
        class Login(View):
            user = SomeInputWidget('user')
            password = SomeInputWidget('<PASSWORD>')
            login = SomeButtonWidget('Log In')
            def a_method(self):
                do_something()
    The view is usually instantiated with an instance of
    :py:class:`widgetastic.browser.Browser`, which will then enable resolving of all of the
    widgets defined.
    Args:
        parent: A parent :py:class:`View` or :py:class:`widgetastic.browser.Browser`
        additional_context: If the view needs some context, for example - you want to check that
            you are on the page of user XYZ but you can also be on the page for user FOO, then
            you shall use the ``additional_context`` to pass in required variables that will allow
            you to detect this.
    """
    def __init__(self, parent, additional_context=None, logger=None):
        Widget.__init__(self, parent, logger=logger)
        self.context = additional_context or {}
        self._widget_cache = {}
    def flush_widget_cache(self):
        # Recursively ...
        for view in self._views:
            view._widget_cache.clear()
        self._widget_cache.clear()
    @staticmethod
    def nested(view_class):
        """Shortcut for :py:class:`WidgetDescriptor`
        Usage:
        .. code-block:: python
            class SomeView(View):
                some_widget = Widget()
                @View.nested
                class another_view(View):
                    pass
        Why? The problem is counting things. When you are placing widgets themselves on a view, they
        handle counting themselves and just work. But when you are creating a nested view, that is a
        bit of a problem. The widgets are instantiated, whereas the views are placed in a class and
        wait for the :py:class:`ViewMetaclass` to pick them up, but that happens after all other
        widgets have been instantiated into the :py:class:`WidgetDescriptor`s, which has the
        consequence of things being out of order. By wrapping the class into the descriptor we do
        the job of :py:meth:`Widget.__new__` which creates the :py:class:`WidgetDescriptor` if not
        called with a :py:class:`widgetastic.browser.Browser` or :py:class:`Widget` instance as the
        first argument.
        Args:
            view_class: A subclass of :py:class:`View`
        """
        return WidgetDescriptor(view_class)
    @classmethod
    def widget_names(cls):
        """Returns a list of widget names in the order they were defined on the class.
        Returns:
            A :py:class:`list` of :py:class:`Widget` instances.
        """
        result = []
        for key in dir(cls):
            value = getattr(cls, key)
            if isinstance(value, Widgetable):
                result.append((key, value))
        # _seq_id preserves declaration order across the whole process.
        return [name for name, _ in sorted(result, key=lambda pair: pair[1]._seq_id)]
    @property
    def _views(self):
        """Returns all sub-views of this view.
        Returns:
            A :py:class:`list` of :py:class:`View`
        """
        return [view for view in self if isinstance(view, View)]
    @property
    def is_displayed(self):
        """Overrides the :py:meth:`Widget.is_displayed`. The difference is that if the view does
        not have the root locator, it assumes it is displayed.
        Returns:
            :py:class:`bool`
        """
        try:
            return super(View, self).is_displayed
        except LocatorNotImplemented:
            # No ROOT locator -> nothing to check against, assume visible.
            return True
    def move_to(self):
        """Overrides the :py:meth:`Widget.move_to`. The difference is that if the view does
        not have the root locator, it returns None.
        Returns:
            :py:class:`selenium.webdriver.remote.webelement.WebElement` instance or ``None``.
        """
        try:
            return super(View, self).move_to()
        except LocatorNotImplemented:
            return None
    def fill(self, values):
        """Implementation of form filling.
        This method goes through all widgets defined on this view one by one and calls their
        ``fill`` methods appropriately.
        ``None`` values will be ignored.
        Args:
            values: A dictionary of ``widget_name: value_to_fill``.
        Returns:
            :py:class:`bool` if the fill changed any value.
        """
        was_change = False
        self.before_fill(values)
        # Iterate in declaration order so the form is filled top to bottom.
        for name in self.widget_names():
            if name not in values or values[name] is None:
                continue
            widget = getattr(self, name)
            try:
                if widget.fill(values[name]):
                    was_change = True
            except NotImplementedError:
                # Widget is read-only (no fill()); skip it silently.
                continue
        self.after_fill(was_change)
        return was_change
    def read(self):
        """Reads the contents of the view and presents them as a dictionary.
        Returns:
            A :py:class:`dict` of ``widget_name: widget_read_value`` where the values are retrieved
            using the :py:meth:`Widget.read`.
        """
        result = {}
        for widget_name in self.widget_names():
            widget = getattr(self, widget_name)
            try:
                value = widget.read()
            except (NotImplementedError, NoSuchElementException, DoNotReadThisWidget):
                # Unreadable or absent widgets are simply left out of the result.
                continue
            result[widget_name] = value
        return result
    def before_fill(self, values):
        """A hook invoked before the loop of filling is invoked.
        Args:
            values: The same values that are passed to :py:meth:`fill`
        """
        pass
    def after_fill(self, was_change):
        """A hook invoked after all the widgets were filled.
        Args:
            was_change: :py:class:`bool` signalizing whether the :py:meth:`fill` changed anything,
        """
        pass
    def __iter__(self):
        """Allows iterating over the widgets on the view."""
        for widget_attr in self.widget_names():
            yield getattr(self, widget_attr)
class ClickableMixin(object):
    """Mix-in that adds a single, logged ``click`` action to a widget."""
    @logged()
    def click(self):
        # Element resolution and the actual click are delegated to the browser.
        return self.browser.click(self)
class Text(Widget, ClickableMixin):
    """Widget representing anything readable as the text content of a tag.

    Args:
        locator: Locator of the object on the page.
    """
    def __init__(self, parent, locator, logger=None):
        Widget.__init__(self, parent, logger=logger)
        self.locator = locator
    def __locator__(self):
        return self.locator
    @property
    def text(self):
        """The current text content of the located element."""
        return self.browser.text(self)
    def read(self):
        # Reading a Text widget is just fetching its text.
        return self.text
class BaseInput(Widget):
    """This represents the bare minimum to interact with bogo-standard form inputs.

    Exactly one of ``name`` or ``id`` must be supplied.

    Args:
        name: If you want to look the input up by name, use this parameter, pass the name.
        id: If you want to look the input up by id, use this parameter, pass the id.

    Raises:
        TypeError: If neither or both of ``name``/``id`` are specified.
    """
    def __init__(self, parent, name=None, id=None, logger=None):
        # Exactly-one check: both-missing and both-given are equally wrong.
        # The message uses the concrete class name (the original hardcoded
        # "TextInput", which was misleading for other subclasses, e.g. Checkbox).
        if (name is None) == (id is None):
            raise TypeError(
                '{} must have either name= or id= specified but not both.'.format(
                    type(self).__name__))
        Widget.__init__(self, parent, logger=logger)
        self.name = name
        self.id = id
    def __locator__(self):
        # quote() renders the value as a properly escaped XPath string literal.
        if self.name is not None:
            id_attr = '@name={}'.format(quote(self.name))
        else:
            id_attr = '@id={}'.format(quote(self.id))
        return './/*[(self::input or self::textarea) and {}]'.format(id_attr)
class TextInput(BaseInput):
    """Bare-minimum interaction with a bogo-standard text input or textarea.

    Args:
        name: If you want to look the input up by name, use this parameter, pass the name.
        id: If you want to look the input up by id, use this parameter, pass the id.
    """
    @property
    def value(self):
        """Current value of the input as reported by the browser."""
        return self.browser.get_attribute('value', self)
    def read(self):
        return self.value
    def fill(self, value):
        present = self.value
        if value == present:
            # Already holds the requested text; nothing changed.
            return False
        if value.startswith(present):
            # Desired text extends what is already there: type only the tail,
            # the way a human user would.
            remainder = value[len(present):]
        else:
            # Otherwise wipe the field and type the whole value from scratch.
            self.browser.clear(self)
            remainder = value
        self.browser.send_keys(remainder, self)
        return True
class Checkbox(BaseInput, ClickableMixin):
    """Bog-standard form checkbox widget.

    Args:
        name: If you want to look the input up by name, use this parameter, pass the name.
        id: If you want to look the input up by id, use this parameter, pass the id.
    """
    @property
    def selected(self):
        """Whether the checkbox is currently ticked."""
        return self.browser.is_selected(self)
    def read(self):
        return self.selected
    def fill(self, value):
        desired = bool(value)
        if desired == self.selected:
            # Already in the requested state; report no change.
            return False
        self.click()
        # Verify the click actually toggled the state.
        if self.selected != desired:
            # TODO: More verbose here
            raise WidgetOperationFailed('Failed to set the checkbox to requested value.')
        return True
class TableColumn(Widget, ClickableMixin):
    """A single ``<td>`` cell inside a table row."""
    def __init__(self, parent, position, logger=None):
        Widget.__init__(self, parent, logger=logger)
        self.position = position
    def __locator__(self):
        # XPath positions are 1-based while ``position`` is a 0-based index.
        cell_locator = './td[{}]'.format(self.position + 1)
        return self.browser.element(cell_locator, parent=self.parent)
    @property
    def text(self):
        """Visible text of this cell."""
        return self.browser.text(self)
class TableRow(Widget, ClickableMixin):
    """Represents a row in the table.
    If subclassing and also changing the Column class, do not forget to set the Column to the new
    class.
    Args:
        index: Position of the row in the table.
    """
    # Subclasses may override this to use a custom cell class.
    Column = TableColumn
    def __init__(self, parent, index, logger=None):
        Widget.__init__(self, parent, logger=logger)
        self.index = index
    def __locator__(self):
        # ROW_AT_INDEX uses 1-based XPath positions; ``index`` is 0-based.
        loc = self.parent.ROW_AT_INDEX.format(self.index + 1)
        return self.browser.element(loc, parent=self.parent)
    def __getitem__(self, item):
        # int -> positional cell; str -> exact (non-attributized) header name.
        if isinstance(item, int):
            return self.Column(self, item, logger=self.logger)
        elif isinstance(item, six.string_types):
            return self[self.parent.header_index_mapping[item]]
        else:
            raise TypeError('row[] accepts only integers and strings')
    def __getattr__(self, attr):
        # Allows ``row.column_name`` access via attributized header names.
        try:
            return self[self.parent.attributized_headers[attr]]
        except KeyError:
            raise AttributeError('Cannot find column {} in the table'.format(attr))
    def __dir__(self):
        # NOTE(review): ``super().__dir__()`` exists only on Python 3;
        # under Python 2 (this file uses six) object has no __dir__ — confirm.
        result = super(TableRow, self).__dir__()
        result.extend(self.parent.attributized_headers.keys())
        return sorted(result)
    def __iter__(self):
        # Yields (header_text_or_None, Column) pairs in column order.
        for i, header in enumerate(self.parent.headers):
            yield header, self[i]
    # TODO: read/fill? How would that work?
class Table(Widget):
    """Basic table-handling class.
    Usage is as follows assuming the table is instantiated as ``view.table``:
    .. code-block:: python
        # List the headers
        view.table.headers  # => (None, 'something', ...)
        # Access rows by their position
        view.table[0]  # => gives you the first row
        # Or you can iterate through rows simply
        for row in view.table:
            do_something()
        # You can filter rows
        # The column names are "attributized"
        view.table.rows(column_name='asdf')  # All rows where asdf is in "Column Name"
        # And with Django fashion:
        view.table.rows(column_name__contains='asdf')
        view.table.rows(column_name__startswith='asdf')
        view.table.rows(column_name__endswith='asdf')
        # You can put multiple filters together.
        # And you can of course query a single row
        row = view.table.row(column_name='asdf')
        # When you have a row, you can do these things.
        row[0]  # => gives you the first column cell in the row
        row['Column Name']  # => Gives you the column that is named "Column Name". Non-attributized
        row.column_name  # => Gives you the column whose attributized name is "column_name"
        # Basic row column can give you text
        assert row.column_name.text == 'some text'
        # Or you can click at it
        assert row.column_name.click()
    If you subclass Table, Row, or Column, do not forget to update the Row in Table and Column in
    Row in order for the classes to use the correct class.
    Args:
        locator: A locator to the table ``<table>`` tag.
    """
    # XPath fragments relative to the <table> element; the "|" alternatives
    # cover tables both with and without an explicit <thead>/<tbody>.
    HEADERS = './thead/tr/th|./tr/th'
    ROWS = './tbody/tr[./td]|./tr[not(./th) and ./td]'
    ROW_AT_INDEX = './tbody/tr[{0}]|./tr[not(./th)][{0}]'
    # Subclasses may override this to use a custom row class.
    Row = TableRow
    def __init__(self, parent, locator, logger=None):
        Widget.__init__(self, parent, logger=logger)
        self.locator = locator
    def __locator__(self):
        return self.locator
    def clear_cache(self):
        # Drops the cached_property values so headers are re-read from the page.
        for item in [
                'headers', 'attributized_headers', 'header_index_mapping', 'index_header_mapping']:
            try:
                delattr(self, item)
            except AttributeError:
                pass
    @cached_property
    def headers(self):
        # Empty header cells are stored as None to keep positions aligned.
        result = []
        for header in self.browser.elements(self.HEADERS, parent=self):
            result.append(self.browser.text(header).strip() or None)
        without_none = [x for x in result if x is not None]
        if len(without_none) != len(set(without_none)):
            self.logger.warning(
                'Detected duplicate headers in %r. Correct functionality is not guaranteed',
                without_none)
        return tuple(result)
    @cached_property
    def attributized_headers(self):
        # e.g. {'column_name': 'Column Name'}
        return {attributize_string(h): h for h in self.headers if h is not None}
    @cached_property
    def header_index_mapping(self):
        # Header text -> 0-based column index.
        return {h: i for i, h in enumerate(self.headers) if h is not None}
    @cached_property
    def index_header_mapping(self):
        # 0-based column index -> header text (inverse of header_index_mapping).
        return {i: h for h, i in self.header_index_mapping.items()}
    def __getitem__(self, at_index):
        if not isinstance(at_index, int):
            raise TypeError('table indexing only accepts integers')
        return self.Row(self, at_index, logger=self.logger)
    def row(self, **filters):
        # First row matching the filters; raises IndexError if none match.
        return list(self.rows(**filters))[0]
    def __iter__(self):
        return self.rows()
    def _get_number_preceeding_rows(self, row_el):
        """This is a sort of trick that helps us remove stale element errors.
        We know that correct tables only have ``<tr>`` elements next to each other. We do not want
        to pass around webelements because they can get stale. Therefore this trick will give us the
        number of elements that precede this element, effectively giving us the index of the row.
        How simple.
        """
        return self.browser.execute_script(
            """\
            var prev = []; var element = arguments[0];
            while (element.previousElementSibling)
                prev.push(element = element.previousElementSibling);
            return prev.length;
            """, row_el)
    def rows(self, **filters):
        # Returns a generator of Row objects, optionally filtered (see class doc).
        if not filters:
            return self._all_rows()
        else:
            return self._filtered_rows(**filters)
    def _all_rows(self):
        for row_pos in range(len(self.browser.elements(self.ROWS, parent=self))):
            yield self.Row(self, row_pos, logger=self.logger)
    def _filtered_rows(self, **filters):
        # Pre-process the filters
        # Group (method, value) pairs by 0-based column index; a filter key of
        # the form "column__method" splits into column and comparison method.
        processed_filters = defaultdict(list)
        for filter_column, filter_value in six.iteritems(filters):
            if '__' in filter_column:
                column, method = filter_column.rsplit('__', 1)
            else:
                column = filter_column
                method = None
            column_index = self.header_index_mapping[self.attributized_headers[column]]
            processed_filters[column_index].append((method, filter_value))
        # Build the query
        # One './td[i][cond and cond]' predicate per filtered column; all must hold.
        query_parts = []
        for column_index, matchers in six.iteritems(processed_filters):
            col_query_parts = []
            for method, value in matchers:
                if method is None:
                    # equals
                    q = 'normalize-space(.)=normalize-space({})'.format(quote(value))
                elif method == 'contains':
                    # in
                    q = 'contains(normalize-space(.), normalize-space({}))'.format(quote(value))
                elif method == 'startswith':
                    # starts with
                    q = 'starts-with(normalize-space(.), normalize-space({}))'.format(quote(value))
                elif method == 'endswith':
                    # ends with
                    # This needs to be faked since selenium does not support this feature.
                    q = (
                        'substring(normalize-space(.), '
                        'string-length(normalize-space(.)) - string-length({0}) + 1)={0}').format(
                            'normalize-space({})'.format(quote(value)))
                else:
                    raise ValueError('Unknown method {}'.format(method))
                col_query_parts.append(q)
            query_parts.append(
                './td[{}][{}]'.format(column_index + 1, ' and '.join(col_query_parts)))
        query = './/tr[{}]'.format(' and '.join(query_parts))
        # Resolve matched rows back to stable positional indexes (see
        # _get_number_preceeding_rows) instead of keeping stale-prone elements.
        for row_element in self.browser.elements(query, parent=self):
            yield self.Row(self, self._get_number_preceeding_rows(row_element), logger=self.logger)
|
[
"six.iterkeys",
"inspect.isclass",
"copy.copy",
"collections.defaultdict",
"smartloc.Locator",
"wait_for.wait_for",
"six.wraps",
"six.iteritems",
"six.with_metaclass"
] |
[((5075, 5118), 'six.with_metaclass', 'six.with_metaclass', (['WidgetMetaclass', 'object'], {}), '(WidgetMetaclass, object)\n', (5093, 5118), False, 'import six\n'), ((11697, 11738), 'six.with_metaclass', 'six.with_metaclass', (['ViewMetaclass', 'Widget'], {}), '(ViewMetaclass, Widget)\n', (11715, 11738), False, 'import six\n'), ((965, 982), 'six.wraps', 'six.wraps', (['method'], {}), '(method)\n', (974, 982), False, 'import six\n'), ((4565, 4585), 'six.iteritems', 'six.iteritems', (['attrs'], {}), '(attrs)\n', (4578, 4585), False, 'import six\n'), ((8540, 8604), 'wait_for.wait_for', 'wait_for', (['(lambda : self.is_displayed)'], {'timeout': 'timeout', 'delay': '(0.2)'}), '(lambda : self.is_displayed, timeout=timeout, delay=0.2)\n', (8548, 8604), False, 'from wait_for import wait_for\n'), ((10756, 10776), 'six.iteritems', 'six.iteritems', (['attrs'], {}), '(attrs)\n', (10769, 10776), False, 'import six\n'), ((27971, 27988), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (27982, 27988), False, 'from collections import defaultdict\n'), ((28032, 28054), 'six.iteritems', 'six.iteritems', (['filters'], {}), '(filters)\n', (28045, 28054), False, 'import six\n'), ((28497, 28529), 'six.iteritems', 'six.iteritems', (['processed_filters'], {}), '(processed_filters)\n', (28510, 28529), False, 'import six\n'), ((1904, 1921), 'copy.copy', 'copy', (['self.kwargs'], {}), '(self.kwargs)\n', (1908, 1921), False, 'from copy import copy\n'), ((3448, 3496), 'six.iterkeys', 'six.iterkeys', (['self._widget.browser.extra_objects'], {}), '(self._widget.browser.extra_objects)\n', (3460, 3496), False, 'import six\n'), ((11457, 11483), 'smartloc.Locator', 'Locator', (["new_attrs['ROOT']"], {}), "(new_attrs['ROOT'])\n", (11464, 11483), False, 'from smartloc import Locator\n'), ((10793, 10815), 'inspect.isclass', 'inspect.isclass', (['value'], {}), '(value)\n', (10808, 10815), False, 'import inspect\n')]
|
#!/usr/bin/env python3
"""Packaging script for the KB lab client."""
# distutils is deprecated (PEP 632) and removed entirely in Python 3.12, so
# ``from distutils.core import setup`` breaks there.  setuptools.setup() is a
# drop-in superset (the file already depended on setuptools for
# include_package_data support), so use it directly.
from setuptools import setup

setup(
    name='kblab-client',
    version='0.0.16a0',
    description='KB lab client',
    author='<NAME>',
    author_email='<EMAIL>',
    url='https://github.com/kungbib/kblab',
    install_requires=[
        'requests',
        'pyyaml',
        'lxml',
        'htfile',
    ],
    packages=['kblab'],
    include_package_data=True,
)
|
[
"distutils.core.setup"
] |
[((76, 351), 'distutils.core.setup', 'setup', ([], {'name': '"""kblab-client"""', 'version': '"""0.0.16a0"""', 'description': '"""KB lab client"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'url': '"""https://github.com/kungbib/kblab"""', 'install_requires': "['requests', 'pyyaml', 'lxml', 'htfile']", 'packages': "['kblab']", 'include_package_data': '(True)'}), "(name='kblab-client', version='0.0.16a0', description='KB lab client',\n author='<NAME>', author_email='<EMAIL>', url=\n 'https://github.com/kungbib/kblab', install_requires=['requests',\n 'pyyaml', 'lxml', 'htfile'], packages=['kblab'], include_package_data=True)\n", (81, 351), False, 'from distutils.core import setup\n')]
|
import inspect
from discord.ext import commands
class PromptCancelled(Exception):
    """Raised when the user aborts an interactive prompt by replying 'cancel'."""
class Context(commands.Context):
    """Custom command context adding per-guild config access and interactive prompts."""
    def get_config(self):
        """Return the guild's config merged over defaults (defaults only in DMs)."""
        # Default Config
        config = {'prefix': 'm!', 'giveawayrole': None}
        if not self.guild:
            # No guild (DM context): only the defaults apply.
            return config
        config.update(self.bot.guild_config.get(self.guild.id, {}))
        return config
    async def update_config(self, data):
        """Persist ``data`` for this guild in the DB and mirror it in the in-memory cache."""
        await self.bot.db.guilds.update_one(
            {"guild": self.guild.id},
            {"$set": data},
            upsert=True
        )
        config = self.bot.guild_config.get(self.guild.id, {})
        config.update(data)
        self.bot.guild_config[self.guild.id] = config
    async def prompt(self, question, *, converter=str, timeout=60):
        """Ask ``question`` and loop until a reply converts successfully.

        Raises PromptCancelled if the user replies 'cancel'.
        NOTE(review): wait_for raises asyncio.TimeoutError on timeout, which
        propagates to the caller — confirm that is the intended behavior.
        """
        def check(msg):
            # Only accept replies from the invoking user in the same channel.
            return msg.author == self.author and msg.channel == self.channel
        await self.send(question)
        while True:
            res = await self.bot.wait_for('message', timeout=timeout, check=check)
            if res.content.lower() == 'cancel':
                raise PromptCancelled()
            # Must handle timeout errors
            try:
                # Converter classes implementing commands.Converter are awaited;
                # anything else is treated as a plain callable (default: str).
                if inspect.isclass(converter) and issubclass(converter, commands.Converter):
                    return await converter().convert(self, res.content)
                return converter(res.content)
            except commands.BadArgument as err:
                # Report the conversion failure and re-prompt.
                await self.send(err)
    async def ask(self, question, *, timeout=60):
        """Ask a yes/no question via ✅/❌ reactions; return True for ✅."""
        msg = await self.send(question)
        await msg.add_reaction('✅')
        await msg.add_reaction('❌')
        def check(reaction, user):
            # Only the invoking user's reaction on this exact message counts.
            return user == self.author and reaction.message.id == msg.id and str(reaction.emoji) in ['✅', '❌']
        reaction, _ = await self.bot.wait_for('reaction_add', timeout=timeout, check=check)
        # Must handle timeout errors
        return str(reaction.emoji) == '✅'
|
[
"inspect.isclass"
] |
[((1184, 1210), 'inspect.isclass', 'inspect.isclass', (['converter'], {}), '(converter)\n', (1199, 1210), False, 'import inspect\n')]
|
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
import pytest
from ..geohex import GeoHex, code2hex
# List of geohex test coordinate cases.
# The upstream geohex tests apparently date from when the library had a bug
# where, for longitudes near the antimeridian, east longitudes were computed
# as if they were west longitudes (and vice versa); every affected case below
# has been corrected.
# (Each fix was verified by checking that treating east longitude as west
# (< -180) or west as east (> 180) reproduces the upstream expected values.)
geohex_testcases = [
['XM', (32.70505659484853, 140), 0],
['OY', (0, 0), 0],
['GI', (-11.469624951364336, -180), 0],
['QU', (11.469624951364336, -180), 0],
['GH', (-22.492949287972582, -160), 0],
['EU', (-77.56429515669195, -180), 0],
['CI', (-85.44459258668755, 120), 0],
['BV', (-85.44459258668755, -80), 0],
['Fb', (-49.88876303236152, -99.99999999999999), 0],
['SV', (67.44237810333283, -80), 0],
['TO', (84.42893127178179, -180), 0],
['TK', (81.67153816651363, -180), 0],
['bD', (83.1877023714706, -40.000000000000036), 0],
['ZA', (74.81716117480471, 119.99999999999997), 0],
['aX', (77.56429515669195, 19.99999999999999), 0],
['PZ', (41.870676400014105, 40), 0],
['OK', (-49.88876303236152, 60), 0],
['CZ3', (-78.36652525170464, -13.333333333333325), 1],
['CI8', (-84.79037346855579, 120), 1],
['DO0', (-85.12842640497867, -180), 1],
['EX2', (-74.81716117480471, -173.33333333333337), 1],
['LK1', (-78.36652525170464, 173.33333333333337), 1], # 本家だとEU1
['OK4', (-49.88876303236154, 60), 1],
['GI3', (-15.214003464478482, -173.33333333333334), 1],
['Mc1', (-15.214003464478495, 173.33333333333337), 1], # 本家だとGI1
['QU0', (3.846110061441703, -180), 1],
['QU7', (15.214003464478509, -173.33333333333337), 1],
['GH8', (-15.214003464478509, -160), 1],
['OY0', (-7.674947601297844, 0), 1],
['Fb3', (-52.305221672747535, -93.33333333333333), 1],
['XM4', (32.70505659484853, 140), 1],
['PZ0', (35.884127591325736, 40), 1],
['SW6', (71.48066686818822, -86.66666666666666), 1],
['SV3', (65.9192675132544, -73.33333333333333), 1],
['aX0', (75.79318460548843, 20.000000000000018), 1],
['aX8', (79.11767176735405, 20.000000000000018), 1],
['aZ2', (81.67153816651366, -33.33333333333335), 1],
['TO4', (84.42893127178182, -180), 1],
['aB6', (83.1877023714706, 173.33333333333337), 1],
['aA1', (81.09508214542126, 173.33333333333337), 1], # 本家だとTK1
['TK3', (81.09508214542126, -173.33333333333334), 1],
['ZA4', (74.81716117480471, 120.00000000000001), 1],
['PA0', (3.8461100614416903, -60), 1],
['BV8', (-84.79037346855579, -80.00000000000003), 1],
['CI76', (-85.12842640497867, 131.1111111111111), 2],
['CI55', (-85.01824615280158, 111.11111111111113), 2],
['DO07', (-85.01824615280158, -177.77777777777774), 2],
['KE05', (-85.01824615280158, 177.77777777777774), 2], # 本家だとDO05
['EU06', (-79.11767176735404, -175.55555555555554), 2],
['LK13', (-78.62242518007724, 175.55555555555554), 2], # 本家だとEU13
['GH32', (-26.001626863364294, -157.77777777777777), 2],
['OK51', (-48.20715710294776, 51.1111111111111), 2],
['GI47', (-10.20954347278331, -177.77777777777774), 2],
['Mc53', (-8.944451331547656, 175.55555555555554), 2], # 本家だとGI53
['QU08', (6.40164208700373, -180), 2],
['OC58', (6.401642087003718, -166.66666666666666), 2],
['XL60', (20.10241692335465, 173.33333333333334), 2],
['XM56', (35.884127591325765, 137.77777777777777), 2],
['PS62', (22.492949287972593, 128.88888888888889), 2],
['XX03', (43.75284033840225, 142.22222222222223), 2],
['PZ38', (40.9081556333403, 46.66666666666667), 2],
['PZ04', (35.884127591325736, 40), 2],
['PZ18', (40.9081556333403, 33.33333333333333), 2],
['OU88', (-12.72410877326099, 0), 2],
['OY53', (2.5651438499496004, -4.444444444444445), 2],
['PA04', (3.846110061441703, -60), 2],
['Fb17', (-51.51375829311735, -104.44444444444444), 2],
['Fb34', (-52.305221672747535, -93.33333333333333), 2],
['Fb86', (-44.67241139341943, -95.55555555555556), 2],
['GH58', (-16.448311865961884, -166.66666666666666), 2],
['TK63', (81.48362471150257, -164.44444444444443), 2],
['TO07', (83.77015010087217, -177.77777777777774), 2],
['aE42', (84.42893127178182, 175.55555555555554), 2], # 本家だとTO42
['aA02', (80.47907085748827, 175.55555555555554), 2], # 本家だとTK02
['bD31', (82.55108111056764, -35.555555555555536), 2],
['bE73', (84.67256857383902, -51.11111111111112), 2],
['bb33', (85.0182461528016, 88.88888888888887), 2],
['TO84', (85.12842640497868, -180), 2],
['TO47', (84.55210762141155, -177.77777777777774), 2],
['aB61', (83.03381226166323, 171.11111111111111), 2],
['ZA08', (73.41342238844585, 120.00000000000003), 2],
['aX55', (78.62242518007724, 11.111111111111107), 2],
['aX04', (75.79318460548843, 19.99999999999999), 2],
['YF58', (76.40945269309518, 33.33333333333335), 2],
['SV40', (66.43743994246994, -80), 2],
['RX15', (48.20715710294777, -108.88888888888889), 2],
['OU40', (-24.842889690285062, 0), 2],
['BV56', (-85.12842640497867, -82.22222222222224), 2],
['CI771', (-85.055246359875, 128.14814814814815), 3],
['KE045', (-85.09197243860311, 179.25925925925932), 3], # 本家だとDO045
['KE056', (-85.01824615280158, 179.25925925925924), 3], # 本家だとDO056
['DO077', (-84.9809697867981, -177.03703703703704), 3],
['BV778', (-84.9434152165899, -71.11111111111111), 3],
['BV568', (-85.055246359875, -82.22222222222224), 3],
['CZ335', (-78.53774837153644, -11.851851851851855), 3],
['LK570', (-76.60898893146053, 175.55555555555554), 3], # 本家だとEU570
['EU868', (-75.58174046495692, -175.55555555555554), 3],
['OK468', (-49.33454485016383, 64.44444444444446), 3],
['OY443', (-0.4276628949350651, 0.7407407407407408), 3],
['OY004', (-10.209543472783297, 0), 3],
['Fb526', (-47.34497008204646, -109.62962962962963), 3],
['GH588', (-15.626274156012219, -166.66666666666666), 3],
['GI440', (-12.306605159163768, -180), 3],
['GI622', (-11.469624951364336, -172.59259259259258), 3],
['QU340', (6.826461637179052, -173.33333333333334), 3],
['XK026', (3.8461100614416903, 177.037037037037), 3], # 本家だとQU026
['XM454', (33.77808654808929, 137.77777777777777), 3],
['PS624', (22.492949287972593, 128.88888888888889), 3],
['XX038', (44.36748324231291, 142.22222222222223), 3],
['XU630', (40.25857226491307, 135.55555555555557), 3],
['PZ342', (38.940540734971115, 45.18518518518519), 3],
['PZ580', (45.87616409528786, 33.33333333333332), 3],
['OY864', (7.674947601297857, 4.444444444444445), 3],
['OY145', (-3.419303491595713, -7.4074074074074066), 3],
['PA032', (2.5651438499496133, -59.25925925925927), 3],
['OI758', (27.90753234300438, -115.55555555555554), 3],
['SV428', (67.7682415504757, -84.44444444444446), 3],
['SV385', (67.11199113847319, -74.07407407407408), 3],
['aX355', (77.09532905324006, 23.70370370370368), 3],
['aX568', (78.53774837153645, 17.777777777777768), 3],
['bD744', (83.62936132439059, -33.33333333333335), 3],
['bD515', (83.53374630039932, -49.629629629629626), 3],
['bb337', (85.05524635987501, 89.62962962962966), 3],
['TO808', (84.98096978679811, -180), 3],
['aE428', (84.51135351620655, 175.55555555555554), 3], # 本家だとTO428
['TO474', (84.55210762141155, -177.7777777777778), 3],
['TO073', (83.72356825638371, -177.03703703703704), 3],
['aB648', (83.28841126938276, 173.33333333333334), 3],
['YG168', (70.50590211456444, 97.77777777777777), 3],
['BV7511', (-85.06751874496823, -76.54320987654322), 4],
['BV7541', (-85.03061013890785, -75.80246913580245), 4],
['KE0532', (-85.055246359875, 178.02469135802468), 4], # 本家だとDO0532
['DO3203', (-84.88020043270879, -177.53086419753086), 4],
['EU6235', (-77.6255431823326, -170.6172839506173), 4],
['LK1425', (-78.3377438281995, 171.60493827160496), 4], # 本家だとEU1425
['OK7172', (-47.92134973633546, 64.69135802469134), 4],
['CZ0737', (-78.92765772010968, -16.790123456790113), 4],
['Fb0200', (-55.251983655277314, -104.44444444444443), 4],
['Fb8608', (-45.07649961065172, -95.55555555555556), 4],
['GH4405', (-23.149924898842006, -160.2469135802469), 4],
['Mc2651', (-11.190070126580682, 170.12345679012347), 4], # 本家だとGI2651
['OC2121', (-1.4254091619935396, -177.28395061728392), 4],
['QU7871', (17.946266278655553, -172.83950617283952), 4],
['XK4212', (11.050191029226413, 174.32098765432102), 4], # 本家だとQU4212
['OK4362', (-50.70830056151141, 63.20987654320988), 4],
['OX2538', (-10.34981081126957, 5.185185185185185), 4],
['OY5663', (3.7038637783779045, -0.4938271604938276), 4],
['OG6663', (-0.14255547494137086, -60.49382716049383), 4],
['PZ0631', (35.420785913410434, 44.938271604938265), 4],
['SV8518', (70.60082366755294, -82.96296296296296), 4],
['TK3246', (81.09508214542126, -177.28395061728395), 4],
['aA1825', (81.5047101156331, 171.60493827160494), 4], # 本家だとTK1825
['aB6727', (83.35472580926526, 174.32098765432096), 4],
['TR1202', (85.05524635987501, -171.60493827160494), 4],
['bD4054', (82.92930196274219, -40.74074074074073), 4],
['aX4486', (77.74714967960598, 20.493827160493833), 4],
['ZA5685', (76.03603270206148, 117.53086419753089), 4],
['XM4881', (35.420785913410434, 139.7530864197531), 4],
['XU6302', (40.25857226491307, 135.06172839506175), 4],
['XX0337', (43.546541711503515, 143.2098765432099), 4],
['XM6425', (32.82493120891863, 151.60493827160496), 4],
['PS6270', (22.62459744062219, 129.62962962962962), 4],
['BV80302', (-85.07976074632226, -79.42386831275724), 5],
['BV80373', (-85.04704785300429, -78.93004115226337), 5],
['DO08354', (-84.93083484773156, -179.50617283950612), 5],
['KE05758', (-84.9601406144117, 178.27160493827157), 5], # 本家だとDO05758
['EU31135', (-78.72508109998212, -176.13168724279834), 5],
['LK45042', (-77.47186327705677, 177.6131687242798), 5], # 本家だとEU45042
['CZ35087', (-78.21221081913176, -15.47325102880659), 5],
['OK06445', (-54.569097632502626, 64.36213991769546), 5],
['Fb80457', (-46.33739563508728, -100.16460905349794), 5],
['GH80422', (-17.674819831740678, -160.65843621399173), 5],
['GI74753', (-7.156617561633091, -172.75720164609046), 5],
['Mc80240', (-6.496077810333859, 178.51851851851848), 5], # 本家だとGI80240
['XK01265', (2.612613889744312, 176.70781893004116), 5], # 本家だとQU01265
['QU86344', (18.487906116222756, -174.8148148148148), 5],
['OY77334', (4.556967978746024, 9.87654320987654), 5],
['OY15454', (-2.422723288518337, -9.1358024691358), 5],
['PA04170', (3.466736260749132, -60.493827160493815), 5],
['PS62113', (21.921032689851973, 127.98353909465017), 5],
['PS68342', (24.454191557293267, 133.90946502057614), 5],
['XM78145', (37.63518586440209, 145.84362139917695), 5],
['XM48257', (34.99370111793707, 138.35390946502056), 5],
['XX03156', (43.546541711503515, 141.3991769547325), 5],
['PZ47685', (43.062423615808804, 43.62139917695472), 5],
['SV40654', (66.49436163127307, -78.7654320987654), 5],
['OI71873', (25.702285927321487, -115.22633744855965), 5],
['aX40264', (76.99947013952956, 19.012345679012363), 5],
['ZA44552', (74.96578743743133, 118.84773662551439), 5],
['bD47117', (83.27729497862227, -38.68312757201646), 5],
['aA51615', (82.02192059895748, 172.26337448559673), 5], # 本家だとTK51615
['TK71615', (82.02192059895748, -174.40329218106993), 5],
['TO38458', (84.32651565973765, -173.58024691358025), 5],
['aE58484', (85.04294351619683, 173.33333333333337), 5], # 本家だとTO58484
['BV553581', (-85.03609541431689, -88.4224965706447), 6],
['CI802417', (-85.02099635433396, 118.46364883401921), 6],
['CI750110', (-85.11090833582553, 124.11522633744853), 6],
['KE018862', (-85.14051760404581, 177.8875171467764), 6], # 本家だとDO018862
['KE053505', (-85.04978219412602, 178.24417009602195), 6], # 本家だとDO053505
['DO074664', (-85.0182461528016, -177.1193415637861), 6],
['CZ351822', (-78.13429857495768, -16.51577503429356), 6],
['LK412420', (-77.84425402866793, 176.13168724279836), 6], # 本家だとEU412420
['EU708101', (-77.15180611673658, -173.60768175582993), 6],
['OK847038', (-44.58222868649629, 60.82304526748971), 6],
['GH501658', (-21.626844357539934, -166.99588477366254), 6],
['Mc842644', (-3.8461100614416903, 179.01234567901233), 6], # 本家だとGI842644
['GI836575', (-4.9200310051179015, -176.48834019204392), 6],
['QU316588', (6.669163764467287, -174.32098765432102), 6],
['XK175485', (9.476049692387006, 174.78737997256516), 6], # 本家だとQU175485
['Fb711277', (-48.449371535419765, -96.68038408779151), 6],
['OY728344', (4.556967978746024, 2.469135802469136), 6],
['PA016372', (2.470198516809545, -60.466392318244175), 6],
['QU408880', (10.131590386552373, -180), 6],
['XM442337', (32.55832421354656, 138.87517146776406), 6],
['PS387785', (21.935726236625015, 127.62688614540467), 6],
['XX037037', (43.83287620143813, 143.07270233196158), 6],
['PZ173676', (39.63929461614907, 36.927297668038406), 6],
['SV321726', (65.8026708623787, -78.38134430727023), 6],
['PC828583', (39.773338334546146, -104.6639231824417), 6],
['OI713375', (24.382077991931016, -114.51303155006859), 6],
['bD080050', (82.4038568826114, -40.082304526749), 6],
['aX817100', (78.90329568451087, 18.271604938271615), 6],
['ZA712478', (75.49471609617443, 123.04526748971193), 6],
['aA166315', (81.06807090685717, 179.39643347050756), 6], # 本家だとTK166315 # noqa
['TK873482', (82.83509514206584, -177.09190672153633), 6],
['TO433402', (84.2508548924535, -177.09190672153633), 6],
['aE586342', (85.00585148249067, 175.00685871056243), 6], # 本家だとTO586342 # noqa
['bb335332', (85.03883578676745, 88.42249657064474), 6],
['BV8032788', (-85.04750368112704, -79.67078189300406), 7],
['CI7714524', (-85.05114879997076, 128.01097393689983), 7],
['KE0514686', (-85.05251502947527, 177.21993598536804), 7], # 本家だとDO0514686 # noqa
['KE0517300', (-85.05069330650984, 177.3662551440329), 7], # 本家だとDO0517300 # noqa
['DO0732687', (-85.05205966144965, -177.35711019661636), 7],
['DO0732768', (-85.05023777122953, -177.39368998628257), 7],
['EU4031833', (-78.20681658015287, -179.46959304983992), 7],
['EU8072131', (-76.32483001774261, -179.81710105166894), 7],
['LK8255485', (-75.6434006236876, 174.55875628715137), 7], # 本家だとEU8255485 # noqa
['CZ3800810', (-78.06134107808937, -13.360768175582997), 7],
['OK1624776', (-52.26646412714779, 56.424325560128025), 7],
['OK7464515', (-47.31992145629309, 68.02926383173298), 7],
['Fb5316746', (-48.45987650641166, -104.59076360310927), 7],
['Fb4033527', (-51.834628009147735, -99.14037494284406), 7],
['GH4540858', (-21.454956946828297, -162.24965706447188), 7],
['GI4351804', (-12.383971425939663, -178.76543209876544), 7],
['GI8387484', (-4.098933172479513, -177.53086419753086), 7],
['QU0706771', (4.330617601639888, -177.18335619570186), 7],
['QU4884047', (14.714128073890608, -179.99085505258344), 7],
['XK8251271', (19.172525248647407, 174.42158207590305), 7], # 本家だとQU8251271 # noqa
['PA0565644', (5.267123667292918, -60.82304526748971), 7],
['OY8320288', (6.16022759188225, 0.576131687242797), 7],
['OY3556486', (-2.106180802162651, 4.215820759030636), 7],
['PZ8016814', (45.333181571987915, 39.725651577503434), 7],
['PZ1746604', (39.907121529314956, 36.21399176954731), 7],
['PS6242083', (22.42951839128782, 128.4042066758116), 7],
['XM5643552', (35.819935413610864, 137.89666209419295), 7],
['XU6303820', (40.214236183804, 135.7475994513032), 7],
['XM7851800', (38.20164704742394, 145.67901234567904), 7],
['XX0345653', (43.86333785879606, 142.12162780064014), 7],
['XM6306347', (30.851511008998052, 156.14083219021487), 7],
['XK8827707', (21.454956946828297, 178.85688157293094), 7], # 本家だとQU8827707 # noqa
['PZ4253332', (42.03559230187854, 35.15317786922726), 7],
['OI8776718', (30.92400736452902, -116.4883401920439), 7],
['PF2148656', (41.135210942868746, -95.40009144947416), 7],
['ZA4587731', (75.40983806367858, 118.12528577960678), 7],
['SV4335020', (66.7915979463581, -77.33882030178327), 7],
['SV5843218', (69.72847884541945, -86.61179698216738), 7],
['aX0682573', (76.01563273022674, 23.90489254686786), 7],
['aX8100870', (78.67023811301752, 17.80521262002741), 7],
['bD4424804', (83.19521359504485, -41.48148148148148), 7],
['bE6100113', (84.18168672552544, -48.98948331047094), 7],
['aA5027217', (81.87401287786506, 171.91586648376767), 7], # 本家だとTK5027217 # noqa
['TK7070414', (81.87326653740467, -172.62002743484223), 7],
['TO6240542', (84.40581802575272, -171.21170553269317), 7],
['TO7775738', (84.95967676595735, -170.50754458161862), 7],
['aE5821675', (85.00768966906308, 171.78783721993594), 7], # 本家だとTO5821675 # noqa
['bb3371862', (85.05114879997076, 89.41929583904891), 7],
['BV54870250', (-85.05099697346348, -86.48376771833561), 8],
['BV78253862', (-84.89431593312257, -74.9672306050907), 8],
['CI57758513', (-84.95936750999883, 116.01585124218866), 8],
['CI75600226', (-85.05114879997076, 125.85886297820451), 8],
['KE05603615', (-85.04735174306921, 179.38424020728547), 8], # 本家だとDO05603615 # noqa
['DO07152160', (-85.04461606279176, -178.93918609967997), 8],
['LK41063674', (-78.0252444701795, 178.41792409693645), 8], # 本家だとEU41063674 # noqa
['EU72201265', (-76.78353577417859, -179.38119189147997), 8],
['CZ34762511', (-78.27818254539632, -12.30300259106843), 8],
['Fb38241503', (-50.73615525197652, -94.92150586800793), 8],
['OK48511367', (-48.05798249478465, 58.97881420515164), 8],
['GI32042116', (-16.05801314654258, -177.9728699893309), 8],
['GI71565064', (-8.50607873048697, -175.86648376771834), 8],
['GH45442518', (-21.289426163285537, -162.42341106538635), 8],
['OY33228746', (-5.014711132428624, 6.947111720774272), 8],
['PA03278283', (2.8112649508743917, -59.06416704770615), 8],
['XK42174351', (11.177984463616795, 175.0769699740893), 8], # 本家だとQU42174351 # noqa
['QU47122108', (12.28425065422217, -179.20438957475992), 8],
['XM32230826', (29.229032433819672, 140.97546105776559), 8],
['XM56301016', (35.235560418824775, 138.43316567596403), 8],
['PS61272751', (21.453318946417433, 129.7271757354062), 8],
['XM60832123', (31.117062330967663, 153.37296143880505), 8],
['XU63124187', (40.581479285547495, 134.29660112787687), 8],
['XX03173236', (43.508260508407346, 141.77107148300564), 8],
['PZ38755703', (41.37601173942487, 47.1086724584667), 8],
['PZ57757416', (46.004671100980865, 36.12863892699283), 8],
['Fc51504013', (-39.85174798739916, -129.63572626124065), 8],
['OW22586402', (0.7022008550131252, -58.3600060966316), 8],
['PC52458806', (36.09345953973502, -111.351928059747), 8],
['SV20852101', (66.82209536122706, -93.77533912513334), 8],
['aX46486040', (77.61799779414096, 24.60905349794241), 8],
['ZA62754057', (74.95939479445362, 129.37661941777165), 8],
['bD28367162', (83.44032235192313, -52.03170248437738), 8],
['aA05722747', (80.76072843510799, 177.89056546258192), 8], # 本家だとTK05722747 # noqa
['TK83151057', (82.50462047799635, -178.85383325712547), 8],
['TO31130720', (83.83815019303304, -176.04023776863284), 8],
['aE51331450', (84.61506920309282, 172.0073159579332), 8], # 本家だとTO51331450 # noqa
['bb33488116', (85.04932657519551, 88.85840573083371), 8],
['BV804240512', (-85.02069085118825, -80.5080526342529), 9],
['CI708565326', (-85.25892235752575, 126.56200782401055), 9],
['CI754848748', (-84.99009715358707, 124.453589391861), 9],
['KE055580431', (-84.96312883886038, 176.7921556673271), 9], # 本家DO055580431 # noqa
['KE053480025', (-85.05064269354392, 178.51140578163896), 9], # 本家DO053480025 # noqa
['DO073506567', (-85.05064269354392, -177.23111314332166), 9],
['CZ347013544', (-78.34947806021368, -12.656607224508448), 9],
['LK413474586', (-77.91557130594418, 178.59371030838793), 9], # 本家EU413474586 # noqa
['EU354580674', (-78.0620691964047, -175.78113092516384), 9],
['OK474555348', (-48.92264328551308, 61.87471422039323), 9],
['Fb484868583', (-47.990103953997306, -99.8435197886501), 9],
['GH147081657', (-25.800037734013387, -165.93710308387952), 9],
['GI730641802', (-9.797044998389058, -170.64675100340395), 9],
['Mc576514101', (-6.316518377294901, 176.6976578773561), 9], # 本家GI576514101 # noqa
['OY041035522', (-8.406852481197332, -0.7031448458060258), 9],
['OY537117080', (2.810093065032651, -4.005486968449931), 9],
['PA402570238', (9.100307229345306, -61.66133211400701), 9],
['XK058877078', (6.315352201442765, 177.89056546258192), 9], # 本家QU058877078 # noqa
['QU832828267', (17.97695943266528, -179.43504547071078), 9],
['PZ382714446', (40.979941869938415, 45.35182644922014), 9],
['PZ028668414', (36.5981065089872, 36.21094345374179), 9],
['PS383657231', (20.96054525607148, 127.83010719910583), 9],
['XM602176827', (30.447757019883404, 151.73703195651072), 9],
['XM482452815', (34.88453418992268, 138.3772798861962), 9],
['XX033123366', (43.324172591538805, 142.59614896103236), 9],
['OI860616105', (28.611823502104176, -115.09932428999643), 9],
['RX114077466', (46.315519731164876, -108.77102067774221), 9],
['SV355072242', (66.51330668794105, -76.28918355941674), 9],
['YG413103084', (70.84427362032683, 98.29903978052126), 9],
['ZA486483578', (75.49677307800924, 121.5028196921201), 9],
['aX426174078', (77.54216784091535, 16.87547629934462), 9],
['aX747576237', (78.49020322752219, 27.28344256464972), 9],
['aA184816376', (81.51821910339622, 173.32012396484274), 9], # 本家TK184816376 # noqa
['TO310523631', (83.829769600284, -175.91932124168065), 9],
['aE423806423', (84.40576083787137, 176.34608545445306), 9], # 本家TO423806423 # noqa
['bb303856635', (84.89703001315176, 87.40029467052783), 9],
['aE815178640', (85.0496809490896, 176.89071787837224), 9], # 本家TO815178640 # noqa
['SV1700305142', (65.94654074919876, -84.37501058442986), 10],
['BV5482764471', (-85.05111506114845, -87.02264221239982), 10],
['BV7560068257', (-85.05017028096445, -74.02597842469812), 10],
['CI5263615540', (-85.14129611826941, 110.74226489864347), 10],
['CI7265571616', (-85.1114082947234, 123.39853342139577), 10],
['CI7505181555', (-85.08135383503867, 124.10167826720179), 10],
['CI5736546251', (-85.05111506114845, 116.71865738623856), 10],
# DO0273028158
['KE0273028158', (-85.11142495914162, 176.48427577096982), 10],
# DO0536546500
['KE0536546500', (-85.05113193058828, 178.94528273129094), 10],
['DO0716764500', (-85.05113193058828, -177.89056546258197), 10],
# EU4016113514
['LK4016113514', (-78.20657677972486, 179.64842757709698), 10],
# EU4827777232
['LK4827777232', (-76.95200706545712, 178.87855848532573), 10],
['EU7445272188', (-76.67125372443621, -173.73875933546717), 10],
['OK1777354303', (-51.1793357370083, 56.6014665786042), 10],
['CZ3420476772', (-78.4202281335297, -14.765703060170377), 10],
['Fb4334484641', (-50.95852020163508, -97.03127910718219), 10],
['GH5856584625', (-15.961399068408907, -166.9921590543447), 10],
# GI1730824803
['Mc1730824803', (-14.569012270261892, 176.24176531355312), 10],
['GI7234521114', (-8.059072046057983, -177.18742061677588), 10],
['QU3851424320', (10.487900363316632, -174.3748412335518), 10],
# QU8152020110
['XK8152020110', (17.97881945245889, 176.48427577096982), 10],
['PA0426257051', (3.8640599125684467, -61.17190807634337), 10],
['OY4244664644', (0, -4.218869074836153), 10],
['OY7353226324', (2.848178749868688, 8.194888990499416), 10],
['OY3444432334', (-3.864255017562445, 6.679876035157243), 10],
# QU8136543688
['XK8136543688', (17.308706606781136, 178.94528273129094), 10],
['QU8831183154', (20.749483042625016, -179.58339683991258), 10],
['PS6206841033', (21.77982496043273, 129.37492590899083), 10],
['XM0565771230', (27.326562730623966, 139.10785957425188), 10],
['XM4863207510', (34.69969443229734, 141.56886653457298), 10],
['XX0330807788', (43.28817039237169, 142.97515622618502), 10],
['XM6054354111', (30.858393806487317, 152.6430591542617), 10],
['PZ1548687734', (40.17883606392535, 31.28892953309963), 10],
['PZ7555070851', (45.91563178433442, 43.482870158681784), 10],
['OI7581576564', (27.839078765848246, -115.84006503073718), 10],
['RX1535370770', (47.989842206759874, -108.28125793832243), 10],
['SV7031782060', (67.74277620854204, -72.77345933038661), 10],
['SW6431665682', (71.30076217120532, -85.95708648749344), 10],
['aX1617612856', (76.63925321468737, 17.402496231942973), 10],
['aX7255422107', (78.48040532816346, 21.158698707852803), 10],
['ZA7323011017', (75.40885284001867, 127.61706379447577), 10],
# TK0130411728
['aA0130411728', (80.13997185555716, 178.4829548341208), 10],
['TK8071386820', (82.4189123646549, -179.4076106284611), 10],
['TO4434206021', (84.38379583319873, -179.40794933021724), 10],
['bb3372146218', (85.05113193058828, 89.06467510034044), 10],
['BV57264048634', (-85.02070782407576, -85.42972785313896), 11],
['CI57246068382', (-85.02070782407576, 114.25776332650283), 11],
# DO05443747884
['KE05443747884', (-85.02070782407576, 177.89056546258192), 11],
['DO04773823048', (-85.08135942391766, -178.94528273129097), 11],
['DO04773823048', (-85.08135942391766, -178.94528273129097), 11],
['DO07200264504', (-85.05113193058828, -179.29685515419396), 11],
['CZ34837816828', (-78.20655013493614, -13.007840945655294), 11],
# EU18781747622
['LK18781747622', (-77.69287316941559, 174.0233817112342), 11],
['EU47467037811', (-77.28205417737901, -177.18990442965446), 11],
['Fb05827021141', (-53.33088399648074, -102.65621207245964), 11],
['OK72311284586', (-47.75412569895254, 62.57808486737005), 11],
['GH51383310718', (-20.30343065964378, -168.04687632305374), 11],
['GI47416263465', (-10.351477994498591, -177.89304927546053), 11],
['GI87045185255', (-3.375121887678373, -177.89304927546053), 11],
['QU07002763476', (4.002573029443376, -177.89304927546053), 11],
# QU85663663436
['XK85663663436', (20.055949967827992, 179.91216334456695), 11],
# QU84201138888
['XK84201138888', (18.562938073608677, 178.41792409693645), 11],
['PS62147032100', (22.106011968040356, 128.23248488543416), 11],
['XM30178015132', (26.947715856243768, 146.15974303826766), 11],
['XM60054311737', (29.884656543260686, 153.10312903972408), 11],
['XM44560457183', (32.99019885298108, 139.74608658345892), 11],
['XM56456700534', (36.00128269884677, 137.72223068976615), 11],
['XX03131786872', (43.3251683966725, 141.67973490942552), 11],
['PZ16386410216', (38.822566699114795, 38.67183751347751), 11],
['OI78178446320', (28.11665750014962, -113.82072516045996), 11],
['RX13217487787', (46.412184656537676, -106.08635765776445), 11],
['SV40163034106', (66.23147555689933, -80.15625441017913), 11],
['aX41314430862', (77.15715356553575, 18.281201488029684), 11],
['ZA47238521110', (75.14076576175513, 120.93752646107471), 11],
# TK11857485652
['aA11857485652', (81.06017353309196, 170.9449214494177), 11],
['TK43222807032', (81.48680964730961, -179.91453425686015), 11],
['TO03574622544', (83.54984670370604, -178.242137885485), 11],
# TO54114446763
['aE54114446763', (84.73838771967266, 172.35358205332295), 11],
['bb33540877533', (85.05041774078212, 88.15977690844329), 11],
['BV803254745602', (-85.0506539409143, -79.82665996789856), 12],
['CI803456424553', (-85.05112818182883, 120.70679529806696), 12],
# DO053652024200
['KE053652024200', (-85.05207465365511, 178.8683974326407), 12],
# DO053654462750
['KE053654462750', (-85.05113005620892, 178.93433137450816), 12],
['DO071704644727', (-85.05113005620892, -178.25316450932468), 12],
['CY281612610161', (-79.4452281268277, -13.70233007991481), 12],
# EU181655132036
['LK181655132036', (-77.91566682955617, 172.96873970958208), 12],
['EU832016181016', (-76.18499280856993, -179.29689278772247), 12],
['FY636801583240', (-57.364360717067285, -102.999204050873), 12],
['OK446658243285', (-49.8379914216003, 61.87501528862095), 12],
['GH058068815001', (-27.745624883854536, -162.06171522332681), 12],
# GI410587505268
['Mc410587505268', (-13.30821393540185, 177.54768638475392), 12],
['GI871735246286', (-2.881445728338333, -178.2335950745238), 12],
['QU460311054340', (10.418820497790831, -175.42109095835664), 12],
# QU533828300764
['XK533828300764', (13.855307445928876, 176.14139669314187), 12],
['OY313181386161', (-5.615990333705467, 4.921863386528326), 12],
['OY803527038734', (4.845926929147412, 0.3601528673926176), 12],
['PA043284261600', (3.513407610346112, -59.765618384731326), 12],
['PZ060785824214', (35.403520413948364, 44.65703624673294), 12],
['PS625060382624', (22.593728295005597, 128.32032154086718), 12],
['XM317558565378', (28.860201911665968, 144.85235425945686), 12],
['XL822458403751', (29.47417206661665, 153.99297382023593), 12],
['XP251226173262', (42.50140031192322, 143.09452977847025), 12],
['OI745463230145', (26.368400819214855, -113.89764809263869), 12],
['RU681285781273', (43.52956234887473, -107.92106743740135), 12],
['SV352321156833', (66.37275241126041, -76.99217787110894), 12],
['ZA412658306821', (74.49641524173614, 116.71873265329549), 12],
['aX413323833627', (77.15715839512343, 18.632811544461198), 12],
['bD023073480358', (82.11838119136951, -43.59373853353431), 12],
# TK570232153327
['aA570232153327', (82.26169903937593, 175.07813661347168), 12],
['TK800808780528', (82.29949945404425, -179.9914195555104), 12],
['TO328348003525', (84.11778702185453, -177.53045022871783), 12],
# TO563763855337
['aE563763855337', (84.76413166644133, 178.95390080930906), 12],
['bb337212207184', (85.05089012980277, 88.98101576656671), 12],
['BV8046354823266', (-85.02070782407576, -79.45312210888258), 13],
['CI5803560714155', (-84.99010031598158, 113.55468120324429), 13],
# DO0482706175341
['KE0482706175341', (-85.05397142670375, 179.60449670487097), 13],
# DO0482731186381
['KE0482731186381', (-85.05302344313857, 179.60449670487097), 13],
# DO0485035421662
['KE0485035421662', (-85.05207652767761, 179.7692813815018), 13],
# DO0485060618642
['KE0485060618642', (-85.05207652767761, 179.81323734274676), 13],
# DO0485068654767
['KE0485068654767', (-85.05018090449299, 179.81323734274676), 13],
['DO0712537663424', (-85.05207652767761, -179.04967813924787), 13],
['DO0712564265481', (-85.05112880662227, -179.0441961886017), 13],
# EU4501241604860
['LK4501241604860', (-77.50412004123892, 177.36328209528438), 13],
['EU8407181150325', (-75.97257900135345, -179.8485752259737), 13],
['CZ3061211527483', (-78.90392851824576, -12.304683555339782), 13],
['Fb4625116215430', (-49.83798675049283, -97.38281389655673), 13],
['OK7250326542858', (-47.27922833623412, 61.52344286571794), 13],
['GH4221515066426', (-22.593728295005597, -166.28906438657665), 13],
# GI4421818162227
['Mc4421818162227', (-11.523081594188353, 178.2421880635229), 13],
['GI8724851414563', (-2.460176832420581, -179.296880243213), 13],
['QU4074815410453', (9.449059554640053, -179.296880243213), 13],
# QU8810462030471
['XK8810462030471', (20.63278322541268, 179.296880243213), 13],
['PA0414822216031', (3.513422068272812, -60.820310564421405), 13],
['OY4236836048713', (-0.35155967220222734, -3.1640639945607014), 13],
['OY7001555744550', (0.3555792253075448, 6.303766551696238), 13],
['XM4117607703260', (31.353642691994537, 137.46094110164628), 13],
['PS6246856326218', (22.593721608280728, 129.3750011760478), 13],
['XL5887834871345', (29.232394838443412, 153.60845951541816), 13],
['XX0066438378463', (42.81446731196279, 142.0068957168654), 13],
['PZ4537105130820', (42.55307759789627, 38.67187514700597), 13],
['OI8337351073052', (27.997949854465656, -116.74310663523015), 13],
['RX1135780670728', (46.3193563989732, -108.3056068312381), 13],
['SV4163853373183', (66.93005958856028, -80.50781428857267), 13],
['aX4034808877353', (76.92061301619422, 20.742183359331843), 13],
['ZA4726354081160', (75.14077876301411, 121.28906125044925), 13],
['bD0853382488383', (82.5861070130518, -40.429687083483095), 13],
# TK2756770452053
['aA2756770452053', (81.92318725325978, 168.74999607984083), 13],
['TK7434850466772', (82.16644549708819, -172.61719237569804), 13],
['TO0846631354832', (83.90548470121078, -179.3212291361287), 13],
# TO4805277813220
['aE4805277813220', (84.60821834245287, 179.6240786841813), 13],
['bb3356324634667', (85.05109881644849, 88.67683650050841), 13],
['BV80717180121576', (-84.99010031598158, -79.45312629038575), 14],
['CI56436210413512', (-85.14128384950676, 118.1250014373917), 14],
['CI56773820550701', (-85.08136459879998, 118.82812537568195), 14],
['CI80611150250021', (-85.03594145592328, 121.11328340200407), 14],
# DO05167224507116
['KE05167224507116', (-85.05112880662227, 177.5390641252327), 14],
# DO05405867244763
['KE05405867244763', (-85.03594145592328, 177.71484615518105), 14],
['DO07170215584802', (-85.05136559766956, -178.33969235426784), 14],
['DO07170226100057', (-85.05124730768841, -178.33969235426784), 14],
['DO07170227640163', (-85.05101030272562, -178.33969235426784), 14],
# EU16473602511456
['LK16473602511456', (-78.34941078146068, 178.15429704854873), 14],
['EU40673078217161', (-78.09010038610904, -178.18243020182655), 14],
['CZ31831222362536', (-78.49055150369068, -15.468751731403659), 14],
['Fb31200035818717', (-53.33087390431171, -97.03124983666004), 14],
['OK44243402806508', (-49.92565546027554, 58.594914581298774), 14],
['OY01571576207503', (-8.407170118467867, -2.8124999346640127), 14],
['OY56733420402656', (4.079240169372639, -1.1707079849357165), 14],
['GH84065825033010', (-15.961327113736004, -159.60937651906173), 14],
# GI50342481388451
['Mc50342481388451', (-10.621567107054151, 173.9074160840265), 14],
['GI78081171860100', (-5.751378038946543, -173.43633630073705), 14],
['QU30541311610415', (5.480563953528032, -174.13946023902727), 14],
# QU44828780263624
['XK44828780263624', (12.421722939346756, 179.5324159533545), 14],
['PA07571045254517', (5.615985528497132, -58.35937468965406), 14],
['PS38383877824230', (21.162545692441366, 127.50116507131868), 14],
['XM31385143063872', (28.184519988899293, 145.07928861759297), 14],
['XL82447011727335', (29.416780398005393, 155.62616441795882), 14],
['XP22503400845082', (41.93183305535667, 141.56366474463874), 14],
['PZ13526267300787', (38.27268968795205, 34.45312315425837), 14],
['OI78437338044154', (28.184519988899318, -112.9675856147092), 14],
['RU68880348017456', (44.49349196120463, -106.63946180709097), 14],
['SV13411223332736', (65.3100644125332, -84.84258626806907), 14],
['aX38226176387034', (77.28260266195558, 24.844915365330607), 14],
['ZA35122807551718', (74.01954369731402, 123.04687318692636), 14],
['bD32858648748733', (82.85338216634169, -37.96875120871575), 14],
# TK18678460283758
['aA18678460283758', (81.51827215930582, 175.07812406896218), 14],
['TK35253726505866', (81.30832083644431, -177.18750006533597), 14],
['TO35083224064751', (84.11102995385924, -175.5457122971109), 14],
# TO45177603556405
['aE45177603556405', (84.52840268072897, 177.42303995698072), 14],
['bb33563401287846', (85.0501290364617, 88.7193205726401), 14],
['bb33563422636071', (85.0511285983578, 88.70249838541709), 14],
['BV801644720107680', (-85.05491832726936, -80.24414124365012), 15],
['BV801672224571144', (-85.05112880662227, -80.24414124365016), 15],
['BV752066087034602', (-85.0435409337982, -76.8164069918357), 15],
['CI584051202811030', (-84.92832089229395, 113.2031255063539), 15],
# DO053702425073344
['KE053702425073344', (-85.05112880662227, 178.70361275600987), 15],
# DO053474671871414
['KE053474671871414', (-85.05101030272562, 178.62121623619137), 15],
['DO048672464801367', (-85.05112873720078, -179.47265669782374), 15],
['DO048680771383421', (-85.04923289070342, -179.49462910310868), 15],
['CZ312460160330640', (-78.63000548385058, -16.87499960798409), 15],
# EU482446631807432
['LK482446631807432', (-76.99993502220802, 178.59374933575083), 15],
['EU443120677000135', (-77.69287042493717, -179.64843733393766), 15],
['OK486263022716030', (-48.224673018823516, 61.171874624318086), 15],
['Fb563137601173436', (-47.75409810634945, -101.60156310163555), 15],
['GH405610617770460', (-24.527135091152235, -160.3125004573519), 15],
# GI428447007470857
['Mc428447007470857', (-10.627811533740404, 175.5841166159903), 15],
['GI714676472576137', (-8.895519993960725, -174.92369558183077), 15],
['QU430846555413857', (9.655313810029215, -177.73619551649475), 15],
# QU455868523435074
['XK455868523435074', (13.453736454342776, 177.18750006533597), 15],
['XM314886135481601', (28.613458944989343, 144.49218745372033), 15],
['PS624076167125758', (22.268763871961657, 129.02343711615106), 15],
['XL828827023882162', (30.401660908330708, 155.41321858173586), 15],
['XP252183615213788', (42.771513349093496, 142.756969572665), 15],
['OY441655876848342', (-0.35155967220222734, -0.35156266606230013), 15],
['OY450583438341310', (0.6485910111569012, -2.438343213179931), 15],
['PA007023204648554', (1.4061087624421291, -59.41406268784096), 15],
['OI754343508378851', (27.01056357503122, -115.28990605347154), 15],
['RX153284780083552', (47.98992137736064, -108.63281224137839), 15],
['PZ541437131806627', (44.33956527784659, 32.695312611615634), 15],
['SV081012647253500', (65.07213005846185, -80.85937416696618), 15],
['ZA162768706170274', (73.82482041043065, 116.71874937930812), 15],
['aX538661552510561', (78.27820151012904, 16.171874275859466), 15],
['bD475537206875044', (83.40004212473188, -38.67187514700599), 15],
# TK573170435224014
['aA573170435224014', (82.30889241261167, 176.13281206714908), 15],
['TK720137660817775', (82.07002829250126, -177.89062539746055), 15],
['TO078855632751174', (83.89926648794052, -177.8680313420388), 15],
['TO448407012467243', (84.50259507567017, -179.9774059445782), 15],
# TO801157747467437
['aE801157747467437', (84.86090063565264, 178.96790605723493), 15],
['bb337184418811744', (85.05112505786047, 89.37952974397285), 15],
]
def test_geohex():
    """Encode every fixture coordinate and check the generated hex code.

    Each entry of ``geohex_testcases`` is a ``[code, (lat, lng), level]``
    triple; the encoder must reproduce the recorded code at that level.
    """
    for expected_code, (lat, lng), level in geohex_testcases:
        encoded = GeoHex(lat, lng, level)
        assert encoded.hexes[level]['code'] == expected_code
    # Spot-check the zone-level raw code for a fixed Tokyo coordinate.
    encoded = GeoHex(35.6785879, 139.772194, 8)
    assert encoded.hexes[8]['raw_code'] == '70248854864'
    print(encoded)
def test_code2hex():
    """Decode every fixture code and check it round-trips code and level."""
    for expected_code, (lat, lng), expected_level in geohex_testcases:
        info = code2hex(expected_code, lat=lat, lng=lng)
        assert info['code'] == expected_code
        assert info['level'] == expected_level
# Edge/boundary fixtures as [code, (lat, lng), level] triples: the origin at
# the minimum level, polar and antimeridian extremes, and codes whose letters
# differ from the upstream (original) GeoHex implementation.
special_geohex_test_cases = [
['xx44', (0.0, 0.0), -1],
['SW6', (71.48066686818822, -86.66666666666666), 1],
['XK026', (3.8461100614416903, 177.037037037037), 3],  # upstream yields QU026
['GH80422', (-17.674819831740678, -160.65843621399173), 5],
['bD28367162', (83.44032235192313, -52.03170248437738), 8],
['bb33488116', (85.04932657519551, 88.85840573083371), 8],
['SV355072242', (66.51330668794105, -76.28918355941674), 9],
['aA184816376', (81.51821910339622, 173.32012396484274), 9],  # upstream: TK184816376  # noqa
]
def test_special_cases():
    """Round-trip encode/decode the edge-case fixtures at every level.

    For each fixture coordinate, encode at levels -2 through the fixture's
    level, decode the produced code, and verify that both the code and the
    hex center (compared to 7 decimal places) agree in both directions.
    """
    for _, (lat, lng), max_level in special_geohex_test_cases:
        for lvl in range(-2, max_level + 1):
            encoded = GeoHex(lat, lng, lvl)
            print(encoded)
            zone = encoded.hexes[lvl]
            decoded = code2hex(zone['code'], lat=lat, lng=lng)
            assert zone['code'] == decoded['code']
            enc_center = zone['center']
            dec_center = decoded['center']
            assert round(enc_center[0], 7) == round(dec_center[0], 7)
            assert round(enc_center[1], 7) == round(dec_center[1], 7)
def test_geohex_paramerror():
    """GeoHex must reject wrongly-typed and out-of-range constructor args.

    Non-numeric lat/lng, a non-int level -> TypeError; lat/lng outside the
    valid coordinate range or a level below the minimum -> ValueError.
    """
    type_errors = [
        ('abc', 0.0, 10),
        (0.0, 'abc', 10),
        (0.0, 0.0, 'abc'),
        (0.0, 0.0, 1.5),
    ]
    for args in type_errors:
        with pytest.raises(TypeError):
            GeoHex(*args)
    value_errors = [
        (-100, 0.0, 10),
        (0.0, 200, 10),
        (0.0, 0.0, -3),
    ]
    for args in value_errors:
        with pytest.raises(ValueError):
            GeoHex(*args)
def test_code2hex_paramerror():
    """code2hex must reject non-string codes and malformed code strings."""
    with pytest.raises(TypeError):
        code2hex(12345)
    # Too short, digits-only, or otherwise malformed codes raise ValueError.
    for bad_code in ('A', '12345', 'AA934'):
        with pytest.raises(ValueError):
            code2hex(bad_code)
|
[
"pytest.raises"
] |
[((41061, 41085), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (41074, 41085), False, 'import pytest\n'), ((41127, 41151), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (41140, 41151), False, 'import pytest\n'), ((41193, 41217), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (41206, 41217), False, 'import pytest\n'), ((41260, 41284), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (41273, 41284), False, 'import pytest\n'), ((41325, 41350), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (41338, 41350), False, 'import pytest\n'), ((41391, 41416), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (41404, 41416), False, 'import pytest\n'), ((41456, 41481), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (41469, 41481), False, 'import pytest\n'), ((41566, 41590), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (41579, 41590), False, 'import pytest\n'), ((41625, 41650), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (41638, 41650), False, 'import pytest\n'), ((41683, 41708), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (41696, 41708), False, 'import pytest\n'), ((41745, 41770), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (41758, 41770), False, 'import pytest\n')]
|
from .models import Profile
from django.shortcuts import render, redirect
from django.contrib import messages
import pandas as pd
import numpy as np
import sklearn
from sklearn.neighbors import NearestNeighbors
import json
import os
import requests
# Meal/nutrition dataset loaded once at import time; also re-read inside the
# Recommender below.
# NOTE(review): hard-coded absolute Windows path — this breaks on any other
# machine; presumably it should come from settings or an env var. TODO confirm.
df = pd.read_csv(
    r'C:\\Users\jayit\\Downloads\\RAPID\\MedBay-V1\\AI DIET PLANNER Microservice\\website\\dataset.csv')
def Recommend(request):
    """Render personalised meal recommendations for the logged-in user.

    Reads diet/disease/nutrient preferences from the user's Profile row,
    runs a k-nearest-neighbours lookup over the meal dataset, optionally
    pushes the top 5 picks over a WhatsApp gateway on POST, and renders
    ``website/recommend.html``. Unauthenticated users are redirected Home.
    """
    if request.user.is_authenticated:
        # Recommender bundles dataset featurisation + kNN lookup.
        class Recommender:
            def __init__(self):
                # NOTE(review): hardcoded absolute path duplicated from module level.
                self.df = pd.read_csv(
                    r'C:\\Users\jayit\\Downloads\\RAPID\\MedBay-V1\\AI DIET PLANNER Microservice\\website\\dataset.csv')
            def get_features(self):
                # getting dummies of dataset: one-hot encode the three
                # categorical columns and concatenate them column-wise.
                nutrient_dummies = self.df.Nutrient.str.get_dummies()
                disease_dummies = self.df.Disease.str.get_dummies(sep=' ')
                diet_dummies = self.df.Diet.str.get_dummies(sep=' ')
                feature_df = pd.concat(
                    [nutrient_dummies, disease_dummies, diet_dummies], axis=1)
                return feature_df
            def k_neighbor(self, inputs):
                # inputs: 2-D list/array of one-hot feature vectors (one row per query).
                feature_df = self.get_features()
                # initializing model with k=20 neighbors
                # NOTE(review): comment says k=20 but n_neighbors=40 — confirm intended k.
                model = NearestNeighbors(n_neighbors=40, algorithm='ball_tree')
                # fitting model with dataset features
                model.fit(feature_df)
                df_results = pd.DataFrame(columns=list(self.df.columns))
                # getting distance and indices for k nearest neighbor
                distnaces, indices = model.kneighbors(inputs)
                # NOTE(review): DataFrame.append was removed in pandas 2.0;
                # this code requires pandas < 2 (or a switch to pd.concat).
                for i in list(indices):
                    df_results = df_results.append(self.df.loc[i])
                # 'catagory' matches the dataset's misspelled column name.
                df_results = df_results.filter(
                    ['Meal_Id', 'Name', 'catagory', 'Nutrient', 'Veg_Non', 'Price', 'Review', 'Diet', 'Disease', 'description'])
                df_results = df_results.drop_duplicates(subset=['Name'])
                df_results = df_results.reset_index(drop=True)
                return df_results
        ob = Recommender()
        data = ob.get_features()
        total_features = data.columns
        # Start with a zeroed one-hot vector over every known feature.
        d = dict()
        for i in total_features:
            d[i] = 0
        # extract values from database where Table name is Profile
        p = Profile.objects.get(number=request.user.username)
        # Preferences are stored as '++'-delimited strings on the Profile.
        diet = list(p.diet.split('++'))
        disease = list(p.disease.split('++'))
        nutrient = list(p.nutrient.split('++'))
        Recommend_input = diet+disease+nutrient
        image = p.image.url
        # Set the user's features to 1; raises KeyError if a stored
        # preference is not a known dataset feature — TODO confirm inputs
        # are always validated upstream.
        for i in Recommend_input:
            d[i] = 1
        final_input = list(d.values())
        results = ob.k_neighbor([final_input]) # pass 2d array []
        # Unpack result columns into parallel lists for the template/message.
        data = dict(results)
        ids = list(data['Meal_Id'])
        n = list(data['Name'])
        c = list(data['catagory'])
        vn = list(data['Veg_Non'])
        r = list(data['Review'])
        nt = list(data['Nutrient'])
        # NOTE(review): 'p' is reused here, shadowing the Profile object above.
        p = list(data['Price'])
        i = range(len(n))
        sc = c
        # NOTE(review): "<KEY>" is a placeholder API token — must come from config/secrets.
        headers = {"Content-Type": "application/json;","Authorization":"<KEY>"}
        tab = '\t'
        if request.method == "POST":
            # Send the top 5 recommendations via the WhatsApp template API.
            # NOTE(review): phone number is hardcoded — presumably should be
            # the user's number from Profile; verify.
            lengthDrugs = len(n)
            sendData = {
                "phone": "+917044659720",
                "media": {
                    "type": "media_template",
                    "lang_code": "en",
                    "template_name": "welcome",
                    "body":[
                        {
                            "text":f"DIETUP! Our Top 5 Recommendations for you! MEAL 1: Name: {n[0]} Category: {c[0]} Calories: {p[0]} MEAL 2: Name: {n[1]} Category: {c[1]} Calories: {p[1]} MEAL 3: Name: {n[2]} Category: {c[2]} Calories: {p[2]} MEAL 4: Name: {n[3]} Category: {c[3]} Calories: {p[3]} MEAL 5: Name: {n[4]} Category: {c[4]} Calories: {p[4]}"
                        }
                    ]
                }
            }
            jsonObject = json.dumps(sendData)
            req = requests.post(url = "https://rapidapi.rmlconnect.net/wbm/v1/message",headers=headers, data = jsonObject)
        data1 = zip(n, ids, n, c, sc, vn, r, nt, p, p)
        return render(request, "website/recommend.html", {'data1': data1, 'image': image})
    else:
        messages.error(
            request, 'You must be logged in for meal recommendations..')
        return redirect('Home')
|
[
"pandas.read_csv",
"django.contrib.messages.error",
"django.shortcuts.redirect",
"json.dumps",
"sklearn.neighbors.NearestNeighbors",
"django.shortcuts.render",
"requests.post",
"pandas.concat"
] |
[((255, 391), 'pandas.read_csv', 'pd.read_csv', (['"""C:\\\\\\\\Users\\\\jayit\\\\\\\\Downloads\\\\\\\\RAPID\\\\\\\\MedBay-V1\\\\\\\\AI DIET PLANNER Microservice\\\\\\\\website\\\\\\\\dataset.csv"""'], {}), "(\n 'C:\\\\\\\\Users\\\\jayit\\\\\\\\Downloads\\\\\\\\RAPID\\\\\\\\MedBay-V1\\\\\\\\AI DIET PLANNER Microservice\\\\\\\\website\\\\\\\\dataset.csv'\n )\n", (266, 391), True, 'import pandas as pd\n'), ((4096, 4171), 'django.shortcuts.render', 'render', (['request', '"""website/recommend.html"""', "{'data1': data1, 'image': image}"], {}), "(request, 'website/recommend.html', {'data1': data1, 'image': image})\n", (4102, 4171), False, 'from django.shortcuts import render, redirect\n'), ((4191, 4266), 'django.contrib.messages.error', 'messages.error', (['request', '"""You must be logged in for meal recommendations.."""'], {}), "(request, 'You must be logged in for meal recommendations..')\n", (4205, 4266), False, 'from django.contrib import messages\n'), ((4295, 4311), 'django.shortcuts.redirect', 'redirect', (['"""Home"""'], {}), "('Home')\n", (4303, 4311), False, 'from django.shortcuts import render, redirect\n'), ((3867, 3887), 'json.dumps', 'json.dumps', (['sendData'], {}), '(sendData)\n', (3877, 3887), False, 'import json\n'), ((3906, 4012), 'requests.post', 'requests.post', ([], {'url': '"""https://rapidapi.rmlconnect.net/wbm/v1/message"""', 'headers': 'headers', 'data': 'jsonObject'}), "(url='https://rapidapi.rmlconnect.net/wbm/v1/message', headers\n =headers, data=jsonObject)\n", (3919, 4012), False, 'import requests\n'), ((524, 660), 'pandas.read_csv', 'pd.read_csv', (['"""C:\\\\\\\\Users\\\\jayit\\\\\\\\Downloads\\\\\\\\RAPID\\\\\\\\MedBay-V1\\\\\\\\AI DIET PLANNER Microservice\\\\\\\\website\\\\\\\\dataset.csv"""'], {}), "(\n 'C:\\\\\\\\Users\\\\jayit\\\\\\\\Downloads\\\\\\\\RAPID\\\\\\\\MedBay-V1\\\\\\\\AI DIET PLANNER Microservice\\\\\\\\website\\\\\\\\dataset.csv'\n )\n", (535, 660), True, 'import pandas as pd\n'), ((983, 1051), 'pandas.concat', 
'pd.concat', (['[nutrient_dummies, disease_dummies, diet_dummies]'], {'axis': '(1)'}), '([nutrient_dummies, disease_dummies, diet_dummies], axis=1)\n', (992, 1051), True, 'import pandas as pd\n'), ((1283, 1338), 'sklearn.neighbors.NearestNeighbors', 'NearestNeighbors', ([], {'n_neighbors': '(40)', 'algorithm': '"""ball_tree"""'}), "(n_neighbors=40, algorithm='ball_tree')\n", (1299, 1338), False, 'from sklearn.neighbors import NearestNeighbors\n')]
|
from django.shortcuts import render
from django.template.loader import render_to_string
# Create your views here.
from django.http import HttpResponse
def test_page(request):
    """Render the websocket test page."""
    template_name = "ws_test_page.html"
    return render(request, template_name)
|
[
"django.shortcuts.render"
] |
[((185, 221), 'django.shortcuts.render', 'render', (['request', '"""ws_test_page.html"""'], {}), "(request, 'ws_test_page.html')\n", (191, 221), False, 'from django.shortcuts import render\n')]
|
#! /usr/bin/env python
import os
def main():
if os.path.exists(".dockerignore"):
print(".dockerignore already exists, remove it to proceed")
exit(-1)
with open(".gitignore", "r") as fin, open(".dockerignore", "w") as fout:
fout.write("# This file was automatically generated by ./ci/bootstrap_dockerignore.py\n")
fout.write("# based on the contents of .gitignore\n\n")
for line in fin:
if line[0] in "#!/\n":
fout.write(line)
else:
fout.write("**/" + line)
if __name__ == "__main__":
main()
|
[
"os.path.exists"
] |
[((53, 84), 'os.path.exists', 'os.path.exists', (['""".dockerignore"""'], {}), "('.dockerignore')\n", (67, 84), False, 'import os\n')]
|
from django.http import HttpResponse, JsonResponse
from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt
from rest_framework.parsers import JSONParser
from .models import *
from .serializers import *
# Create your views here.
"""
Method: to post and get the posts
"""
@csrf_exempt
def post_list(request):
    """List all posts (GET) or create a new post from a JSON body (POST).

    Returns 200 with the serialized list, 201 on successful create,
    400 on validation errors, and 405 for any other HTTP method.
    """
    if request.method == 'GET':
        posts = Post.objects.all()
        serializer = PostSerializer(posts, many=True)
        # safe=False: top-level JSON is a list, not an object.
        return JsonResponse(serializer.data, safe=False)
    if request.method == 'POST':
        data = JSONParser().parse(request)
        serializer = PostSerializer(data=data)
        if serializer.is_valid():
            serializer.save()
            return JsonResponse(serializer.data, status=201)
        return JsonResponse(serializer.errors, status=400)
    # Bug fix: previously fell through and implicitly returned None for any
    # other verb, which makes Django raise "view didn't return an HttpResponse".
    return HttpResponse(status=405)
@csrf_exempt
def post_details(request, slug):
    """Retrieve (GET), update (PUT) or delete (DELETE) a single post by slug.

    Returns 404 when no post matches the slug, 400 on validation errors,
    204 after deletion, and 405 for any other HTTP method.
    """
    try:
        post = Post.objects.get(slug=slug)
    except Post.DoesNotExist:
        return HttpResponse(status=404)
    if request.method == 'GET':
        serializer = PostSerializer(post)
        return JsonResponse(serializer.data)
    elif request.method == 'PUT':
        data = JSONParser().parse(request)
        serializer = PostSerializer(post, data=data)
        if serializer.is_valid():
            serializer.save()
            return JsonResponse(serializer.data)
        return JsonResponse(serializer.errors, status=400)
    elif request.method == 'DELETE':
        post.delete()
        return HttpResponse(status=204)
    # Bug fix: unhandled verbs previously returned None (a server error in
    # Django); reply with 405 Method Not Allowed instead.
    return HttpResponse(status=405)
|
[
"rest_framework.parsers.JSONParser",
"django.http.HttpResponse",
"django.http.JsonResponse"
] |
[((501, 542), 'django.http.JsonResponse', 'JsonResponse', (['serializer.data'], {'safe': '(False)'}), '(serializer.data, safe=False)\n', (513, 542), False, 'from django.http import HttpResponse, JsonResponse\n'), ((805, 848), 'django.http.JsonResponse', 'JsonResponse', (['serializer.errors'], {'status': '(400)'}), '(serializer.errors, status=400)\n', (817, 848), False, 'from django.http import HttpResponse, JsonResponse\n'), ((1108, 1137), 'django.http.JsonResponse', 'JsonResponse', (['serializer.data'], {}), '(serializer.data)\n', (1120, 1137), False, 'from django.http import HttpResponse, JsonResponse\n'), ((748, 789), 'django.http.JsonResponse', 'JsonResponse', (['serializer.data'], {'status': '(201)'}), '(serializer.data, status=201)\n', (760, 789), False, 'from django.http import HttpResponse, JsonResponse\n'), ((994, 1018), 'django.http.HttpResponse', 'HttpResponse', ([], {'status': '(404)'}), '(status=404)\n', (1006, 1018), False, 'from django.http import HttpResponse, JsonResponse\n'), ((1396, 1439), 'django.http.JsonResponse', 'JsonResponse', (['serializer.errors'], {'status': '(400)'}), '(serializer.errors, status=400)\n', (1408, 1439), False, 'from django.http import HttpResponse, JsonResponse\n'), ((590, 602), 'rest_framework.parsers.JSONParser', 'JSONParser', ([], {}), '()\n', (600, 602), False, 'from rest_framework.parsers import JSONParser\n'), ((1351, 1380), 'django.http.JsonResponse', 'JsonResponse', (['serializer.data'], {}), '(serializer.data)\n', (1363, 1380), False, 'from django.http import HttpResponse, JsonResponse\n'), ((1514, 1538), 'django.http.HttpResponse', 'HttpResponse', ([], {'status': '(204)'}), '(status=204)\n', (1526, 1538), False, 'from django.http import HttpResponse, JsonResponse\n'), ((1187, 1199), 'rest_framework.parsers.JSONParser', 'JSONParser', ([], {}), '()\n', (1197, 1199), False, 'from rest_framework.parsers import JSONParser\n')]
|
import unittest
import json
from neo3 import vm, storage
from neo3.contracts import manifest
from .utils import test_engine, test_block
class BlockchainInteropTestCase(unittest.TestCase):
    """Tests for the System.Blockchain.* syscall interop handlers.

    Each test builds a fresh application engine (via test_engine), invokes a
    syscall by name, and inspects what the handler pushed onto the VM
    evaluation stack.
    """
    def test_get_height(self):
        """GetHeight on an empty chain reports -1 (no genesis block)."""
        engine = test_engine(has_container=True, has_snapshot=True)
        engine.invoke_syscall_by_name("System.Blockchain.GetHeight")
        item = engine.current_context.evaluation_stack.pop()
        # unlike the C# test case, our chain starts at -1 because our chain is created without the genesis block
        # this was done such that you can sync without having to know the validators to create the genesis block
        # this is useful for purposes where we're less concerned with security (e.g. a statistics app)
        self.assertEqual(vm.BigInteger(-1), item.to_biginteger())
    def test_get_block(self):
        """GetBlock by height/hash: null for unknown, errors for bad input, full array for a stored block."""
        engine = test_engine(has_container=True, has_snapshot=True)
        # test with height
        engine.push(vm.ByteStringStackItem(b'\x01'))
        engine.invoke_syscall_by_name("System.Blockchain.GetBlock")
        self.assertIsInstance(engine.pop(), vm.NullStackItem)
        # test with invalid height (-1)
        engine.push(vm.ByteStringStackItem(b'\xFF'))
        with self.assertRaises(ValueError) as context:
            engine.invoke_syscall_by_name("System.Blockchain.GetBlock")
        self.assertEqual("Invalid height", str(context.exception))
        # test with invalid data > 32 bytes
        engine.push(vm.ByteStringStackItem(b'\xFF' * 33))
        with self.assertRaises(ValueError) as context:
            engine.invoke_syscall_by_name("System.Blockchain.GetBlock")
        self.assertEqual("Invalid data", str(context.exception))
        # test with serialized block hash (UInt256). This fake hash won't return a block
        engine.push(vm.ByteStringStackItem(b'\x01' * 32))
        engine.invoke_syscall_by_name("System.Blockchain.GetBlock")
        self.assertIsInstance(engine.pop(), vm.NullStackItem)
        # now find an existing block
        # first add a block and update the snapshot
        # normally this would be done while persisting in Blockchain
        testblock = test_block()
        engine.snapshot.block_height = testblock.index
        engine.snapshot.blocks.put(testblock)
        engine.push(vm.ByteStringStackItem(testblock.hash().to_array()))
        engine.invoke_syscall_by_name("System.Blockchain.GetBlock")
        # # validate the right content was pushed onto the stack
        item = engine.pop()
        self.assertIsInstance(item, vm.ArrayStackItem)
        self.assertEqual(len(item), 8)
        self.assertEqual(item[0].to_array(), testblock.hash().to_array())
        self.assertEqual(item[1].to_biginteger(), vm.BigInteger(testblock.version))
        self.assertEqual(item[2].to_array(), testblock.prev_hash.to_array())
        self.assertEqual(item[3].to_array(), testblock.merkle_root.to_array())
        self.assertEqual(item[4].to_biginteger(), vm.BigInteger(testblock.timestamp))
        self.assertEqual(item[5].to_biginteger(), vm.BigInteger(testblock.index))
        self.assertEqual(item[6].to_array(), testblock.next_consensus.to_array())
        self.assertEqual(item[7].to_biginteger(), vm.BigInteger(len(testblock.transactions)))
    def test_get_transaction_from_block(self):
        """GetTransactionFromBlock: out-of-range indexes raise, valid index returns the TX array."""
        # this test for the first part is identical to the GetBlock test above
        engine = test_engine(has_container=True, has_snapshot=True)
        # test with serialized block hash (UInt256). This fake hash won't return a block
        engine.push(vm.IntegerStackItem(0)) # index
        engine.push(vm.ByteStringStackItem(b'\x01' * 32))
        engine.invoke_syscall_by_name("System.Blockchain.GetTransactionFromBlock")
        self.assertIsInstance(engine.pop(), vm.NullStackItem)
        # now find an existing block, but with an invalid transaction index (
        # first add a block and update the snapshot
        # normally this would be done while persisting in Blockchain
        testblock = test_block()
        engine.snapshot.block_height = testblock.index
        engine.snapshot.blocks.put(testblock)
        engine.push(vm.IntegerStackItem(-1)) # index
        engine.push(vm.ByteStringStackItem(testblock.hash().to_array())) # hash
        with self.assertRaises(ValueError) as context:
            engine.invoke_syscall_by_name("System.Blockchain.GetTransactionFromBlock")
        self.assertEqual("Transaction index out of range: -1", str(context.exception))
        # now let's try again but this time with an invalid index (out of bounds)
        engine.push(vm.IntegerStackItem(len(testblock.transactions) + 1)) # index
        engine.push(vm.ByteStringStackItem(testblock.hash().to_array())) # hash
        with self.assertRaises(ValueError) as context:
            engine.invoke_syscall_by_name("System.Blockchain.GetTransactionFromBlock")
        self.assertEqual("Transaction index out of range: 2", str(context.exception))
        # Finally, we try with a valid index (we have only 1 transaction, so 0)
        engine.push(vm.IntegerStackItem(vm.BigInteger(0))) # index
        engine.push(vm.ByteStringStackItem(testblock.hash().to_array())) # hash
        engine.invoke_syscall_by_name("System.Blockchain.GetTransactionFromBlock")
        # and test the TX items pushed to the stack
        item = engine.pop()
        testblock_tx = testblock.transactions[0]
        self.assertIsInstance(item, vm.ArrayStackItem)
        self.assertEqual(len(item), 8)
        self.assertEqual(item[0].to_array(), testblock_tx.hash().to_array())
        self.assertEqual(item[1].to_biginteger(), vm.BigInteger(testblock_tx.version))
        self.assertEqual(item[2].to_biginteger(), vm.BigInteger(testblock_tx.nonce))
        self.assertEqual(item[3].to_array(), testblock_tx.sender.to_array())
        self.assertEqual(item[4].to_biginteger(), vm.BigInteger(testblock_tx.system_fee))
        self.assertEqual(item[5].to_biginteger(), vm.BigInteger(testblock_tx.network_fee))
        self.assertEqual(item[6].to_biginteger(), vm.BigInteger(testblock_tx.valid_until_block))
        self.assertEqual(item[7].to_array(), testblock_tx.script)
    def test_get_transaction(self):
        """GetTransaction: null for an unknown hash, TX array for a stored one."""
        engine = test_engine(has_container=True, has_snapshot=True)
        bad_tx_hash_bytes = b'\x01' * 32
        engine.push(vm.ByteStringStackItem(bad_tx_hash_bytes))
        engine.invoke_syscall_by_name("System.Blockchain.GetTransaction")
        self.assertIsInstance(engine.pop(), vm.NullStackItem)
        # now get a valid tx
        testblock = test_block()
        engine.snapshot.block_height = testblock.index
        testblock_tx = testblock.transactions[0]
        engine.snapshot.transactions.put(testblock_tx)
        engine.push(vm.ByteStringStackItem(testblock_tx.hash().to_array()))
        engine.invoke_syscall_by_name("System.Blockchain.GetTransaction")
        # and test the TX item pushed to the stack. We're not going to check all items in the array as we've already
        # done that in test_get_transaction_from_block() so we already know that the "to_stack_item()" conversion works
        item = engine.pop()
        self.assertIsInstance(item, vm.ArrayStackItem)
        self.assertEqual(len(item), 8)
        self.assertEqual(item[0].to_array(), testblock_tx.hash().to_array())
    def test_get_transaction_height(self):
        """GetTransactionHeight: -1 for an unknown hash, the block height for a stored TX."""
        engine = test_engine(has_container=True, has_snapshot=True)
        bad_tx_hash_bytes = b'\x01' * 32
        engine.push(vm.ByteStringStackItem(bad_tx_hash_bytes))
        engine.invoke_syscall_by_name("System.Blockchain.GetTransactionHeight")
        item = engine.pop()
        self.assertIsInstance(item, vm.IntegerStackItem)
        self.assertEqual(vm.BigInteger(-1), item.to_biginteger())
        # now get a valid tx
        testblock = test_block()
        engine.snapshot.block_height = testblock.index
        testblock_tx = testblock.transactions[0]
        engine.snapshot.transactions.put(testblock_tx)
        engine.push(vm.ByteStringStackItem(testblock_tx.hash().to_array()))
        engine.invoke_syscall_by_name("System.Blockchain.GetTransactionHeight")
        item = engine.pop()
        self.assertIsInstance(item, vm.IntegerStackItem)
        self.assertEqual(str(vm.BigInteger(1)), str(item.to_biginteger()))
    def test_get_contract(self):
        """GetContract: null for an unknown script hash, contract array for a stored contract."""
        engine = test_engine(has_container=True, has_snapshot=True)
        bad_contract_hash_bytes = b'\x01' * 20
        engine.push(vm.ByteStringStackItem(bad_contract_hash_bytes))
        engine.invoke_syscall_by_name("System.Blockchain.GetContract")
        item = engine.pop()
        self.assertIsInstance(item, vm.NullStackItem)
        # now get a valid contract
        # first put one in storage
        contract = storage.ContractState(b'\x01\x02', manifest.ContractManifest())
        engine.snapshot.contracts.put(contract)
        engine.push(vm.ByteStringStackItem(contract.script_hash().to_array()))
        engine.invoke_syscall_by_name("System.Blockchain.GetContract")
        item = engine.pop()
        self.assertIsInstance(item, vm.ArrayStackItem)
        self.assertEqual(len(item), 4)
        self.assertEqual(contract.script, item[0].to_array())
        self.assertEqual(contract.manifest, manifest.ContractManifest.from_json(json.loads(item[1].to_array())))
        self.assertEqual(contract.has_storage, item[2].to_boolean())
        self.assertEqual(contract.is_payable, item[3].to_boolean())
|
[
"neo3.contracts.manifest.ContractManifest",
"neo3.vm.IntegerStackItem",
"neo3.vm.BigInteger",
"neo3.vm.ByteStringStackItem"
] |
[((773, 790), 'neo3.vm.BigInteger', 'vm.BigInteger', (['(-1)'], {}), '(-1)\n', (786, 790), False, 'from neo3 import vm, storage\n'), ((960, 991), 'neo3.vm.ByteStringStackItem', 'vm.ByteStringStackItem', (["b'\\x01'"], {}), "(b'\\x01')\n", (982, 991), False, 'from neo3 import vm, storage\n'), ((1184, 1215), 'neo3.vm.ByteStringStackItem', 'vm.ByteStringStackItem', (["b'\\xff'"], {}), "(b'\\xff')\n", (1206, 1215), False, 'from neo3 import vm, storage\n'), ((1476, 1512), 'neo3.vm.ByteStringStackItem', 'vm.ByteStringStackItem', (["(b'\\xff' * 33)"], {}), "(b'\\xff' * 33)\n", (1498, 1512), False, 'from neo3 import vm, storage\n'), ((1816, 1852), 'neo3.vm.ByteStringStackItem', 'vm.ByteStringStackItem', (["(b'\\x01' * 32)"], {}), "(b'\\x01' * 32)\n", (1838, 1852), False, 'from neo3 import vm, storage\n'), ((2729, 2761), 'neo3.vm.BigInteger', 'vm.BigInteger', (['testblock.version'], {}), '(testblock.version)\n', (2742, 2761), False, 'from neo3 import vm, storage\n'), ((2969, 3003), 'neo3.vm.BigInteger', 'vm.BigInteger', (['testblock.timestamp'], {}), '(testblock.timestamp)\n', (2982, 3003), False, 'from neo3 import vm, storage\n'), ((3055, 3085), 'neo3.vm.BigInteger', 'vm.BigInteger', (['testblock.index'], {}), '(testblock.index)\n', (3068, 3085), False, 'from neo3 import vm, storage\n'), ((3568, 3590), 'neo3.vm.IntegerStackItem', 'vm.IntegerStackItem', (['(0)'], {}), '(0)\n', (3587, 3590), False, 'from neo3 import vm, storage\n'), ((3621, 3657), 'neo3.vm.ByteStringStackItem', 'vm.ByteStringStackItem', (["(b'\\x01' * 32)"], {}), "(b'\\x01' * 32)\n", (3643, 3657), False, 'from neo3 import vm, storage\n'), ((4158, 4181), 'neo3.vm.IntegerStackItem', 'vm.IntegerStackItem', (['(-1)'], {}), '(-1)\n', (4177, 4181), False, 'from neo3 import vm, storage\n'), ((5641, 5676), 'neo3.vm.BigInteger', 'vm.BigInteger', (['testblock_tx.version'], {}), '(testblock_tx.version)\n', (5654, 5676), False, 'from neo3 import vm, storage\n'), ((5728, 5761), 'neo3.vm.BigInteger', 'vm.BigInteger', 
(['testblock_tx.nonce'], {}), '(testblock_tx.nonce)\n', (5741, 5761), False, 'from neo3 import vm, storage\n'), ((5890, 5928), 'neo3.vm.BigInteger', 'vm.BigInteger', (['testblock_tx.system_fee'], {}), '(testblock_tx.system_fee)\n', (5903, 5928), False, 'from neo3 import vm, storage\n'), ((5980, 6019), 'neo3.vm.BigInteger', 'vm.BigInteger', (['testblock_tx.network_fee'], {}), '(testblock_tx.network_fee)\n', (5993, 6019), False, 'from neo3 import vm, storage\n'), ((6071, 6116), 'neo3.vm.BigInteger', 'vm.BigInteger', (['testblock_tx.valid_until_block'], {}), '(testblock_tx.valid_until_block)\n', (6084, 6116), False, 'from neo3 import vm, storage\n'), ((6350, 6391), 'neo3.vm.ByteStringStackItem', 'vm.ByteStringStackItem', (['bad_tx_hash_bytes'], {}), '(bad_tx_hash_bytes)\n', (6372, 6391), False, 'from neo3 import vm, storage\n'), ((7511, 7552), 'neo3.vm.ByteStringStackItem', 'vm.ByteStringStackItem', (['bad_tx_hash_bytes'], {}), '(bad_tx_hash_bytes)\n', (7533, 7552), False, 'from neo3 import vm, storage\n'), ((7744, 7761), 'neo3.vm.BigInteger', 'vm.BigInteger', (['(-1)'], {}), '(-1)\n', (7757, 7761), False, 'from neo3 import vm, storage\n'), ((8492, 8539), 'neo3.vm.ByteStringStackItem', 'vm.ByteStringStackItem', (['bad_contract_hash_bytes'], {}), '(bad_contract_hash_bytes)\n', (8514, 8539), False, 'from neo3 import vm, storage\n'), ((8819, 8846), 'neo3.contracts.manifest.ContractManifest', 'manifest.ContractManifest', ([], {}), '()\n', (8844, 8846), False, 'from neo3.contracts import manifest\n'), ((5098, 5114), 'neo3.vm.BigInteger', 'vm.BigInteger', (['(0)'], {}), '(0)\n', (5111, 5114), False, 'from neo3 import vm, storage\n'), ((8277, 8293), 'neo3.vm.BigInteger', 'vm.BigInteger', (['(1)'], {}), '(1)\n', (8290, 8293), False, 'from neo3 import vm, storage\n')]
|
# -*- coding: utf-8 -*-
"""
This module contains tests for tofu.geom in its structured version
"""
# Built-in
import os
import sys
import itertools as itt # for iterating on parameters combinations
import subprocess # for handling bash commands
# Standard
import matplotlib.pyplot as plt
# Make sure the figures do not block the execution => allow interactivity
# Interactive mode: figures created by the tests do not block execution.
plt.ion()
# Resolve paths relative to this test file so tests run from any CWD.
_PATH_HERE = os.path.abspath(os.path.dirname(__file__))
_PATH_PCK = os.path.dirname(_PATH_HERE)
# Scratch directory for files produced by the tests (cleaned up afterwards).
_PATH_OUTPUT = os.path.join(_PATH_HERE, 'output_temp')
# Reference outputs used by the non-regression test.
_PATH_OUTPUT_REF = os.path.join(_PATH_HERE, 'output_ref')
# library-specific: temporarily prepend the package root so `_core` is
# imported from this checkout, then restore sys.path.
sys.path.insert(0, _PATH_PCK) # ensure Main comes from .. => add PYTHONPATH
import _core
sys.path.pop(0) # clean PYTHONPATH
#######################################################
#
# Setup and Teardown
#
#######################################################
def setup_module():
    """Module-level pytest setup hook; nothing to prepare."""
    pass
def teardown_module():
    """Module-level pytest teardown hook; nothing to clean up."""
    pass
#######################################################
#
# Creating Ves objects and testing methods
#
#######################################################
class Test01_Run():
    """End-to-end test sequence: instantiate every model, run it with every
    solver, save/load the results, and compare against reference outputs.

    NOTE: the test methods are order-dependent (test06 replaces the Hub
    stored per model with a {solver: Hub} dict that later tests rely on).
    """
    @classmethod
    def setup_class(cls):
        # dmodel maps model name -> Hub (later: -> {solver: Hub}, see test06).
        cls.dmodel = {}
        cls.lsolvers = [
            'eRK4-homemade',
            'eRK2-scipy', 'eRK4-scipy', 'eRK8-scipy',
        ]
    @classmethod
    def setup(self):
        pass
    def teardown(self):
        pass
    @classmethod
    def teardown_class(cls):
        """ Clean-up the saved files """
        lf = [
            os.path.join(_PATH_OUTPUT, ff) for ff in os.listdir(_PATH_OUTPUT)
            if ff.endswith('.npz')
        ]
        for ff in lf:
            os.remove(ff)
    def test01_init_from_all_models(self):
        """ Make sure the main function runs from a python console """
        lmodel = _core._class_checks.models.get_available_models(
            returnas=list,
        )
        for model in lmodel:
            self.dmodel[model] = _core.Hub(model)
    def test02_get_summary_repr(self):
        """repr() and get_summary() run without error for every model."""
        for model in self.dmodel.keys():
            print(self.dmodel[model])
            self.dmodel[model].get_summary()
    def test03_get_dparam(self):
        """get_dparam() accepts both group and eqtype filters."""
        for model in self.dmodel.keys():
            out = self.dmodel[model].get_dparam(group='Numerical')
            out = self.dmodel[model].get_dparam(eqtype='ode')
    def test04_get_variables_compact(self):
        for model in self.dmodel.keys():
            out = self.dmodel[model].get_variables_compact()
    def test05_set_single_param(self):
        """A single parameter can be overridden before running."""
        for model in self.dmodel.keys():
            self.dmodel[model].set_dparam(key='Tmax', value=20)
    def test06_run_all_models_all_solvers(self):
        """ Make sure the main function runs as executable from terminal """
        # list of entry parameters to try
        for ii, model in enumerate(self.dmodel.keys()):
            # From here on, dmodel[model] becomes a {solver: Hub} dict.
            self.dmodel[model] = {
                solver: _core.Hub(model) for solver in self.lsolvers
            }
            for jj, solver in enumerate(self.lsolvers):
                if ii % 2 == 0:
                    # testing verb = 0, 1, 2
                    verb = (ii + jj) % 3
                else:
                    # testing verb = float
                    verb = ii + jj / len(self.lsolvers)
                self.dmodel[model][solver].run(solver=solver, verb=verb)
    def test07_get_summary_repr_after_run(self):
        for model in self.dmodel.keys():
            for jj, solver in enumerate(self.lsolvers):
                print(self.dmodel[model][solver])
                self.dmodel[model][solver].get_summary()
    def test08_save(self):
        """Each (model, solver) run is saved to the scratch output dir."""
        # list of entry parameters to try
        for ii, model in enumerate(self.dmodel.keys()):
            for jj, solver in enumerate(self.lsolvers):
                self.dmodel[model][solver].save(
                    name=str(ii * 10 + jj),
                    path=_PATH_OUTPUT, # _PATH_OUTPUT_REF to update ref
                )
    def test09_load_and_equal(self):
        """Every saved file loads back equal to the in-memory Hub."""
        lf = [
            os.path.join(_PATH_OUTPUT, ff)
            for ff in os.listdir(_PATH_OUTPUT)
            if ff.endswith('.npz')
        ]
        for ff in lf:
            obj = _core._saveload.load(ff)[0]
            model = list(obj.model.keys())[0]
            solver = obj.dmisc['solver']
            assert obj == self.dmodel[model][solver]
    def test10_copy(self):
        """copy() yields an equal but distinct object."""
        for model in self.dmodel.keys():
            for solver in self.lsolvers:
                obj = self.dmodel[model][solver].copy()
                assert obj == self.dmodel[model][solver]
                assert obj is not self.dmodel[model][solver]
    def test11_get_available_output(self):
        """get_available_output supports verbose, list and filtered-dict modes."""
        # verb
        _core._saveload.get_available_output(path=_PATH_OUTPUT)
        # list
        _core._saveload.get_available_output(path=_PATH_OUTPUT, returnas=list)
        # dict, with filters
        _core._saveload.get_available_output(
            path=_PATH_OUTPUT,
            model='GK',
            name='2',
            returnas=dict,
        )
    def test12_nonregression_output(self):
        """Current runs must match the stored reference outputs."""
        # load reference files
        df_ref = _core._saveload.get_available_output(
            path=_PATH_OUTPUT_REF,
            returnas=dict,
        )
        lobj_ref = _core._saveload.load(list(df_ref.keys()))
        # compare to current output
        dfail = {}
        for ii, (ff, v0) in enumerate(df_ref.items()):
            model = list(lobj_ref[ii].dmisc['model'].keys())[0]
            solver = lobj_ref[ii].dmisc['solver']
            obj = self.dmodel[model][solver]
            isok, dfaili = obj.__eq__(
                lobj_ref[ii],
                verb=False,
                return_dfail=True,
            )
            if isok is False:
                # only tolerated error: different absolute path to model file
                keyok = f"dmisc['model']['{model}']"
                if keyok in dfaili.keys():
                    del dfaili[keyok]
                isok = len(dfaili) == 0
            if isok is False:
                msg = (
                    f"Differs from reference for: {list(dfaili.keys())}"
                )
                dfail[f'{model}_{solver}'] = msg
        if len(dfail) > 0:
            lstr = [f'\t- {k0}: {v0}' for k0, v0 in dfail.items()]
            msg = (
                "The following output regressions have been detected:\n"
                + "\n".join(lstr)
            )
            raise Exception(msg)
|
[
"sys.path.pop",
"_core._saveload.get_available_output",
"os.remove",
"_core._class_checks.models.get_available_models",
"os.path.dirname",
"sys.path.insert",
"_core._saveload.load",
"matplotlib.pyplot.ion",
"_core.Hub",
"os.path.join",
"os.listdir"
] |
[((381, 390), 'matplotlib.pyplot.ion', 'plt.ion', ([], {}), '()\n', (388, 390), True, 'import matplotlib.pyplot as plt\n'), ((461, 488), 'os.path.dirname', 'os.path.dirname', (['_PATH_HERE'], {}), '(_PATH_HERE)\n', (476, 488), False, 'import os\n'), ((504, 543), 'os.path.join', 'os.path.join', (['_PATH_HERE', '"""output_temp"""'], {}), "(_PATH_HERE, 'output_temp')\n", (516, 543), False, 'import os\n'), ((563, 601), 'os.path.join', 'os.path.join', (['_PATH_HERE', '"""output_ref"""'], {}), "(_PATH_HERE, 'output_ref')\n", (575, 601), False, 'import os\n'), ((623, 652), 'sys.path.insert', 'sys.path.insert', (['(0)', '_PATH_PCK'], {}), '(0, _PATH_PCK)\n', (638, 652), False, 'import sys\n'), ((714, 729), 'sys.path.pop', 'sys.path.pop', (['(0)'], {}), '(0)\n', (726, 729), False, 'import sys\n'), ((422, 447), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (437, 447), False, 'import os\n'), ((1842, 1904), '_core._class_checks.models.get_available_models', '_core._class_checks.models.get_available_models', ([], {'returnas': 'list'}), '(returnas=list)\n', (1889, 1904), False, 'import _core\n'), ((4748, 4803), '_core._saveload.get_available_output', '_core._saveload.get_available_output', ([], {'path': '_PATH_OUTPUT'}), '(path=_PATH_OUTPUT)\n', (4784, 4803), False, 'import _core\n'), ((4827, 4897), '_core._saveload.get_available_output', '_core._saveload.get_available_output', ([], {'path': '_PATH_OUTPUT', 'returnas': 'list'}), '(path=_PATH_OUTPUT, returnas=list)\n', (4863, 4897), False, 'import _core\n'), ((4935, 5032), '_core._saveload.get_available_output', '_core._saveload.get_available_output', ([], {'path': '_PATH_OUTPUT', 'model': '"""GK"""', 'name': '"""2"""', 'returnas': 'dict'}), "(path=_PATH_OUTPUT, model='GK', name=\n '2', returnas=dict)\n", (4971, 5032), False, 'import _core\n'), ((5180, 5254), '_core._saveload.get_available_output', '_core._saveload.get_available_output', ([], {'path': '_PATH_OUTPUT_REF', 'returnas': 'dict'}), 
'(path=_PATH_OUTPUT_REF, returnas=dict)\n', (5216, 5254), False, 'import _core\n'), ((1551, 1581), 'os.path.join', 'os.path.join', (['_PATH_OUTPUT', 'ff'], {}), '(_PATH_OUTPUT, ff)\n', (1563, 1581), False, 'import os\n'), ((1696, 1709), 'os.remove', 'os.remove', (['ff'], {}), '(ff)\n', (1705, 1709), False, 'import os\n'), ((1990, 2006), '_core.Hub', '_core.Hub', (['model'], {}), '(model)\n', (1999, 2006), False, 'import _core\n'), ((4066, 4096), 'os.path.join', 'os.path.join', (['_PATH_OUTPUT', 'ff'], {}), '(_PATH_OUTPUT, ff)\n', (4078, 4096), False, 'import os\n'), ((1592, 1616), 'os.listdir', 'os.listdir', (['_PATH_OUTPUT'], {}), '(_PATH_OUTPUT)\n', (1602, 1616), False, 'import os\n'), ((2952, 2968), '_core.Hub', '_core.Hub', (['model'], {}), '(model)\n', (2961, 2968), False, 'import _core\n'), ((4119, 4143), 'os.listdir', 'os.listdir', (['_PATH_OUTPUT'], {}), '(_PATH_OUTPUT)\n', (4129, 4143), False, 'import os\n'), ((4229, 4253), '_core._saveload.load', '_core._saveload.load', (['ff'], {}), '(ff)\n', (4249, 4253), False, 'import _core\n')]
|
from corm import Storage, Entity, Relationship, RelationType
# Backing store that records entities and the relations between them.
storage = Storage()
# A simple address record.
class Address(Entity):
    street: str
    number: int
# A user that resolves its .address through a PARENT relation in storage.
class User(Entity):
    name: str
    address: Address = Relationship(
        entity_type=Address,
        relation_type=RelationType.PARENT,
    )
# Create one of each and link John to the address.
address = Address({'street': 'First', 'number': 1}, storage)
john = User({'name': 'John'}, storage)
storage.make_relation(
    from_=john,
    to_=address,
    relation_type=RelationType.PARENT,
)
# The Relationship descriptor now resolves through the stored relation.
assert john.address == address
|
[
"corm.Storage",
"corm.Relationship"
] |
[((72, 81), 'corm.Storage', 'Storage', ([], {}), '()\n', (79, 81), False, 'from corm import Storage, Entity, Relationship, RelationType\n'), ((198, 266), 'corm.Relationship', 'Relationship', ([], {'entity_type': 'Address', 'relation_type': 'RelationType.PARENT'}), '(entity_type=Address, relation_type=RelationType.PARENT)\n', (210, 266), False, 'from corm import Storage, Entity, Relationship, RelationType\n')]
|
# import library
import configparser
import os
def read_config(cfg_file):
    """Read an INI configuration file.

    Returns None when *cfg_file* is None; otherwise returns a
    ConfigParser, populated from the file when it exists (and empty
    when it does not).
    """
    if cfg_file is None:
        return None
    parser = configparser.ConfigParser()
    if os.path.exists(cfg_file):
        parser.read(cfg_file)
    return parser
|
[
"configparser.ConfigParser",
"os.path.exists"
] |
[((140, 167), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (165, 167), False, 'import configparser\n'), ((179, 203), 'os.path.exists', 'os.path.exists', (['cfg_file'], {}), '(cfg_file)\n', (193, 203), False, 'import os\n')]
|
from __future__ import unicode_literals
import dataent
from dataent.model.rename_doc import rename_doc
def execute():
if dataent.db.table_exists("Workflow Action") and not dataent.db.table_exists("Workflow Action Master"):
rename_doc('DocType', 'Workflow Action', 'Workflow Action Master')
dataent.reload_doc('workflow', 'doctype', 'workflow_action_master')
|
[
"dataent.db.table_exists",
"dataent.reload_doc",
"dataent.model.rename_doc.rename_doc"
] |
[((124, 166), 'dataent.db.table_exists', 'dataent.db.table_exists', (['"""Workflow Action"""'], {}), "('Workflow Action')\n", (147, 166), False, 'import dataent\n'), ((228, 294), 'dataent.model.rename_doc.rename_doc', 'rename_doc', (['"""DocType"""', '"""Workflow Action"""', '"""Workflow Action Master"""'], {}), "('DocType', 'Workflow Action', 'Workflow Action Master')\n", (238, 294), False, 'from dataent.model.rename_doc import rename_doc\n'), ((297, 364), 'dataent.reload_doc', 'dataent.reload_doc', (['"""workflow"""', '"""doctype"""', '"""workflow_action_master"""'], {}), "('workflow', 'doctype', 'workflow_action_master')\n", (315, 364), False, 'import dataent\n'), ((175, 224), 'dataent.db.table_exists', 'dataent.db.table_exists', (['"""Workflow Action Master"""'], {}), "('Workflow Action Master')\n", (198, 224), False, 'import dataent\n')]
|
import pysam
import sys
import pandas as pd
def is_split_read(s):
try:
s.get_tag("SA")
except KeyError:
return False
return True
def get_read_map(bam_path, contig_of_interest='NC_007605'):
coordinates = pd.DataFrame()
infile = pysam.AlignmentFile(bam_path, "rb")
for s in infile.fetch(contig_of_interest):
if s.mapping_quality<30 or s.is_duplicate or \
s.is_unmapped or s.mate_is_unmapped:
continue
if is_split_read(s):
sa = s.get_tag("SA").split(",")
chrom1, pos1 = sa[0], int(sa[1])
elif not s.is_proper_pair:
chrom1, pos1 = s.next_reference_name, s.next_reference_start
else: # ignore normal mapping reads
continue
if chrom1 != contig_of_interest:
coordinates = coordinates.append({'readpair': s.query_name, 'chr1': chrom1, 'pos1': pos1,
'chr2': s.reference_name, 'pos2': s.reference_start},
ignore_index=True)
return coordinates[['readpair', 'chr1', 'pos1', 'chr2', 'pos2']].astype({'pos1':int, 'pos2':int})
def get_clusters_df(df, chr_col, pos_col, cluster_colname, cluster_name_prefix,
MAX_DIST=1000, MIN_TAGS=6, drop_no_clust=False):
"""
Cluster by chr_col and pos_col. Reads are clustered together if there is at least MIN_TAGS reads pairwise separated by MAX_DIST.
So, with default params, reads in positions 1000,2000,3000,4000,5000,and 6000, will be enough to compose a cluster.
Cluster ID, composed of cluster_name_prefix and a sequential integer, is placed into cluster_colname for the reads in the cluster.
Not clustered reads will have the value in cluster_colname unchanged.
"""
res=df.copy().sort_values([chr_col, pos_col])
ccnt = 1
cstart=0
clust_num = 1
for i in range(1,len(res)):
if res[pos_col].iloc[i] - res[pos_col].iloc[i-1] <= MAX_DIST:
ccnt += 1
else:
if ccnt >= MIN_TAGS:
res[cluster_colname].iloc[range(cstart, i)] = (cluster_name_prefix + str(clust_num))
clust_num += 1
ccnt = 1
cstart = i
# print last cluster
if ccnt > MIN_TAGS:
res[cluster_colname].iloc[range(cstart, i)] = (cluster_name_prefix + str(clust_num))
if drop_no_clust:
return(res[res[cluster_colname]!=""])
else:
return res
if __name__ == '__main__':
coordinates = get_read_map(sys.argv[1], 'NC_007605')
coordinates['integr_hotspot'] = ''
coordinates['EBV_cluster'] = ''
max_distance = 1000
min_tags = 6
final_df = pd.DataFrame()
for chr_ in coordinates['chr1'].unique():
chr_df = get_clusters_df(coordinates[coordinates['chr1']==chr_],
chr_col='chr1', pos_col='pos1',
cluster_colname='integr_hotspot',
cluster_name_prefix=chr_+'_hs',
MAX_DIST = max_distance,
MIN_TAGS = min_tags)
if len(chr_df)<=0:
continue
# add readpairs without hotspot
final_df = pd.concat([final_df, chr_df[chr_df['integr_hotspot'] == '']])
# cluster ebv_reads within each integration hotspot
for hs in [e for e in chr_df['integr_hotspot'].unique() if e]:
chr_hs_df = get_clusters_df(chr_df[chr_df['integr_hotspot'] == hs],
chr_col='chr2', pos_col='pos2',
cluster_colname='EBV_cluster',
cluster_name_prefix=hs+'_ebv_cluster',
MAX_DIST = max_distance,
MIN_TAGS = min_tags)
final_df = pd.concat([final_df, chr_hs_df])
final_df = final_df[['readpair', 'integr_hotspot', 'chr1', 'pos1', 'chr2', 'pos2', 'EBV_cluster']].sort_values(['chr1','pos1','pos2'])
final_df.to_csv(sys.stdout, sep='\t', index=False)
|
[
"pandas.DataFrame",
"pysam.AlignmentFile",
"pandas.concat"
] |
[((238, 252), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (250, 252), True, 'import pandas as pd\n'), ((267, 302), 'pysam.AlignmentFile', 'pysam.AlignmentFile', (['bam_path', '"""rb"""'], {}), "(bam_path, 'rb')\n", (286, 302), False, 'import pysam\n'), ((2821, 2835), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (2833, 2835), True, 'import pandas as pd\n'), ((3376, 3437), 'pandas.concat', 'pd.concat', (["[final_df, chr_df[chr_df['integr_hotspot'] == '']]"], {}), "([final_df, chr_df[chr_df['integr_hotspot'] == '']])\n", (3385, 3437), True, 'import pandas as pd\n'), ((3988, 4020), 'pandas.concat', 'pd.concat', (['[final_df, chr_hs_df]'], {}), '([final_df, chr_hs_df])\n', (3997, 4020), True, 'import pandas as pd\n')]
|
"""
TODO (wimax July 2020): I don't see anything in here that indicates that
screams "e2e test", this certainly seems like more of an integration test.
There's nothing here that does anything cross-service.
Perhaps it's just "does it work in AWS?"
"""
from grapl_tests_common.clients.grapl_web_client import GraplWebClient
def test_real_user_fake_password() -> None:
# Exercises the PasswordVerification case in grapl-web-ui login.rs
resp = GraplWebClient().real_user_fake_password()
assert resp.status_code == 401
def test_nonexistent_user() -> None:
# Exercises the UserRecordNotFound case in grapl-web-ui login.rs
resp = GraplWebClient().nonexistent_user()
assert resp.status_code == 401
def test_check__empty_creds() -> None:
resp = GraplWebClient().empty_creds()
assert resp.status_code == 500
# TODO: https://github.com/grapl-security/issue-tracker/issues/686
# Add a `test_no_content_type()` (it currently 200s for some reason)
|
[
"grapl_tests_common.clients.grapl_web_client.GraplWebClient"
] |
[((452, 468), 'grapl_tests_common.clients.grapl_web_client.GraplWebClient', 'GraplWebClient', ([], {}), '()\n', (466, 468), False, 'from grapl_tests_common.clients.grapl_web_client import GraplWebClient\n'), ((649, 665), 'grapl_tests_common.clients.grapl_web_client.GraplWebClient', 'GraplWebClient', ([], {}), '()\n', (663, 665), False, 'from grapl_tests_common.clients.grapl_web_client import GraplWebClient\n'), ((772, 788), 'grapl_tests_common.clients.grapl_web_client.GraplWebClient', 'GraplWebClient', ([], {}), '()\n', (786, 788), False, 'from grapl_tests_common.clients.grapl_web_client import GraplWebClient\n')]
|
import pyfirmata
import time
from pyfirmata import Arduino, util
import RPi.GPIO as GPIO
import dht11
import time
import datetime
import http.client
import requests
# run by: python3.5 arduino_airtemperature.py
# initialize GPIO for raspberry pi
# New Changes to be made
# Specifically make some changes with regards to Git and GitHub
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
#GPIO.cleanup()
# read data using pin 14
instance = dht11.DHT11(pin=14)
pod_value = "pod1"
mach_serialno = "QWERT1234"
while True:
result = instance.read()
#print(result.is_valid())
if result.is_valid():
#print(("Last valid input: " + str(datetime.datetime.now())))
#print(("Temperature: %d C" % result.temperature))
#print(("Temperature: %d F" % ((result.temperature * 9/5)+32)))
#print(("Humidity: " + str(result.humidity)))
temphumid = str(datetime.datetime.now()) + ";" + str(result.temperature) + ";" + str(((result.temperature * 9/5)+32)) + ";" + str(result.humidity)
#url = 'http://172.16.1.71:3000/api/receivetemperaturehumidity/{"pod":"'+pod_value+'","machname":"'+mach_serialno+'","datetimereceived":"'+str(datetime.datetime.now())+'","aircelsius":"'+str(result.temperature)+'","airfahrenheit":"'+str(((result.temperature * 9/5)+32))+'","humidity":"'+str(result.humidity)+'"}'
url = 'https://ravenview.herokuapp.com/api/receivetemperaturehumidity/{"pod":"'+pod_value+'","machname":"'+mach_serialno+'","datetimereceived":"'+str(datetime.datetime.now())+'","aircelsius":"'+str(result.temperature)+'","airfahrenheit":"'+str(((result.temperature * 9/5)+32))+'","humidity":"'+str(result.humidity)+'"}'
#print(url)
resp = requests.get(url)
#print(resp)
time.sleep(60)
|
[
"RPi.GPIO.setmode",
"time.sleep",
"requests.get",
"dht11.DHT11",
"datetime.datetime.now",
"RPi.GPIO.setwarnings"
] |
[((338, 361), 'RPi.GPIO.setwarnings', 'GPIO.setwarnings', (['(False)'], {}), '(False)\n', (354, 361), True, 'import RPi.GPIO as GPIO\n'), ((362, 384), 'RPi.GPIO.setmode', 'GPIO.setmode', (['GPIO.BCM'], {}), '(GPIO.BCM)\n', (374, 384), True, 'import RPi.GPIO as GPIO\n'), ((438, 457), 'dht11.DHT11', 'dht11.DHT11', ([], {'pin': '(14)'}), '(pin=14)\n', (449, 457), False, 'import dht11\n'), ((1709, 1726), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (1721, 1726), False, 'import requests\n'), ((1758, 1772), 'time.sleep', 'time.sleep', (['(60)'], {}), '(60)\n', (1768, 1772), False, 'import time\n'), ((889, 912), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (910, 912), False, 'import datetime\n'), ((1501, 1524), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1522, 1524), False, 'import datetime\n')]
|
import pygame, math
pygame.font.init()
font = pygame.font.SysFont('Monotype', 12)
def distTo(fromPos, toPos):
dist = math.sqrt((fromPos[0] - toPos[0]) ** 2 + (fromPos[1] - toPos[1]) ** 2)
return(dist)
def angleTo(fromPos, toPos):
dx = toPos[0] - fromPos[0]
dy = toPos[1] - fromPos[1]
rads = math.atan2(dy,dx)
rads %= 2*math.pi
degs = math.degrees(rads)
return(degs)
def angleDistToPos(pos, angle, dist):
movement = pygame.math.Vector2()
movement.from_polar((dist, angle))
movement.x = int(movement.x)
movement.y = int(movement.y)
x = movement.x + pos[0]
y = movement.y + pos[1]
return(x,y)
def dirAsCompassDir(degrees):
degrees = int(degrees)
if degrees >= 225 and degrees <= 315:
compassDir = "N"
elif degrees >= 45 and degrees <= 135:
compassDir = "S"
elif degrees >= 135 and degrees <= 225:
compassDir = "W"
elif degrees >= 315 or degrees <= 45:
compassDir = "E"
return(compassDir)
def changeColor(image, replaceWith):
toReplace = (255,0,0)
pa = pygame.PixelArray(image)
pa.replace(toReplace, replaceWith)
return(pa.make_surface())
def tintImage(destImg, tintColor):
tinted = destImg.copy()
tinted.fill((0, 0, 0, 175), None, pygame.BLEND_RGBA_MULT)
tinted.fill(tintColor[0:3] + (0,), None, pygame.BLEND_RGBA_ADD)
return(tinted)
def screenPosToTilePos(tileSize, pos):
x = int((pos[0] -1) / tileSize)
y = int((pos[1] -1) / tileSize)
# print("screen pos to tile {} -> ({}, {})".format(pos, x,y))
return(x, y)
def tilePosToScreenPos(tileSize, pos):
x = (pos[0] * tileSize) + tileSize / 2
y = (pos[1] * tileSize) + tileSize / 2
# print("tile pos to pixels {} -> {}x{}".format(pos, x,y))
return(x, y)
def tilePosId(pos):
return("{:.0f}x{:.0f}".format(pos[0], pos[1]))
def drawDebugLayer(grinder):
# target line
if grinder.target:
pygame.draw.line(
grinder.world.debugLayer,
(20, 10, 40),
(grinder.rect.center[0] + 24, grinder.rect.center[1] + 24),
(grinder.target.center[0] + 24, grinder.target.center[1] + 24),
1
)
# hit marker
if grinder.lastHitArea:
pygame.draw.rect(
grinder.world.debugLayer,
(200, 30, 30),
grinder.lastHitArea,
0
)
grinder.lastHitArea = None
# health bar
x = grinder.rect.x + grinder.rect.width * 0.5
y = grinder.rect.y + grinder.rect.height * 1.5
pygame.draw.line(
grinder.world.debugLayer,
(20, 130, 30),
(x,y), (x + (grinder.health / 100) * grinder.rect.width, y),
3
)
# state text
textsurface = font.render(grinder.state, False, (0, 0, 0))
x = grinder.rect.x + grinder.rect.width * 0.5
y = grinder.rect.y + grinder.rect.height * 1.5
grinder.world.debugLayer.blit(textsurface,(x,y))
|
[
"pygame.math.Vector2",
"pygame.draw.line",
"math.sqrt",
"math.atan2",
"pygame.font.SysFont",
"pygame.draw.rect",
"pygame.font.init",
"pygame.PixelArray",
"math.degrees"
] |
[((21, 39), 'pygame.font.init', 'pygame.font.init', ([], {}), '()\n', (37, 39), False, 'import pygame, math\n'), ((47, 82), 'pygame.font.SysFont', 'pygame.font.SysFont', (['"""Monotype"""', '(12)'], {}), "('Monotype', 12)\n", (66, 82), False, 'import pygame, math\n'), ((120, 190), 'math.sqrt', 'math.sqrt', (['((fromPos[0] - toPos[0]) ** 2 + (fromPos[1] - toPos[1]) ** 2)'], {}), '((fromPos[0] - toPos[0]) ** 2 + (fromPos[1] - toPos[1]) ** 2)\n', (129, 190), False, 'import pygame, math\n'), ((299, 317), 'math.atan2', 'math.atan2', (['dy', 'dx'], {}), '(dy, dx)\n', (309, 317), False, 'import pygame, math\n'), ((344, 362), 'math.degrees', 'math.degrees', (['rads'], {}), '(rads)\n', (356, 362), False, 'import pygame, math\n'), ((428, 449), 'pygame.math.Vector2', 'pygame.math.Vector2', ([], {}), '()\n', (447, 449), False, 'import pygame, math\n'), ((986, 1010), 'pygame.PixelArray', 'pygame.PixelArray', (['image'], {}), '(image)\n', (1003, 1010), False, 'import pygame, math\n'), ((2274, 2399), 'pygame.draw.line', 'pygame.draw.line', (['grinder.world.debugLayer', '(20, 130, 30)', '(x, y)', '(x + grinder.health / 100 * grinder.rect.width, y)', '(3)'], {}), '(grinder.world.debugLayer, (20, 130, 30), (x, y), (x + \n grinder.health / 100 * grinder.rect.width, y), 3)\n', (2290, 2399), False, 'import pygame, math\n'), ((1790, 1982), 'pygame.draw.line', 'pygame.draw.line', (['grinder.world.debugLayer', '(20, 10, 40)', '(grinder.rect.center[0] + 24, grinder.rect.center[1] + 24)', '(grinder.target.center[0] + 24, grinder.target.center[1] + 24)', '(1)'], {}), '(grinder.world.debugLayer, (20, 10, 40), (grinder.rect.\n center[0] + 24, grinder.rect.center[1] + 24), (grinder.target.center[0] +\n 24, grinder.target.center[1] + 24), 1)\n', (1806, 1982), False, 'import pygame, math\n'), ((2035, 2121), 'pygame.draw.rect', 'pygame.draw.rect', (['grinder.world.debugLayer', '(200, 30, 30)', 'grinder.lastHitArea', '(0)'], {}), '(grinder.world.debugLayer, (200, 30, 30), grinder.\n lastHitArea, 
0)\n', (2051, 2121), False, 'import pygame, math\n')]
|
import typer
from mltk import cli
@cli.root_cli.command("commander", cls=cli.VariableArgumentParsingCommand)
def silabs_commander_command(ctx: typer.Context):
"""Silab's Commander Utility
This utility allows for accessing a Silab's embedded device via JLink.
For more details issue command: mltk commander --help
"""
# Import all required packages here instead of at top
# to help improve the CLI's responsiveness
from mltk.utils.commander import issue_command
logger = cli.get_logger()
try:
issue_command(*ctx.meta['vargs'], outfile=logger)
except Exception as e:
cli.handle_exception('Commander failed', e)
|
[
"mltk.utils.commander.issue_command",
"mltk.cli.get_logger",
"mltk.cli.root_cli.command",
"mltk.cli.handle_exception"
] |
[((40, 113), 'mltk.cli.root_cli.command', 'cli.root_cli.command', (['"""commander"""'], {'cls': 'cli.VariableArgumentParsingCommand'}), "('commander', cls=cli.VariableArgumentParsingCommand)\n", (60, 113), False, 'from mltk import cli\n'), ((521, 537), 'mltk.cli.get_logger', 'cli.get_logger', ([], {}), '()\n', (535, 537), False, 'from mltk import cli\n'), ((555, 604), 'mltk.utils.commander.issue_command', 'issue_command', (["*ctx.meta['vargs']"], {'outfile': 'logger'}), "(*ctx.meta['vargs'], outfile=logger)\n", (568, 604), False, 'from mltk.utils.commander import issue_command\n'), ((640, 683), 'mltk.cli.handle_exception', 'cli.handle_exception', (['"""Commander failed"""', 'e'], {}), "('Commander failed', e)\n", (660, 683), False, 'from mltk import cli\n')]
|
"""
Note: do not tweak this code unless you know what you're doing.'
"""
import tensorflow as tf
import json
from init import main
import os
with open('config.json') as f:
_d = json.load(f)
labels = _d['labels']
model = _d['CurrentModel']
model = tf.keras.models.load_model(f'models/{model}')
if __name__ == '__main__':
main('neuralModel', labels, model=model)
pass
|
[
"json.load",
"tensorflow.keras.models.load_model",
"init.main"
] |
[((180, 192), 'json.load', 'json.load', (['f'], {}), '(f)\n', (189, 192), False, 'import json\n'), ((253, 298), 'tensorflow.keras.models.load_model', 'tf.keras.models.load_model', (['f"""models/{model}"""'], {}), "(f'models/{model}')\n", (279, 298), True, 'import tensorflow as tf\n'), ((327, 367), 'init.main', 'main', (['"""neuralModel"""', 'labels'], {'model': 'model'}), "('neuralModel', labels, model=model)\n", (331, 367), False, 'from init import main\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
import yaml
import textfsm
from behave import given, when, then
from netests.protocols.facts import Facts
from netests.converters.facts.arista.api import _arista_facts_api_converter
from netests.converters.facts.arista.ssh import _arista_facts_ssh_converter
from netests.converters.facts.cumulus.api import _cumulus_facts_api_converter
from netests.converters.facts.cumulus.ssh import _cumulus_facts_ssh_converter
from netests.converters.facts.extreme_vsp.ssh import _extreme_vsp_facts_ssh_converter
from netests.converters.facts.extreme_vsp.api import _extreme_vsp_facts_api_converter
from netests.converters.facts.ios.api import _ios_facts_api_converter
from netests.converters.facts.ios .nc import _ios_facts_nc_converter
from netests.converters.facts.ios.ssh import _ios_facts_ssh_converter
from netests.converters.facts.iosxr.ssh import _iosxr_facts_ssh_converter
from netests.converters.facts.juniper.api import _juniper_facts_api_converter
from netests.converters.facts.juniper .nc import _juniper_facts_nc_converter
from netests.converters.facts.juniper.ssh import _juniper_facts_ssh_converter
from netests.converters.facts.napalm.converter import _napalm_facts_converter
from netests.converters.facts.nxos.api import _nxos_facts_api_converter
from netests.converters.facts.nxos.ssh import _nxos_facts_ssh_converter
from netests.comparators.facts_compare import _compare_facts
from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes
from netests.constants import (
NOT_SET,
FEATURES_SRC_PATH,
FACTS_DATA_HOST_KEY,
FACTS_SYS_DICT_KEY,
FACTS_INT_DICT_KEY,
FACTS_DOMAIN_DICT_KEY,
FACTS_MEMORY_DICT_KEY,
FACTS_CONFIG_DICT_KEY,
FACTS_SERIAL_DICT_KEY
)
@given(u'A network protocols named Facts defined in netests/protocols/facts.py')
def step_impl(context):
context.test_not_implemented = list()
@given(u'I create a Facts object equals to Arista manually named o0001')
def step_impl(context):
context.o0001 = Facts(
hostname='leaf03',
domain='dh.local',
version='4.24.0F',
build='da8d6269-c25f-4a12-930b-c3c42c12c38a',
serial='',
base_mac='50:00:00:d7:ee:0b',
memory=2014424,
vendor='Arista',
model='vEOS',
interfaces_lst=['Management1',
'Ethernet8',
'Ethernet2',
'Ethernet3',
'Ethernet1',
'Ethernet6',
'Ethernet7',
'Ethernet4',
'Ethernet5'],
options={}
)
@given(u'I create a Facts object from a Arista API output named o0002')
def step_impl(context):
cmd_output = open_json_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/arista/api/"
"arista_api_get_facts.json"
)
)
context.o0002 = _arista_facts_api_converter(
hostname="leaf03",
cmd_output=cmd_output,
options={}
)
@given(u'I create a Facts object from a Arista Netconf named o0003')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'I create a Facts object from a Arista SSH output named o0004')
def step_impl(context):
cmd_output = dict()
cmd_output[FACTS_SYS_DICT_KEY] = open_json_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/arista/ssh/"
"arista_cli_show_version.json"
)
)
cmd_output[FACTS_INT_DICT_KEY] = open_json_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/arista/ssh/"
"arista_cli_show_interface_status.json"
)
)
cmd_output[FACTS_DOMAIN_DICT_KEY] = open_json_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/arista/ssh/"
"arista_cli_show_hostname.json"
)
)
context.o0004 = _arista_facts_ssh_converter(
hostname="leaf03",
cmd_output=cmd_output,
options={}
)
@given(u'I create a Facts object equals to Cumulus manually named o0101')
def step_impl(context):
context.o0101 = Facts(
hostname='cumulus',
domain=NOT_SET,
version='3.7.5',
build='Cumulus Linux 3.7.5',
serial='50:00:00:01:00:00',
base_mac='50:00:00:01:00:00',
memory=951264,
vendor='Cumulus Networks',
model='VX',
interfaces_lst=['swp5',
'swp7',
'swp2',
'swp3',
'swp1',
'swp6',
'swp4',
'eth0'],
options={}
)
@given(u'I create a Facts object from a Cumulus API output named o0102')
def step_impl(context):
cmd_output = dict()
cmd_output[FACTS_SYS_DICT_KEY] = open_txt_file_as_bytes(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/cumulus/api/"
"cumulus_api_show_system.json"
)
)
cmd_output[FACTS_INT_DICT_KEY] = open_txt_file_as_bytes(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/cumulus/api/"
"cumulus_api_show_interface_all.json"
)
)
context.o0102 = _cumulus_facts_api_converter(
hostname="leaf01",
cmd_output=cmd_output,
options={}
)
@given(u'I create a Facts object from a Cumulus Netconf named o0103')
def step_impl(context):
print("Cumulus Facts with Netconf not possible -> Not tested")
@given(u'I create a Facts object from a Cumulus SSH output named o0104')
def step_impl(context):
cmd_output = dict()
cmd_output[FACTS_SYS_DICT_KEY] = open_json_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/cumulus/ssh/"
"cumulus_net_show_system.json"
)
)
cmd_output[FACTS_INT_DICT_KEY] = open_json_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/cumulus/ssh/"
"cumulus_net_show_interface_all.json"
)
)
context.o0104 = _cumulus_facts_ssh_converter(
hostname="leaf01",
cmd_output=cmd_output,
options={}
)
@given(u'I create a Facts object equals to Extreme VSP manually named o0201')
def step_impl(context):
context.o0201 = Facts(
hostname='spine02',
domain='dh.local',
version='8.1.0.0',
build=NOT_SET,
serial='SDNIPM624B36',
base_mac='00:51:00:02:00:00',
memory=2087444480,
vendor='Extreme Networks',
model='8284XSQ',
interfaces_lst=[ 'mgmt',
'1/1',
'1/2',
'1/3',
'1/4',
'1/5',
'1/6',
'1/7',
'1/8',
'1/9',
'1/10',
'1/11',
'1/12',
'1/13',
'1/14',
'1/15',
'1/16',
'1/17',
'1/18',
'1/19',
'1/20',
'1/21',
'1/22',
'1/23',
'1/24',
'1/25',
'1/26',
'1/27',
'1/28',
'1/29',
'1/30',
'1/31',
'1/32',
'1/33',
'1/34',
'1/35',
'1/36',
'1/37',
'1/38',
'1/39',
'1/40',
'1/41',
'1/42',
'2/1',
'2/2',
'2/3',
'2/4',
'2/5',
'2/6',
'2/7',
'2/8',
'2/9',
'2/10',
'2/11',
'2/12',
'2/13',
'2/14',
'2/15',
'2/16',
'2/17',
'2/18',
'2/19',
'2/20',
'2/21',
'2/22',
'2/23',
'2/24',
'2/25',
'2/26',
'2/27',
'2/28',
'2/29',
'2/30',
'2/31',
'2/32',
'2/33',
'2/34',
'2/35',
'2/36',
'2/37',
'2/38',
'2/39',
'2/40',
'2/41',
'2/42',
'Default'],
options={}
)
@given(u'I create a Facts object from a Extreme VSP API output named o0202')
def step_impl(context):
cmd_output = dict()
cmd_output[FACTS_SYS_DICT_KEY] = open_json_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/extreme_vsp/api/"
"extreme_vsp_api_openconfig_system.json"
)
)
cmd_output[FACTS_INT_DICT_KEY] = open_json_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/extreme_vsp/api/"
"extreme_vsp_api_openconfig_interfaces.json"
)
)
context.o0202 = _extreme_vsp_facts_api_converter(
hostname="spine02",
cmd_output=cmd_output,
options={}
)
@given(u'I create a Facts object from a Extreme VSP Netconf output named o0203')
def step_impl(context):
print("Extreme VSP VRF with Netconf not possible -> Not tested")
@given(u'I create a Facts object from a Extreme VSP SSH output named o0204')
def step_impl(context):
cmd_output = dict()
cmd_output[FACTS_SYS_DICT_KEY] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/extreme_vsp/ssh/"
"extreme_vsp_show_tech.txt"
)
)
cmd_output[FACTS_INT_DICT_KEY] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/extreme_vsp/ssh/"
"extreme_vsp_show_interfaces_gigabitethernet_name.txt"
)
)
cmd_output[FACTS_DOMAIN_DICT_KEY] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/extreme_vsp/ssh/"
"extreme_vsp_show_sys_dns.txt"
)
)
context.o0204 = _extreme_vsp_facts_ssh_converter(
hostname="spine02",
cmd_output=cmd_output,
options={}
)
@given(u'I create a Facts object equals to IOS manually named o0301')
def step_impl(context):
context.o0301 = Facts(
hostname='leaf05',
domain='dh.local',
version='16.8.1',
build='fc3',
serial='9YEI1T9ZCIY',
base_mac=NOT_SET,
memory='8113376',
vendor='Cisco',
model='CSR1000V',
interfaces_lst=['GigabitEthernet1',
'GigabitEthernet2',
'GigabitEthernet3'],
options={}
)
@given(u'I create a Facts object from a IOS API output named o0302')
def step_impl(context):
context.o0302 = _ios_facts_api_converter(
hostname="leaf05",
cmd_output=open_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/ios/api/"
"cisco_ios_api_get_facts_16.8.json"
)
),
options={}
)
@given(u'I create a Facts object from a IOS Netconf named o0303')
def step_impl(context):
context.o0303 = _ios_facts_nc_converter(
hostname="leaf05",
cmd_output=open_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/ios/netconf/"
"cisco_ios_nc_get_facts.xml"
)
),
options={}
)
@given(u'I create a Facts object from a IOS SSH named o0304')
def step_impl(context):
cmd_output = dict()
cmd_output[FACTS_SYS_DICT_KEY] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/ios/ssh/"
"cisco_ios_show_version.txt"
)
)
cmd_output[FACTS_INT_DICT_KEY] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/ios/ssh/"
"cisco_ios_ip_interface_brief.txt"
)
)
context.o0304 = _ios_facts_ssh_converter(
hostname="spine02",
cmd_output=cmd_output,
options={}
)
@given(u'I create a Facts object equals to IOS 16.9 manually named o0311')
def step_impl(context):
context.o0311 = Facts(
hostname='csr1000v',
domain='abc.inc',
version='16.9',
build=NOT_SET,
serial='9KAAMNP24B9',
base_mac=NOT_SET,
memory=NOT_SET,
vendor='Cisco',
model='CSR1000V',
interfaces_lst=['GigabitEthernet1',
'GigabitEthernet2',
'GigabitEthernet3',
'Loopback12',
'Loopback101',
'Loopback854',
'Loopback1500',
'Loopback1501',
'Loopback1609',
'Loopback1974',
'Loopback1996',
'Loopback1997',
'Loopback1998',
'Loopback2000',
'Loopback2222',
'Loopback3000',
'Loopback4321',
'Loopback5263'],
options={}
)
@given(u'I create a Facts object from a IOS API 16.9 output named o0312')
def step_impl(context):
context.o0312 = _ios_facts_api_converter(
hostname="leaf05",
cmd_output=open_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/ios/api/"
"cisco_ios_api_get_facts_16.9.json"
)
),
options={}
)
@given(u'I create a Facts object equals to IOS-XR manually named o0401')
def step_impl(context):
context.o0401 = Facts(
hostname='spine03',
domain='dh.local',
version='6.5.3',
build=NOT_SET,
serial=NOT_SET,
base_mac=NOT_SET,
memory=NOT_SET,
vendor='Cisco',
model='IOS-XRv 9000',
interfaces_lst=['Bundle-Ether1',
'Bundle-Ether1.1234',
'Bundle-Ether1.4321',
'Loopback100',
'Loopback200',
'MgmtEth0/RP0/CPU0/0',
'GigabitEthernet0/0/0/0',
'GigabitEthernet0/0/0/1',
'GigabitEthernet0/0/0/2',
'GigabitEthernet0/0/0/3',
'GigabitEthernet0/0/0/4',
'GigabitEthernet0/0/0/5',
'GigabitEthernet0/0/0/6'],
options={}
)
@given(u'I create a Facts object from a IOS-XR API output named o0402')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'I create a Facts object from a IOS-XR Netconf output named o403')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'I create a Facts object from a IOS-XR SSH output named o0404')
def step_impl(context):
cmd_output = dict()
cmd_output[FACTS_SYS_DICT_KEY] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/iosxr/ssh/"
"cisco_iosxr_show_version.txt"
)
)
cmd_output[FACTS_INT_DICT_KEY] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/iosxr/ssh/"
"cisco_iosxr_show_ip_interface_brief.txt"
)
)
context.o0404 = _iosxr_facts_ssh_converter(
hostname="spine02",
cmd_output=cmd_output,
options={}
)
@given(u'I create a Facts object equals IOS-XR multi manually output named o0405')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'I create a Facts object from a IOS-XR multi Netconf output named o0406')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'I create a Facts object equals to Juniper manually named o0501')
def step_impl(context):
context.o0501 = Facts(
hostname='leaf04',
domain='dh.local',
version='18.3R1.9',
build=NOT_SET,
serial='VM5E983D143E',
base_mac=NOT_SET,
memory=2052008,
vendor='Juniper',
model='VMX',
interfaces_lst=[],
options={}
)
@given(u'I create a Facts object from a Juniper API output named o0502')
def step_impl(context):
cmd_output = dict()
cmd_output[FACTS_SYS_DICT_KEY] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/juniper/api/"
"juniper_api_get_software_information.xml"
)
)
cmd_output[FACTS_INT_DICT_KEY] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/juniper/api/"
"juniper_api_get_interface_information_terse.xml"
)
)
cmd_output[FACTS_SERIAL_DICT_KEY] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/juniper/api/"
"juniper_api_get_chassis_inventory_detail.xml"
)
)
cmd_output[FACTS_MEMORY_DICT_KEY] = open_txt_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/juniper/api/"
"juniper_api_get_system_memory_information.xml"
)
)
context.o0502 = _juniper_facts_api_converter(
hostname="leaf04",
cmd_output=cmd_output,
options={}
)
@given(u'I create a Facts object from a Juniper Netconf output named o0503')
def step_impl(context):
cmd_output = dict()
cmd_output[FACTS_SYS_DICT_KEY] = open_json_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/juniper/netconf/"
"juniper_nc_get_facts.json"
)
)
cmd_output[FACTS_INT_DICT_KEY] = open_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/juniper/netconf/"
"juniper_nc_get_interfaces_terse.xml"
)
)
context.o0503 = _juniper_facts_nc_converter(
hostname="leaf04",
cmd_output=cmd_output,
options={}
)
@given(u'I create a Facts object from a Juniper SSH output named o0504')
def step_impl(context):
cmd_output = dict()
cmd_output[FACTS_SYS_DICT_KEY] = open_json_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/juniper/ssh/"
"juniper_show_version.json"
)
)
cmd_output[FACTS_INT_DICT_KEY] = open_json_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/juniper/ssh/"
"juniper_show_interfaces_terse.json"
)
)
cmd_output[FACTS_INT_DICT_KEY] = open_json_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/juniper/ssh/"
"juniper_show_interfaces_terse.json"
)
)
cmd_output[FACTS_MEMORY_DICT_KEY] = open_json_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/juniper/ssh/"
"juniper_show_system_memory.json"
)
)
cmd_output[FACTS_CONFIG_DICT_KEY] = open_json_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/juniper/ssh/"
"juniper_show_conf_system.json"
)
)
cmd_output[FACTS_SERIAL_DICT_KEY] = open_json_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/juniper/ssh/"
"juniper_show_hardware_detail.json"
)
)
context.o0504 = _juniper_facts_ssh_converter(
hostname="leaf04",
cmd_output=cmd_output,
options={}
)
@given(u'I create a Facts object equals to NAPALM manually named o0601')
def step_impl(context):
context.o0601 = Facts(
hostname='leaf03',
domain=NOT_SET,
version=NOT_SET,
build=NOT_SET,
serial='9QXOX90PJ62',
base_mac=NOT_SET,
memory=NOT_SET,
vendor='Cisco',
model='Nexus9000 C9300v Chassis',
interfaces_lst=[],
options={}
)
@given(u'I create a Facts object from a NAPALM output named o0602')
def step_impl(context):
context.o0602 = _napalm_facts_converter(
hostname="leaf03",
cmd_output=open_json_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/napalm/nxos_get_facts.json"
)
),
options={}
)
@given(u'I create a Facts object equals to NXOS manually named o0701')
def step_impl(context):
context.o0701 = Facts(
hostname='leaf02',
domain='dh.local',
version='9.3(3)',
build=NOT_SET,
serial='9QXOX90PJ62',
base_mac=NOT_SET,
memory='16409064',
vendor='Cisco Systems, Inc.',
model='Nexus9000',
interfaces_lst=[],
options={}
)
@given(u'I create a Facts object from a NXOS API output named o0702')
def step_impl(context):
cmd_output = dict()
cmd_output[FACTS_SYS_DICT_KEY] = open_json_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/nxos/api/"
"nxos_api_get_facts.json"
)
)
cmd_output[FACTS_INT_DICT_KEY] = open_json_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/nxos/api/"
"nxos_api_get_interfaces.json"
)
)
cmd_output[FACTS_DOMAIN_DICT_KEY] = open_json_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/nxos/api/"
"nxos_api_get_domain.json"
)
)
context.o0702 = _nxos_facts_api_converter(
hostname="leaf03",
cmd_output=cmd_output,
options={}
)
@given(u'I create a Facts object from a NXOS Netconf output named o0703')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'I create a Facts object from a NXOS SSH output named o0704')
def step_impl(context):
cmd_output = dict()
cmd_output[FACTS_SYS_DICT_KEY] = open_json_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/nxos/ssh/"
"nxos_show_version.json"
)
)
cmd_output[FACTS_INT_DICT_KEY] = open_json_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/nxos/ssh/"
"nxos_show_interfaces.json"
)
)
cmd_output[FACTS_DOMAIN_DICT_KEY] = open_json_file(
path=(
f"{FEATURES_SRC_PATH}outputs/facts/nxos/ssh/"
"nxos_show_hostname.json"
)
)
context.o0704 = _nxos_facts_ssh_converter(
hostname="leaf03",
cmd_output=cmd_output,
options={}
)
@given(u'Facts o0001 should be equal to o0002')
def step_impl(context):
    """Verify the hand-built Arista Facts (o0001) matches the API one (o0002).

    Checks object equality first, then every individual field so a failure
    pinpoints the diverging attribute.
    """
    compared_fields = (
        "hostname", "domain", "version", "build", "base_mac",
        "memory", "vendor", "model", "interfaces_lst",
    )
    assert context.o0001 == context.o0002
    assert all(
        getattr(context.o0001, field) == getattr(context.o0002, field)
        for field in compared_fields
    )
@given(u'Facts o0001 should be equal to o0003')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'Facts o0001 should be equal to o0004')
def step_impl(context):
assert (
context.o0001 == context.o0004 and
context.o0001.hostname == context.o0004.hostname and
context.o0001.domain == context.o0004.domain and
context.o0001.version == context.o0004.version and
context.o0001.build == context.o0004.build and
context.o0001.base_mac == context.o0004.base_mac and
context.o0001.memory == context.o0004.memory and
context.o0001.vendor == context.o0004.vendor and
context.o0001.model == context.o0004.model and
context.o0001.interfaces_lst == context.o0004.interfaces_lst
)
@given(u'Facts o0002 should be equal to o0003')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'Facts o0002 should be equal to o0004')
def step_impl(context):
assert (
context.o0002 == context.o0004 and
context.o0002.hostname == context.o0004.hostname and
context.o0002.domain == context.o0004.domain and
context.o0002.version == context.o0004.version and
context.o0002.build == context.o0004.build and
context.o0002.base_mac == context.o0004.base_mac and
context.o0002.memory == context.o0004.memory and
context.o0002.vendor == context.o0004.vendor and
context.o0002.model == context.o0004.model and
context.o0002.interfaces_lst == context.o0004.interfaces_lst
)
@given(u'Facts o0003 should be equal to o0004')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'Facts YAML file should be equal to o0002')
def step_impl(context):
assert _compare_facts(
host_keys=FACTS_DATA_HOST_KEY,
hostname="leaf03",
groups=['eos'],
facts_host_data=context.o0002,
test=True
)
@given(u'Facts YAML file should be equal to o0003')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'Facts YAML file should be equal to o0004')
def step_impl(context):
assert _compare_facts(
host_keys=FACTS_DATA_HOST_KEY,
hostname="leaf03",
groups=['eos'],
facts_host_data=context.o0004,
test=True
)
@given(u'Facts o0101 should be equal to o0102')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'Facts o0101 should be equal to o0103')
def step_impl(context):
print("Cumulus Facts with Netconf not possible -> Not tested")
@given(u'Facts o0101 should be equal to o0104')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'Facts o0102 should be equal to o0103')
def step_impl(context):
print("Cumulus Facts with Netconf not possible -> Not tested")
@given(u'Facts o0102 should be equal to o0104')
def step_impl(context):
assert context.o0102 == context.o0104
assert (
context.o0102.hostname == context.o0104.hostname and
context.o0102.domain == context.o0104.domain and
context.o0102.version == context.o0104.version and
context.o0102.build == context.o0104.build and
context.o0102.serial == context.o0104.serial and
context.o0102.base_mac == context.o0104.base_mac and
context.o0102.memory == context.o0104.memory and
context.o0102.vendor == context.o0104.vendor and
context.o0102.model == context.o0104.model and
context.o0102.interfaces_lst == context.o0104.interfaces_lst
)
@given(u'Facts o0103 should be equal to o0104')
def step_impl(context):
print("Cumulus Facts with Netconf not possible -> Not tested")
@given(u'Facts YAML file should be equal to o0102')
def step_impl(context):
assert _compare_facts(
host_keys=FACTS_DATA_HOST_KEY,
hostname="leaf01",
groups=['linux'],
facts_host_data=context.o0102,
test=True
)
@given(u'Facts YAML file should be equal to o0103')
def step_impl(context):
print("Cumulus Facts with Netconf not possible -> Not tested")
@given(u'Facts YAML file should be equal to o0104')
def step_impl(context):
assert _compare_facts(
host_keys=FACTS_DATA_HOST_KEY,
hostname="leaf01",
groups=['linux'],
facts_host_data=context.o0104,
test=True
)
@given(u'Facts o0201 should be equal to o0202')
def step_impl(context):
assert (
context.o0201.hostname == context.o0202.hostname and
context.o0201.domain == context.o0202.domain and
context.o0201.memory == context.o0202.memory and
context.o0201.interfaces_lst == context.o0202.interfaces_lst
)
@given(u'Facts o0201 should be equal to o0203')
def step_impl(context):
print("Extreme VSP VRF with Netconf not possible -> Not tested")
@given(u'Facts o0201 should be equal to o0204')
def step_impl(context):
assert (
context.o0201.hostname == context.o0204.hostname and
context.o0201.domain == context.o0204.domain and
context.o0201.version == context.o0204.version and
context.o0201.base_mac == context.o0204.base_mac and
context.o0201.model == context.o0204.model and
context.o0201.serial == context.o0204.serial
)
@given(u'Facts o0202 should be equal to o0203')
def step_impl(context):
print("Extreme VSP VRF with Netconf not possible -> Not tested")
@given(u'Facts o0202 should be equal to o0204')
def step_impl(context):
print("Extreme VSP VRF with Netconf not possible -> Not tested")
@given(u'Facts o0203 should be equal to o0204')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'Facts YAML file should be equal to o0202')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'Facts YAML file should be equal to o0203')
def step_impl(context):
print("Extreme VSP VRF with Netconf not possible -> Not tested")
@given(u'Facts YAML file should be equal to o0204')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'Facts o0301 should be equal to o0302')
def step_impl(context):
assert (
context.o0301.hostname == context.o0302.hostname and
context.o0301.domain == context.o0302.domain and
context.o0301.vendor == context.o0302.vendor and
context.o0301.model == context.o0302.model and
context.o0301.serial == context.o0302.serial and
context.o0301.interfaces_lst == context.o0302.interfaces_lst
)
@given(u'Facts o0301 should be equal to o0303')
def step_impl(context):
assert (
context.o0301.hostname == context.o0303.hostname and
context.o0301.domain == context.o0303.domain and
context.o0301.vendor == context.o0303.vendor and
context.o0301.model == context.o0303.model and
context.o0301.serial == context.o0303.serial and
context.o0301.interfaces_lst == context.o0303.interfaces_lst
)
@given(u'Facts o0301 should be equal to o0304')
def step_impl(context):
assert (
context.o0301 == context.o0304 and
context.o0301.vendor == context.o0304.vendor and
context.o0301.build == context.o0304.build and
context.o0301.memory == context.o0304.memory and
context.o0301.interfaces_lst == context.o0304.interfaces_lst
)
@given(u'Facts o0302 should be equal to o0303')
def step_impl(context):
assert (
context.o0302 == context.o0303 and
context.o0302.hostname == context.o0303.hostname and
context.o0302.domain == context.o0303.domain and
context.o0302.vendor == context.o0303.vendor and
context.o0302.model == context.o0303.model and
context.o0302.serial == context.o0303.serial and
context.o0302.version == context.o0303.version and
context.o0302.memory == context.o0303.memory and
context.o0302.base_mac == context.o0303.base_mac and
context.o0302.build == context.o0303.build and
context.o0302.interfaces_lst == context.o0303.interfaces_lst
)
@given(u'Facts o0302 should be equal to o0304')
def step_impl(context):
assert (
context.o0302.vendor == context.o0304.vendor and
context.o0302.serial == context.o0304.serial and
context.o0302.hostname == context.o0304.hostname and
context.o0302.model == context.o0304.model and
context.o0302.interfaces_lst == context.o0304.interfaces_lst
)
@given(u'Facts o0303 should be equal to o0304')
def step_impl(context):
assert (
context.o0303.vendor == context.o0304.vendor and
context.o0303.serial == context.o0304.serial and
context.o0303.hostname == context.o0304.hostname and
context.o0303.model == context.o0304.model and
context.o0303.interfaces_lst == context.o0304.interfaces_lst
)
@given(u'Facts o0311 should be equal to o0312')
def step_impl(context):
assert (
context.o0311.hostname == context.o0312.hostname and
context.o0311.domain == context.o0312.domain and
context.o0311.vendor == context.o0312.vendor and
context.o0311.serial == context.o0312.serial and
context.o0311.model == context.o0312.model and
context.o0311.interfaces_lst == context.o0312.interfaces_lst
)
@given(u'Facts YAML file should be equal to o0302')
def step_impl(context):
print("/!\\/!\\ Facts YAML file should be equal to o0302 /!\\/!\\")
assert not _compare_facts(
host_keys=FACTS_DATA_HOST_KEY,
hostname="leaf05",
groups=['ios'],
facts_host_data=context.o0302,
test=True
)
@given(u'Facts YAML file should be equal to o0303')
def step_impl(context):
print("/!\\/!\\ Facts YAML file should be equal to o0303 /!\\/!\\")
assert not _compare_facts(
host_keys=FACTS_DATA_HOST_KEY,
hostname="leaf05",
groups=['ios'],
facts_host_data=context.o0303,
test=True
)
@given(u'Facts YAML file should be equal to o0304')
def step_impl(context):
assert _compare_facts(
host_keys=FACTS_DATA_HOST_KEY,
hostname="leaf05",
groups=['ios'],
facts_host_data=context.o0304,
test=True
)
@given(u'Facts o0401 should be equal to o0402')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'Facts o0401 should be equal to o0403')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'Facts o0401 should be equal to o0404')
def step_impl(context):
assert (
context.o0401 == context.o0404 and
context.o0401.vendor == context.o0404.vendor and
context.o0401.hostname == context.o0404.hostname and
context.o0401.model == context.o0404.model and
context.o0401.version == context.o0404.version and
context.o0401.interfaces_lst == context.o0404.interfaces_lst
)
@given(u'Facts o0402 should be equal to o0403')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'Facts o0402 should be equal to o0404')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'Facts o0403 should be equal to o0404')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'Facts o0405 should be equal to o0406')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'Facts YAML file should be equal to o0402')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'Facts YAML file should be equal to o0403')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'Facts YAML file should be equal to o0404')
def step_impl(context):
assert _compare_facts(
host_keys=FACTS_DATA_HOST_KEY,
hostname="spine03",
groups=['iosxr'],
facts_host_data=context.o0404,
test=True
)
@given(u'Facts o0501 should be equal to o0502')
def step_impl(context):
assert (
context.o0501.hostname == context.o0502.hostname and
context.o0501.vendor == context.o0502.vendor and
context.o0501.model == context.o0502.model
)
@given(u'Facts o0501 should be equal to o0503')
def step_impl(context):
assert (
context.o0501.hostname == context.o0503.hostname and
context.o0501.domain == context.o0503.domain and
context.o0501.model == context.o0503.model
)
@given(u'Facts o0501 should be equal to o0504')
def step_impl(context):
assert context.o0501 == context.o0504
@given(u'Facts o0502 should be equal to o0503')
def step_impl(context):
assert context.o0502 == context.o0503
assert (
context.o0502.hostname == context.o0503.hostname and
context.o0502.version == context.o0503.version and
context.o0502.build == context.o0503.build and
context.o0502.serial == context.o0503.serial and
context.o0502.base_mac == context.o0503.base_mac and
context.o0502.vendor == context.o0503.vendor and
context.o0502.model == context.o0503.model and
context.o0502.interfaces_lst == context.o0503.interfaces_lst
)
@given(u'Facts o0502 should be equal to o0504')
def step_impl(context):
    """Verify the Juniper API Facts (o0502) matches the SSH one (o0504).

    BUG FIX: the original body compared o0503 against o0504, so o0502 was
    never exercised despite the step text. Safe to substitute o0502 here:
    the 'o0502 should be equal to o0503' step already asserts these same
    fields are identical between o0502 and o0503.
    """
    assert (
        context.o0502.hostname == context.o0504.hostname and
        context.o0502.build == context.o0504.build and
        context.o0502.base_mac == context.o0504.base_mac and
        context.o0502.vendor == context.o0504.vendor and
        context.o0502.model == context.o0504.model
    )
@given(u'Facts o0503 should be equal to o0504')
def step_impl(context):
assert (
context.o0503.hostname == context.o0504.hostname and
context.o0503.domain == context.o0504.domain and
context.o0503.build == context.o0504.build and
context.o0503.base_mac == context.o0504.base_mac and
context.o0503.vendor == context.o0504.vendor and
context.o0503.model == context.o0504.model
)
@given(u'Facts YAML file should be equal to o0502')
def step_impl(context):
assert True
print("Facts YAML file and o0502 - Versions are differents !")
@given(u'Facts YAML file should be equal to o0503')
def step_impl(context):
assert True
print("Facts YAML file and o0503 - Versions are differents !")
@given(u'Facts YAML file should be equal to o0504')
def step_impl(context):
assert _compare_facts(
host_keys=FACTS_DATA_HOST_KEY,
hostname="leaf04",
groups=['junos'],
facts_host_data=context.o0504,
test=True
)
@given(u'Facts o0601 should be equal to o0602')
def step_impl(context):
assert context.o0601 == context.o0602
@given(u'Facts o0701 should be equal to o0702')
def step_impl(context):
    """Verify the hand-built NXOS Facts (o0701) matches the API one (o0702).

    BUG FIX: the original compared o0701 with *itself* on every line, so the
    assertions were vacuously true and o0702 was never checked. Memory is
    compared through str() because the API converter may return it as a
    different type (the o0702-vs-o0704 step does the same).
    """
    assert context.o0701 == context.o0702
    assert (
        context.o0701.domain == context.o0702.domain and
        context.o0701.version == context.o0702.version and
        str(context.o0701.memory) == str(context.o0702.memory)
    )
@given(u'Facts o0701 should be equal to o0703')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'Facts o0701 should be equal to o0704')
def step_impl(context):
assert context.o0701 == context.o0704
assert (
context.o0701.domain == context.o0704.domain and
context.o0701.version == context.o0704.version and
context.o0701.memory == context.o0704.memory
)
@given(u'Facts o0702 should be equal to o0703')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'Facts o0702 should be equal to o0704')
def step_impl(context):
assert (
context.o0702 == context.o0704 and
context.o0702.hostname == context.o0704.hostname and
context.o0702.domain == context.o0704.domain and
context.o0702.version == context.o0704.version and
str(context.o0702.memory) == str(context.o0704.memory) and
context.o0702.model == context.o0704.model and
context.o0702.serial == context.o0704.serial and
context.o0702.interfaces_lst == context.o0704.interfaces_lst
)
@given(u'Facts o0703 should be equal to o0704')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'Facts YAML file should be equal to o0702')
def step_impl(context):
assert _compare_facts(
host_keys=FACTS_DATA_HOST_KEY,
hostname="leaf02",
groups=['nxos'],
facts_host_data=context.o0702,
test=True
)
@given(u'Facts YAML file should be equal to o0703')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'Facts YAML file should be equal to o0704')
def step_impl(context):
assert _compare_facts(
host_keys=FACTS_DATA_HOST_KEY,
hostname="leaf02",
groups=['nxos'],
facts_host_data=context.o0704,
test=True
)
@given(u'I create a Facts object to test compare function named o9999')
def step_impl(context):
    # Reference baseline for the o99xx compare-function scenarios: each of
    # those scenarios builds a deliberately-mismatching Facts object (via
    # create_facts_obj_for_compare) and checks which fields the Facts
    # comparison honours when diffed against this object.
    context.o9999 = Facts(
        hostname='leaf01',
        domain=NOT_SET,
        version='4.0.0',
        build='Cumulus Linux 4.0.0',
        serial='50:00:00:02:00:00',
        base_mac='50:00:00:02:00:00',
        memory=944388,
        vendor='Cumulus Networks',
        model='VX',
        interfaces_lst=['swp5',
                        'swp7',
                        'swp2',
                        'swp3',
                        'swp1',
                        'swp6',
                        'swp4',
                        'eth0'],
        options={}
    )
@given(u'I create a Facts object to test compare function with <domain> named o9982')
def step_impl(context):
options = {
'compare': {
'domain': True
}
}
context.o9982 = create_facts_obj_for_compare(options)
@given(u'I create a Facts object to test compare equal to o9982 without <domain> named o9983')
def step_impl(context):
options = {}
context.o9983 = create_facts_obj_for_compare(options)
@given(u'I compare Facts o9982 and o9999 with a personal function - should not work')
def step_impl(context):
assert context.o9982 != context.o9999
@given(u'I compare Facts o9983 and o9999 with a personal function - should work')
def step_impl(context):
assert context.o9983 == context.o9999
@given(u'I create a Facts object to test compare function with <build> named o9984')
def step_impl(context):
options = {
'compare': {
'build': True
}
}
context.o9984 = create_facts_obj_for_compare(options)
@given(u'I create a Facts object to test compare equal to o9984 without <build> named o9985')
def step_impl(context):
options = {}
context.o9985 = create_facts_obj_for_compare(options)
@given(u'I compare Facts o9984 and o9999 with a personal function - should not work')
def step_impl(context):
assert context.o9984 != context.o9999
@given(u'I compare Facts o9985 and o9999 with a personal function - should work')
def step_impl(context):
assert context.o9985 == context.o9999
@given(u'I create a Facts object to test compare function with <serial> named o9986')
def step_impl(context):
options = {
'compare': {
'serial': True
}
}
context.o9986 = create_facts_obj_for_compare(options)
@given(u'I create a Facts object to test compare equal to o9986 without <serial> named o9987')
def step_impl(context):
options = {}
context.o9987 = create_facts_obj_for_compare(options)
@given(u'I compare Facts o9986 and o9999 with a personal function - should not work')
def step_impl(context):
assert context.o9986 != context.o9999
@given(u'I compare Facts o9987 and o9999 with a personal function - should work')
def step_impl(context):
assert context.o9987 == context.o9999
@given(u'I create a Facts object to test compare function with <base_mac> named o9988')
def step_impl(context):
options = {
'compare': {
'base_mac': True
}
}
context.o9988 = create_facts_obj_for_compare(options)
@given(u'I create a Facts object to test compare equal to o9988 without <base_mac> named o9989')
def step_impl(context):
options = {}
context.o9989 = create_facts_obj_for_compare(options)
@given(u'I compare Facts o9988 and o9999 with a personal function - should not work')
def step_impl(context):
assert context.o9988 != context.o9999
@given(u'I compare Facts o9989 and o9999 with a personal function - should work')
def step_impl(context):
assert context.o9989 == context.o9999
@given(u'I create a Facts object to test compare function with <memory> named o9990')
def step_impl(context):
options = {
'compare': {
'memory': True
}
}
context.o9990 = create_facts_obj_for_compare(options)
@given(u'I create a Facts object to test compare equal to o9990 without <memory> named o9991')
def step_impl(context):
options = {}
context.o9991 = create_facts_obj_for_compare(options)
@given(u'I compare Facts o9990 and o9999 with a personal function - should not work')
def step_impl(context):
assert context.o9990 != context.o9999
@given(u'I compare Facts o9991 and o9999 with a personal function - should work')
def step_impl(context):
assert context.o9991 == context.o9999
@given(u'I create a Facts object to test compare function with <vendor> named o9992')
def step_impl(context):
options = {
'compare': {
'vendor': True
}
}
context.o9992 = create_facts_obj_for_compare(options)
@given(u'I create a Facts object to test compare equal to o9992 without <vendor> named o9993')
def step_impl(context):
options = {}
context.o9993 = create_facts_obj_for_compare(options)
@given(u'I compare Facts o9992 and o9999 with a personal function - should not work')
def step_impl(context):
assert context.o9992 != context.o9999
@given(u'I compare Facts o9993 and o9999 with a personal function - should work')
def step_impl(context):
assert context.o9993 == context.o9999
@given(u'I create a Facts object to test compare function with <model> named o9994')
def step_impl(context):
options = {
'compare': {
'model': True
}
}
context.o9994 = create_facts_obj_for_compare(options)
@given(u'I create a Facts object to test compare equal to o9994 without <model> named o9995')
def step_impl(context):
options = {}
context.o9995 = create_facts_obj_for_compare(options)
@given(u'I compare Facts o9994 and o9999 with a personal function - should not work')
def step_impl(context):
assert context.o9994 != context.o9999
@given(u'I compare Facts o9995 and o9999 with a personal function - should work')
def step_impl(context):
assert context.o9995 == context.o9999
@given(u'I create a Facts object to test compare function with <interfaces_lst> named o9996')
def step_impl(context):
options = {
'compare': {
'interfaces_lst': True
}
}
context.o9996 = create_facts_obj_for_compare(options)
@given(u'I create a Facts object to test compare equal to o9996 without <interfaces_lst> named o9997')
def step_impl(context):
options = {}
context.o9997 = create_facts_obj_for_compare(options)
@given(u'I compare Facts o9996 and o9999 with a personal function - should not work')
def step_impl(context):
assert context.o9996 != context.o9999
@given(u'I compare Facts o9997 and o9999 with a personal function - should work')
def step_impl(context):
assert context.o9987 == context.o9999
def create_facts_obj_for_compare(options):
    """Build a Facts object whose fields deliberately differ from o9999.

    Only the fields enabled through ``options['compare']`` are taken into
    account by the Facts comparison, which is exactly what the o99xx
    compare-function scenarios verify against the o9999 baseline.
    """
    mismatching_interfaces = [
        'swp5', 'swp7', 'swp2', 'swp3', 'swp1', 'swp6', 'swp4', 'eth0',
        'WHAT IS THIS INTERFACE ????',
    ]
    return Facts(
        hostname='leaf01',
        domain="a_very_cool_domain.com",
        version='4.0.0',
        build='WRONG__BUILD__!!',
        serial='WRONG__SERIAL__!!',
        base_mac='WRONG__BASE_MAC__!!',
        memory=0,
        vendor='NOT CUMULUS SO NOT GOOD',
        model='NOT A CUMULUS MODEL',
        interfaces_lst=mismatching_interfaces,
        options=options
    )
@given(u'I Finish my Facts tests and list tests not implemented')
def step_impl(context):
    """Print the list of scenarios that have no implementation yet."""
    report = ["| The following tests are not implemented :"]
    report.extend(f"| {missing}" for missing in context.test_not_implemented)
    print("\n".join(report))
|
[
"netests.tools.file.open_file",
"netests.converters.facts.iosxr.ssh._iosxr_facts_ssh_converter",
"netests.comparators.facts_compare._compare_facts",
"behave.given",
"netests.converters.facts.juniper.nc._juniper_facts_nc_converter",
"netests.tools.file.open_txt_file_as_bytes",
"netests.converters.facts.nxos.api._nxos_facts_api_converter",
"netests.converters.facts.extreme_vsp.ssh._extreme_vsp_facts_ssh_converter",
"netests.converters.facts.extreme_vsp.api._extreme_vsp_facts_api_converter",
"netests.converters.facts.arista.api._arista_facts_api_converter",
"netests.protocols.facts.Facts",
"netests.converters.facts.juniper.api._juniper_facts_api_converter",
"netests.converters.facts.juniper.ssh._juniper_facts_ssh_converter",
"netests.converters.facts.nxos.ssh._nxos_facts_ssh_converter",
"netests.converters.facts.cumulus.ssh._cumulus_facts_ssh_converter",
"netests.tools.file.open_json_file",
"netests.converters.facts.cumulus.api._cumulus_facts_api_converter",
"netests.converters.facts.ios.ssh._ios_facts_ssh_converter",
"netests.tools.file.open_txt_file",
"netests.converters.facts.arista.ssh._arista_facts_ssh_converter"
] |
[((1794, 1873), 'behave.given', 'given', (['u"""A network protocols named Facts defined in netests/protocols/facts.py"""'], {}), "(u'A network protocols named Facts defined in netests/protocols/facts.py')\n", (1799, 1873), False, 'from behave import given, when, then\n'), ((1943, 2014), 'behave.given', 'given', (['u"""I create a Facts object equals to Arista manually named o0001"""'], {}), "(u'I create a Facts object equals to Arista manually named o0001')\n", (1948, 2014), False, 'from behave import given, when, then\n'), ((2693, 2763), 'behave.given', 'given', (['u"""I create a Facts object from a Arista API output named o0002"""'], {}), "(u'I create a Facts object from a Arista API output named o0002')\n", (2698, 2763), False, 'from behave import given, when, then\n'), ((3087, 3154), 'behave.given', 'given', (['u"""I create a Facts object from a Arista Netconf named o0003"""'], {}), "(u'I create a Facts object from a Arista Netconf named o0003')\n", (3092, 3154), False, 'from behave import given, when, then\n'), ((3230, 3300), 'behave.given', 'given', (['u"""I create a Facts object from a Arista SSH output named o0004"""'], {}), "(u'I create a Facts object from a Arista SSH output named o0004')\n", (3235, 3300), False, 'from behave import given, when, then\n'), ((4058, 4130), 'behave.given', 'given', (['u"""I create a Facts object equals to Cumulus manually named o0101"""'], {}), "(u'I create a Facts object equals to Cumulus manually named o0101')\n", (4063, 4130), False, 'from behave import given, when, then\n'), ((4733, 4804), 'behave.given', 'given', (['u"""I create a Facts object from a Cumulus API output named o0102"""'], {}), "(u'I create a Facts object from a Cumulus API output named o0102')\n", (4738, 4804), False, 'from behave import given, when, then\n'), ((5388, 5456), 'behave.given', 'given', (['u"""I create a Facts object from a Cumulus Netconf named o0103"""'], {}), "(u'I create a Facts object from a Cumulus Netconf named o0103')\n", (5393, 5456), 
False, 'from behave import given, when, then\n'), ((5551, 5622), 'behave.given', 'given', (['u"""I create a Facts object from a Cumulus SSH output named o0104"""'], {}), "(u'I create a Facts object from a Cumulus SSH output named o0104')\n", (5556, 5622), False, 'from behave import given, when, then\n'), ((6190, 6266), 'behave.given', 'given', (['u"""I create a Facts object equals to Extreme VSP manually named o0201"""'], {}), "(u'I create a Facts object equals to Extreme VSP manually named o0201')\n", (6195, 6266), False, 'from behave import given, when, then\n'), ((9431, 9506), 'behave.given', 'given', (['u"""I create a Facts object from a Extreme VSP API output named o0202"""'], {}), "(u'I create a Facts object from a Extreme VSP API output named o0202')\n", (9436, 9506), False, 'from behave import given, when, then\n'), ((10105, 10184), 'behave.given', 'given', (['u"""I create a Facts object from a Extreme VSP Netconf output named o0203"""'], {}), "(u'I create a Facts object from a Extreme VSP Netconf output named o0203')\n", (10110, 10184), False, 'from behave import given, when, then\n'), ((10281, 10356), 'behave.given', 'given', (['u"""I create a Facts object from a Extreme VSP SSH output named o0204"""'], {}), "(u'I create a Facts object from a Extreme VSP SSH output named o0204')\n", (10286, 10356), False, 'from behave import given, when, then\n'), ((11144, 11212), 'behave.given', 'given', (['u"""I create a Facts object equals to IOS manually named o0301"""'], {}), "(u'I create a Facts object equals to IOS manually named o0301')\n", (11149, 11212), False, 'from behave import given, when, then\n'), ((11658, 11725), 'behave.given', 'given', (['u"""I create a Facts object from a IOS API output named o0302"""'], {}), "(u'I create a Facts object from a IOS API output named o0302')\n", (11663, 11725), False, 'from behave import given, when, then\n'), ((12038, 12102), 'behave.given', 'given', (['u"""I create a Facts object from a IOS Netconf named o0303"""'], 
{}), "(u'I create a Facts object from a IOS Netconf named o0303')\n", (12043, 12102), False, 'from behave import given, when, then\n'), ((12411, 12471), 'behave.given', 'given', (['u"""I create a Facts object from a IOS SSH named o0304"""'], {}), "(u'I create a Facts object from a IOS SSH named o0304')\n", (12416, 12471), False, 'from behave import given, when, then\n'), ((13020, 13093), 'behave.given', 'given', (['u"""I create a Facts object equals to IOS 16.9 manually named o0311"""'], {}), "(u'I create a Facts object equals to IOS 16.9 manually named o0311')\n", (13025, 13093), False, 'from behave import given, when, then\n'), ((14134, 14206), 'behave.given', 'given', (['u"""I create a Facts object from a IOS API 16.9 output named o0312"""'], {}), "(u'I create a Facts object from a IOS API 16.9 output named o0312')\n", (14139, 14206), False, 'from behave import given, when, then\n'), ((14519, 14590), 'behave.given', 'given', (['u"""I create a Facts object equals to IOS-XR manually named o0401"""'], {}), "(u'I create a Facts object equals to IOS-XR manually named o0401')\n", (14524, 14590), False, 'from behave import given, when, then\n'), ((15510, 15580), 'behave.given', 'given', (['u"""I create a Facts object from a IOS-XR API output named o0402"""'], {}), "(u'I create a Facts object from a IOS-XR API output named o0402')\n", (15515, 15580), False, 'from behave import given, when, then\n'), ((15656, 15729), 'behave.given', 'given', (['u"""I create a Facts object from a IOS-XR Netconf output named o403"""'], {}), "(u'I create a Facts object from a IOS-XR Netconf output named o403')\n", (15661, 15729), False, 'from behave import given, when, then\n'), ((15805, 15875), 'behave.given', 'given', (['u"""I create a Facts object from a IOS-XR SSH output named o0404"""'], {}), "(u'I create a Facts object from a IOS-XR SSH output named o0404')\n", (15810, 15875), False, 'from behave import given, when, then\n'), ((16440, 16526), 'behave.given', 'given', (['u"""I create a 
Facts object equals IOS-XR multi manually output named o0405"""'], {}), "(\n u'I create a Facts object equals IOS-XR multi manually output named o0405')\n", (16445, 16526), False, 'from behave import given, when, then\n'), ((16597, 16682), 'behave.given', 'given', (['u"""I create a Facts object from a IOS-XR multi Netconf output named o0406"""'], {}), "(u'I create a Facts object from a IOS-XR multi Netconf output named o0406'\n )\n", (16602, 16682), False, 'from behave import given, when, then\n'), ((16753, 16825), 'behave.given', 'given', (['u"""I create a Facts object equals to Juniper manually named o0501"""'], {}), "(u'I create a Facts object equals to Juniper manually named o0501')\n", (16758, 16825), False, 'from behave import given, when, then\n'), ((17165, 17236), 'behave.given', 'given', (['u"""I create a Facts object from a Juniper API output named o0502"""'], {}), "(u'I create a Facts object from a Juniper API output named o0502')\n", (17170, 17236), False, 'from behave import given, when, then\n'), ((18240, 18315), 'behave.given', 'given', (['u"""I create a Facts object from a Juniper Netconf output named o0503"""'], {}), "(u'I create a Facts object from a Juniper Netconf output named o0503')\n", (18245, 18315), False, 'from behave import given, when, then\n'), ((18882, 18953), 'behave.given', 'given', (['u"""I create a Facts object from a Juniper SSH output named o0504"""'], {}), "(u'I create a Facts object from a Juniper SSH output named o0504')\n", (18887, 18953), False, 'from behave import given, when, then\n'), ((20293, 20364), 'behave.given', 'given', (['u"""I create a Facts object equals to NAPALM manually named o0601"""'], {}), "(u'I create a Facts object equals to NAPALM manually named o0601')\n", (20298, 20364), False, 'from behave import given, when, then\n'), ((20716, 20782), 'behave.given', 'given', (['u"""I create a Facts object from a NAPALM output named o0602"""'], {}), "(u'I create a Facts object from a NAPALM output named o0602')\n", 
(20721, 20782), False, 'from behave import given, when, then\n'), ((21065, 21134), 'behave.given', 'given', (['u"""I create a Facts object equals to NXOS manually named o0701"""'], {}), "(u'I create a Facts object equals to NXOS manually named o0701')\n", (21070, 21134), False, 'from behave import given, when, then\n'), ((21492, 21560), 'behave.given', 'given', (['u"""I create a Facts object from a NXOS API output named o0702"""'], {}), "(u'I create a Facts object from a NXOS API output named o0702')\n", (21497, 21560), False, 'from behave import given, when, then\n'), ((22292, 22364), 'behave.given', 'given', (['u"""I create a Facts object from a NXOS Netconf output named o0703"""'], {}), "(u'I create a Facts object from a NXOS Netconf output named o0703')\n", (22297, 22364), False, 'from behave import given, when, then\n'), ((22440, 22508), 'behave.given', 'given', (['u"""I create a Facts object from a NXOS SSH output named o0704"""'], {}), "(u'I create a Facts object from a NXOS SSH output named o0704')\n", (22445, 22508), False, 'from behave import given, when, then\n'), ((23239, 23285), 'behave.given', 'given', (['u"""Facts o0001 should be equal to o0002"""'], {}), "(u'Facts o0001 should be equal to o0002')\n", (23244, 23285), False, 'from behave import given, when, then\n'), ((23906, 23952), 'behave.given', 'given', (['u"""Facts o0001 should be equal to o0003"""'], {}), "(u'Facts o0001 should be equal to o0003')\n", (23911, 23952), False, 'from behave import given, when, then\n'), ((24028, 24074), 'behave.given', 'given', (['u"""Facts o0001 should be equal to o0004"""'], {}), "(u'Facts o0001 should be equal to o0004')\n", (24033, 24074), False, 'from behave import given, when, then\n'), ((24695, 24741), 'behave.given', 'given', (['u"""Facts o0002 should be equal to o0003"""'], {}), "(u'Facts o0002 should be equal to o0003')\n", (24700, 24741), False, 'from behave import given, when, then\n'), ((24817, 24863), 'behave.given', 'given', (['u"""Facts o0002 should 
be equal to o0004"""'], {}), "(u'Facts o0002 should be equal to o0004')\n", (24822, 24863), False, 'from behave import given, when, then\n'), ((25484, 25530), 'behave.given', 'given', (['u"""Facts o0003 should be equal to o0004"""'], {}), "(u'Facts o0003 should be equal to o0004')\n", (25489, 25530), False, 'from behave import given, when, then\n'), ((25606, 25656), 'behave.given', 'given', (['u"""Facts YAML file should be equal to o0002"""'], {}), "(u'Facts YAML file should be equal to o0002')\n", (25611, 25656), False, 'from behave import given, when, then\n'), ((25864, 25914), 'behave.given', 'given', (['u"""Facts YAML file should be equal to o0003"""'], {}), "(u'Facts YAML file should be equal to o0003')\n", (25869, 25914), False, 'from behave import given, when, then\n'), ((25990, 26040), 'behave.given', 'given', (['u"""Facts YAML file should be equal to o0004"""'], {}), "(u'Facts YAML file should be equal to o0004')\n", (25995, 26040), False, 'from behave import given, when, then\n'), ((26248, 26294), 'behave.given', 'given', (['u"""Facts o0101 should be equal to o0102"""'], {}), "(u'Facts o0101 should be equal to o0102')\n", (26253, 26294), False, 'from behave import given, when, then\n'), ((26370, 26416), 'behave.given', 'given', (['u"""Facts o0101 should be equal to o0103"""'], {}), "(u'Facts o0101 should be equal to o0103')\n", (26375, 26416), False, 'from behave import given, when, then\n'), ((26511, 26557), 'behave.given', 'given', (['u"""Facts o0101 should be equal to o0104"""'], {}), "(u'Facts o0101 should be equal to o0104')\n", (26516, 26557), False, 'from behave import given, when, then\n'), ((26633, 26679), 'behave.given', 'given', (['u"""Facts o0102 should be equal to o0103"""'], {}), "(u'Facts o0102 should be equal to o0103')\n", (26638, 26679), False, 'from behave import given, when, then\n'), ((26774, 26820), 'behave.given', 'given', (['u"""Facts o0102 should be equal to o0104"""'], {}), "(u'Facts o0102 should be equal to o0104')\n", (26779, 
26820), False, 'from behave import given, when, then\n'), ((27496, 27542), 'behave.given', 'given', (['u"""Facts o0103 should be equal to o0104"""'], {}), "(u'Facts o0103 should be equal to o0104')\n", (27501, 27542), False, 'from behave import given, when, then\n'), ((27637, 27687), 'behave.given', 'given', (['u"""Facts YAML file should be equal to o0102"""'], {}), "(u'Facts YAML file should be equal to o0102')\n", (27642, 27687), False, 'from behave import given, when, then\n'), ((27897, 27947), 'behave.given', 'given', (['u"""Facts YAML file should be equal to o0103"""'], {}), "(u'Facts YAML file should be equal to o0103')\n", (27902, 27947), False, 'from behave import given, when, then\n'), ((28042, 28092), 'behave.given', 'given', (['u"""Facts YAML file should be equal to o0104"""'], {}), "(u'Facts YAML file should be equal to o0104')\n", (28047, 28092), False, 'from behave import given, when, then\n'), ((28302, 28348), 'behave.given', 'given', (['u"""Facts o0201 should be equal to o0202"""'], {}), "(u'Facts o0201 should be equal to o0202')\n", (28307, 28348), False, 'from behave import given, when, then\n'), ((28639, 28685), 'behave.given', 'given', (['u"""Facts o0201 should be equal to o0203"""'], {}), "(u'Facts o0201 should be equal to o0203')\n", (28644, 28685), False, 'from behave import given, when, then\n'), ((28782, 28828), 'behave.given', 'given', (['u"""Facts o0201 should be equal to o0204"""'], {}), "(u'Facts o0201 should be equal to o0204')\n", (28787, 28828), False, 'from behave import given, when, then\n'), ((29221, 29267), 'behave.given', 'given', (['u"""Facts o0202 should be equal to o0203"""'], {}), "(u'Facts o0202 should be equal to o0203')\n", (29226, 29267), False, 'from behave import given, when, then\n'), ((29364, 29410), 'behave.given', 'given', (['u"""Facts o0202 should be equal to o0204"""'], {}), "(u'Facts o0202 should be equal to o0204')\n", (29369, 29410), False, 'from behave import given, when, then\n'), ((29507, 29553), 
'behave.given', 'given', (['u"""Facts o0203 should be equal to o0204"""'], {}), "(u'Facts o0203 should be equal to o0204')\n", (29512, 29553), False, 'from behave import given, when, then\n'), ((29629, 29679), 'behave.given', 'given', (['u"""Facts YAML file should be equal to o0202"""'], {}), "(u'Facts YAML file should be equal to o0202')\n", (29634, 29679), False, 'from behave import given, when, then\n'), ((29755, 29805), 'behave.given', 'given', (['u"""Facts YAML file should be equal to o0203"""'], {}), "(u'Facts YAML file should be equal to o0203')\n", (29760, 29805), False, 'from behave import given, when, then\n'), ((29902, 29952), 'behave.given', 'given', (['u"""Facts YAML file should be equal to o0204"""'], {}), "(u'Facts YAML file should be equal to o0204')\n", (29907, 29952), False, 'from behave import given, when, then\n'), ((30028, 30074), 'behave.given', 'given', (['u"""Facts o0301 should be equal to o0302"""'], {}), "(u'Facts o0301 should be equal to o0302')\n", (30033, 30074), False, 'from behave import given, when, then\n'), ((30477, 30523), 'behave.given', 'given', (['u"""Facts o0301 should be equal to o0303"""'], {}), "(u'Facts o0301 should be equal to o0303')\n", (30482, 30523), False, 'from behave import given, when, then\n'), ((30926, 30972), 'behave.given', 'given', (['u"""Facts o0301 should be equal to o0304"""'], {}), "(u'Facts o0301 should be equal to o0304')\n", (30931, 30972), False, 'from behave import given, when, then\n'), ((31300, 31346), 'behave.given', 'given', (['u"""Facts o0302 should be equal to o0303"""'], {}), "(u'Facts o0302 should be equal to o0303')\n", (31305, 31346), False, 'from behave import given, when, then\n'), ((32024, 32070), 'behave.given', 'given', (['u"""Facts o0302 should be equal to o0304"""'], {}), "(u'Facts o0302 should be equal to o0304')\n", (32029, 32070), False, 'from behave import given, when, then\n'), ((32416, 32462), 'behave.given', 'given', (['u"""Facts o0303 should be equal to o0304"""'], {}), 
"(u'Facts o0303 should be equal to o0304')\n", (32421, 32462), False, 'from behave import given, when, then\n'), ((32808, 32854), 'behave.given', 'given', (['u"""Facts o0311 should be equal to o0312"""'], {}), "(u'Facts o0311 should be equal to o0312')\n", (32813, 32854), False, 'from behave import given, when, then\n'), ((33257, 33307), 'behave.given', 'given', (['u"""Facts YAML file should be equal to o0302"""'], {}), "(u'Facts YAML file should be equal to o0302')\n", (33262, 33307), False, 'from behave import given, when, then\n'), ((33591, 33641), 'behave.given', 'given', (['u"""Facts YAML file should be equal to o0303"""'], {}), "(u'Facts YAML file should be equal to o0303')\n", (33596, 33641), False, 'from behave import given, when, then\n'), ((33925, 33975), 'behave.given', 'given', (['u"""Facts YAML file should be equal to o0304"""'], {}), "(u'Facts YAML file should be equal to o0304')\n", (33930, 33975), False, 'from behave import given, when, then\n'), ((34183, 34229), 'behave.given', 'given', (['u"""Facts o0401 should be equal to o0402"""'], {}), "(u'Facts o0401 should be equal to o0402')\n", (34188, 34229), False, 'from behave import given, when, then\n'), ((34305, 34351), 'behave.given', 'given', (['u"""Facts o0401 should be equal to o0403"""'], {}), "(u'Facts o0401 should be equal to o0403')\n", (34310, 34351), False, 'from behave import given, when, then\n'), ((34427, 34473), 'behave.given', 'given', (['u"""Facts o0401 should be equal to o0404"""'], {}), "(u'Facts o0401 should be equal to o0404')\n", (34432, 34473), False, 'from behave import given, when, then\n'), ((34864, 34910), 'behave.given', 'given', (['u"""Facts o0402 should be equal to o0403"""'], {}), "(u'Facts o0402 should be equal to o0403')\n", (34869, 34910), False, 'from behave import given, when, then\n'), ((34986, 35032), 'behave.given', 'given', (['u"""Facts o0402 should be equal to o0404"""'], {}), "(u'Facts o0402 should be equal to o0404')\n", (34991, 35032), False, 'from behave 
import given, when, then\n'), ((35108, 35154), 'behave.given', 'given', (['u"""Facts o0403 should be equal to o0404"""'], {}), "(u'Facts o0403 should be equal to o0404')\n", (35113, 35154), False, 'from behave import given, when, then\n'), ((35230, 35276), 'behave.given', 'given', (['u"""Facts o0405 should be equal to o0406"""'], {}), "(u'Facts o0405 should be equal to o0406')\n", (35235, 35276), False, 'from behave import given, when, then\n'), ((35352, 35402), 'behave.given', 'given', (['u"""Facts YAML file should be equal to o0402"""'], {}), "(u'Facts YAML file should be equal to o0402')\n", (35357, 35402), False, 'from behave import given, when, then\n'), ((35478, 35528), 'behave.given', 'given', (['u"""Facts YAML file should be equal to o0403"""'], {}), "(u'Facts YAML file should be equal to o0403')\n", (35483, 35528), False, 'from behave import given, when, then\n'), ((35604, 35654), 'behave.given', 'given', (['u"""Facts YAML file should be equal to o0404"""'], {}), "(u'Facts YAML file should be equal to o0404')\n", (35609, 35654), False, 'from behave import given, when, then\n'), ((35865, 35911), 'behave.given', 'given', (['u"""Facts o0501 should be equal to o0502"""'], {}), "(u'Facts o0501 should be equal to o0502')\n", (35870, 35911), False, 'from behave import given, when, then\n'), ((36127, 36173), 'behave.given', 'given', (['u"""Facts o0501 should be equal to o0503"""'], {}), "(u'Facts o0501 should be equal to o0503')\n", (36132, 36173), False, 'from behave import given, when, then\n'), ((36389, 36435), 'behave.given', 'given', (['u"""Facts o0501 should be equal to o0504"""'], {}), "(u'Facts o0501 should be equal to o0504')\n", (36394, 36435), False, 'from behave import given, when, then\n'), ((36505, 36551), 'behave.given', 'given', (['u"""Facts o0502 should be equal to o0503"""'], {}), "(u'Facts o0502 should be equal to o0503')\n", (36510, 36551), False, 'from behave import given, when, then\n'), ((37114, 37160), 'behave.given', 'given', (['u"""Facts 
o0502 should be equal to o0504"""'], {}), "(u'Facts o0502 should be equal to o0504')\n", (37119, 37160), False, 'from behave import given, when, then\n'), ((37492, 37538), 'behave.given', 'given', (['u"""Facts o0503 should be equal to o0504"""'], {}), "(u'Facts o0503 should be equal to o0504')\n", (37497, 37538), False, 'from behave import given, when, then\n'), ((37926, 37976), 'behave.given', 'given', (['u"""Facts YAML file should be equal to o0502"""'], {}), "(u'Facts YAML file should be equal to o0502')\n", (37931, 37976), False, 'from behave import given, when, then\n'), ((38087, 38137), 'behave.given', 'given', (['u"""Facts YAML file should be equal to o0503"""'], {}), "(u'Facts YAML file should be equal to o0503')\n", (38092, 38137), False, 'from behave import given, when, then\n'), ((38248, 38298), 'behave.given', 'given', (['u"""Facts YAML file should be equal to o0504"""'], {}), "(u'Facts YAML file should be equal to o0504')\n", (38253, 38298), False, 'from behave import given, when, then\n'), ((38508, 38554), 'behave.given', 'given', (['u"""Facts o0601 should be equal to o0602"""'], {}), "(u'Facts o0601 should be equal to o0602')\n", (38513, 38554), False, 'from behave import given, when, then\n'), ((38624, 38670), 'behave.given', 'given', (['u"""Facts o0701 should be equal to o0702"""'], {}), "(u'Facts o0701 should be equal to o0702')\n", (38629, 38670), False, 'from behave import given, when, then\n'), ((38928, 38974), 'behave.given', 'given', (['u"""Facts o0701 should be equal to o0703"""'], {}), "(u'Facts o0701 should be equal to o0703')\n", (38933, 38974), False, 'from behave import given, when, then\n'), ((39050, 39096), 'behave.given', 'given', (['u"""Facts o0701 should be equal to o0704"""'], {}), "(u'Facts o0701 should be equal to o0704')\n", (39055, 39096), False, 'from behave import given, when, then\n'), ((39354, 39400), 'behave.given', 'given', (['u"""Facts o0702 should be equal to o0703"""'], {}), "(u'Facts o0702 should be equal to 
o0703')\n", (39359, 39400), False, 'from behave import given, when, then\n'), ((39476, 39522), 'behave.given', 'given', (['u"""Facts o0702 should be equal to o0704"""'], {}), "(u'Facts o0702 should be equal to o0704')\n", (39481, 39522), False, 'from behave import given, when, then\n'), ((40037, 40083), 'behave.given', 'given', (['u"""Facts o0703 should be equal to o0704"""'], {}), "(u'Facts o0703 should be equal to o0704')\n", (40042, 40083), False, 'from behave import given, when, then\n'), ((40159, 40209), 'behave.given', 'given', (['u"""Facts YAML file should be equal to o0702"""'], {}), "(u'Facts YAML file should be equal to o0702')\n", (40164, 40209), False, 'from behave import given, when, then\n'), ((40418, 40468), 'behave.given', 'given', (['u"""Facts YAML file should be equal to o0703"""'], {}), "(u'Facts YAML file should be equal to o0703')\n", (40423, 40468), False, 'from behave import given, when, then\n'), ((40544, 40594), 'behave.given', 'given', (['u"""Facts YAML file should be equal to o0704"""'], {}), "(u'Facts YAML file should be equal to o0704')\n", (40549, 40594), False, 'from behave import given, when, then\n'), ((40803, 40873), 'behave.given', 'given', (['u"""I create a Facts object to test compare function named o9999"""'], {}), "(u'I create a Facts object to test compare function named o9999')\n", (40808, 40873), False, 'from behave import given, when, then\n'), ((41475, 41569), 'behave.given', 'given', (['u"""I create a Facts object to test compare function with <domain> named o9982"""'], {}), "(\n u'I create a Facts object to test compare function with <domain> named o9982'\n )\n", (41480, 41569), False, 'from behave import given, when, then\n'), ((41725, 41828), 'behave.given', 'given', (['u"""I create a Facts object to test compare equal to o9982 without <domain> named o9983"""'], {}), "(\n u'I create a Facts object to test compare equal to o9982 without <domain> named o9983'\n )\n", (41730, 41828), False, 'from behave import given, 
when, then\n'), ((41921, 42015), 'behave.given', 'given', (['u"""I compare Facts o9982 and o9999 with a personal function - should not work"""'], {}), "(\n u'I compare Facts o9982 and o9999 with a personal function - should not work'\n )\n", (41926, 42015), False, 'from behave import given, when, then\n'), ((42075, 42160), 'behave.given', 'given', (['u"""I compare Facts o9983 and o9999 with a personal function - should work"""'], {}), "(u'I compare Facts o9983 and o9999 with a personal function - should work'\n )\n", (42080, 42160), False, 'from behave import given, when, then\n'), ((42225, 42318), 'behave.given', 'given', (['u"""I create a Facts object to test compare function with <build> named o9984"""'], {}), "(\n u'I create a Facts object to test compare function with <build> named o9984'\n )\n", (42230, 42318), False, 'from behave import given, when, then\n'), ((42473, 42575), 'behave.given', 'given', (['u"""I create a Facts object to test compare equal to o9984 without <build> named o9985"""'], {}), "(\n u'I create a Facts object to test compare equal to o9984 without <build> named o9985'\n )\n", (42478, 42575), False, 'from behave import given, when, then\n'), ((42668, 42762), 'behave.given', 'given', (['u"""I compare Facts o9984 and o9999 with a personal function - should not work"""'], {}), "(\n u'I compare Facts o9984 and o9999 with a personal function - should not work'\n )\n", (42673, 42762), False, 'from behave import given, when, then\n'), ((42822, 42907), 'behave.given', 'given', (['u"""I compare Facts o9985 and o9999 with a personal function - should work"""'], {}), "(u'I compare Facts o9985 and o9999 with a personal function - should work'\n )\n", (42827, 42907), False, 'from behave import given, when, then\n'), ((42972, 43066), 'behave.given', 'given', (['u"""I create a Facts object to test compare function with <serial> named o9986"""'], {}), "(\n u'I create a Facts object to test compare function with <serial> named o9986'\n )\n", (42977, 
43066), False, 'from behave import given, when, then\n'), ((43222, 43325), 'behave.given', 'given', (['u"""I create a Facts object to test compare equal to o9986 without <serial> named o9987"""'], {}), "(\n u'I create a Facts object to test compare equal to o9986 without <serial> named o9987'\n )\n", (43227, 43325), False, 'from behave import given, when, then\n'), ((43418, 43512), 'behave.given', 'given', (['u"""I compare Facts o9986 and o9999 with a personal function - should not work"""'], {}), "(\n u'I compare Facts o9986 and o9999 with a personal function - should not work'\n )\n", (43423, 43512), False, 'from behave import given, when, then\n'), ((43572, 43657), 'behave.given', 'given', (['u"""I compare Facts o9987 and o9999 with a personal function - should work"""'], {}), "(u'I compare Facts o9987 and o9999 with a personal function - should work'\n )\n", (43577, 43657), False, 'from behave import given, when, then\n'), ((43722, 43818), 'behave.given', 'given', (['u"""I create a Facts object to test compare function with <base_mac> named o9988"""'], {}), "(\n u'I create a Facts object to test compare function with <base_mac> named o9988'\n )\n", (43727, 43818), False, 'from behave import given, when, then\n'), ((43976, 44081), 'behave.given', 'given', (['u"""I create a Facts object to test compare equal to o9988 without <base_mac> named o9989"""'], {}), "(\n u'I create a Facts object to test compare equal to o9988 without <base_mac> named o9989'\n )\n", (43981, 44081), False, 'from behave import given, when, then\n'), ((44174, 44268), 'behave.given', 'given', (['u"""I compare Facts o9988 and o9999 with a personal function - should not work"""'], {}), "(\n u'I compare Facts o9988 and o9999 with a personal function - should not work'\n )\n", (44179, 44268), False, 'from behave import given, when, then\n'), ((44328, 44413), 'behave.given', 'given', (['u"""I compare Facts o9989 and o9999 with a personal function - should work"""'], {}), "(u'I compare Facts o9989 
and o9999 with a personal function - should work'\n )\n", (44333, 44413), False, 'from behave import given, when, then\n'), ((44478, 44572), 'behave.given', 'given', (['u"""I create a Facts object to test compare function with <memory> named o9990"""'], {}), "(\n u'I create a Facts object to test compare function with <memory> named o9990'\n )\n", (44483, 44572), False, 'from behave import given, when, then\n'), ((44728, 44831), 'behave.given', 'given', (['u"""I create a Facts object to test compare equal to o9990 without <memory> named o9991"""'], {}), "(\n u'I create a Facts object to test compare equal to o9990 without <memory> named o9991'\n )\n", (44733, 44831), False, 'from behave import given, when, then\n'), ((44924, 45018), 'behave.given', 'given', (['u"""I compare Facts o9990 and o9999 with a personal function - should not work"""'], {}), "(\n u'I compare Facts o9990 and o9999 with a personal function - should not work'\n )\n", (44929, 45018), False, 'from behave import given, when, then\n'), ((45078, 45163), 'behave.given', 'given', (['u"""I compare Facts o9991 and o9999 with a personal function - should work"""'], {}), "(u'I compare Facts o9991 and o9999 with a personal function - should work'\n )\n", (45083, 45163), False, 'from behave import given, when, then\n'), ((45228, 45322), 'behave.given', 'given', (['u"""I create a Facts object to test compare function with <vendor> named o9992"""'], {}), "(\n u'I create a Facts object to test compare function with <vendor> named o9992'\n )\n", (45233, 45322), False, 'from behave import given, when, then\n'), ((45478, 45581), 'behave.given', 'given', (['u"""I create a Facts object to test compare equal to o9992 without <vendor> named o9993"""'], {}), "(\n u'I create a Facts object to test compare equal to o9992 without <vendor> named o9993'\n )\n", (45483, 45581), False, 'from behave import given, when, then\n'), ((45674, 45768), 'behave.given', 'given', (['u"""I compare Facts o9992 and o9999 with a personal 
function - should not work"""'], {}), "(\n u'I compare Facts o9992 and o9999 with a personal function - should not work'\n )\n", (45679, 45768), False, 'from behave import given, when, then\n'), ((45828, 45913), 'behave.given', 'given', (['u"""I compare Facts o9993 and o9999 with a personal function - should work"""'], {}), "(u'I compare Facts o9993 and o9999 with a personal function - should work'\n )\n", (45833, 45913), False, 'from behave import given, when, then\n'), ((45978, 46071), 'behave.given', 'given', (['u"""I create a Facts object to test compare function with <model> named o9994"""'], {}), "(\n u'I create a Facts object to test compare function with <model> named o9994'\n )\n", (45983, 46071), False, 'from behave import given, when, then\n'), ((46226, 46328), 'behave.given', 'given', (['u"""I create a Facts object to test compare equal to o9994 without <model> named o9995"""'], {}), "(\n u'I create a Facts object to test compare equal to o9994 without <model> named o9995'\n )\n", (46231, 46328), False, 'from behave import given, when, then\n'), ((46421, 46515), 'behave.given', 'given', (['u"""I compare Facts o9994 and o9999 with a personal function - should not work"""'], {}), "(\n u'I compare Facts o9994 and o9999 with a personal function - should not work'\n )\n", (46426, 46515), False, 'from behave import given, when, then\n'), ((46575, 46660), 'behave.given', 'given', (['u"""I compare Facts o9995 and o9999 with a personal function - should work"""'], {}), "(u'I compare Facts o9995 and o9999 with a personal function - should work'\n )\n", (46580, 46660), False, 'from behave import given, when, then\n'), ((46725, 46827), 'behave.given', 'given', (['u"""I create a Facts object to test compare function with <interfaces_lst> named o9996"""'], {}), "(\n u'I create a Facts object to test compare function with <interfaces_lst> named o9996'\n )\n", (46730, 46827), False, 'from behave import given, when, then\n'), ((46991, 47102), 'behave.given', 'given', 
(['u"""I create a Facts object to test compare equal to o9996 without <interfaces_lst> named o9997"""'], {}), "(\n u'I create a Facts object to test compare equal to o9996 without <interfaces_lst> named o9997'\n )\n", (46996, 47102), False, 'from behave import given, when, then\n'), ((47195, 47289), 'behave.given', 'given', (['u"""I compare Facts o9996 and o9999 with a personal function - should not work"""'], {}), "(\n u'I compare Facts o9996 and o9999 with a personal function - should not work'\n )\n", (47200, 47289), False, 'from behave import given, when, then\n'), ((47349, 47434), 'behave.given', 'given', (['u"""I compare Facts o9997 and o9999 with a personal function - should work"""'], {}), "(u'I compare Facts o9997 and o9999 with a personal function - should work'\n )\n", (47354, 47434), False, 'from behave import given, when, then\n'), ((48204, 48268), 'behave.given', 'given', (['u"""I Finish my Facts tests and list tests not implemented"""'], {}), "(u'I Finish my Facts tests and list tests not implemented')\n", (48209, 48268), False, 'from behave import given, when, then\n'), ((2059, 2425), 'netests.protocols.facts.Facts', 'Facts', ([], {'hostname': '"""leaf03"""', 'domain': '"""dh.local"""', 'version': '"""4.24.0F"""', 'build': '"""da8d6269-c25f-4a12-930b-c3c42c12c38a"""', 'serial': '""""""', 'base_mac': '"""50:00:00:d7:ee:0b"""', 'memory': '(2014424)', 'vendor': '"""Arista"""', 'model': '"""vEOS"""', 'interfaces_lst': "['Management1', 'Ethernet8', 'Ethernet2', 'Ethernet3', 'Ethernet1',\n 'Ethernet6', 'Ethernet7', 'Ethernet4', 'Ethernet5']", 'options': '{}'}), "(hostname='leaf03', domain='dh.local', version='4.24.0F', build=\n 'da8d6269-c25f-4a12-930b-c3c42c12c38a', serial='', base_mac=\n '50:00:00:d7:ee:0b', memory=2014424, vendor='Arista', model='vEOS',\n interfaces_lst=['Management1', 'Ethernet8', 'Ethernet2', 'Ethernet3',\n 'Ethernet1', 'Ethernet6', 'Ethernet7', 'Ethernet4', 'Ethernet5'],\n options={})\n", (2064, 2425), False, 'from 
netests.protocols.facts import Facts\n'), ((2805, 2903), 'netests.tools.file.open_json_file', 'open_json_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/arista/api/arista_api_get_facts.json"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/arista/api/arista_api_get_facts.json')\n", (2819, 2903), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((2972, 3057), 'netests.converters.facts.arista.api._arista_facts_api_converter', '_arista_facts_api_converter', ([], {'hostname': '"""leaf03"""', 'cmd_output': 'cmd_output', 'options': '{}'}), "(hostname='leaf03', cmd_output=cmd_output,\n options={})\n", (2999, 3057), False, 'from netests.converters.facts.arista.api import _arista_facts_api_converter\n'), ((3386, 3492), 'netests.tools.file.open_json_file', 'open_json_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/arista/ssh/arista_cli_show_version.json"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/arista/ssh/arista_cli_show_version.json'\n )\n", (3400, 3492), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((3573, 3688), 'netests.tools.file.open_json_file', 'open_json_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/arista/ssh/arista_cli_show_interface_status.json"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/arista/ssh/arista_cli_show_interface_status.json'\n )\n", (3587, 3688), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((3772, 3879), 'netests.tools.file.open_json_file', 'open_json_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/arista/ssh/arista_cli_show_hostname.json"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/arista/ssh/arista_cli_show_hostname.json'\n )\n", (3786, 3879), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((3943, 4028), 
'netests.converters.facts.arista.ssh._arista_facts_ssh_converter', '_arista_facts_ssh_converter', ([], {'hostname': '"""leaf03"""', 'cmd_output': 'cmd_output', 'options': '{}'}), "(hostname='leaf03', cmd_output=cmd_output,\n options={})\n", (3970, 4028), False, 'from netests.converters.facts.arista.ssh import _arista_facts_ssh_converter\n'), ((4175, 4486), 'netests.protocols.facts.Facts', 'Facts', ([], {'hostname': '"""cumulus"""', 'domain': 'NOT_SET', 'version': '"""3.7.5"""', 'build': '"""Cumulus Linux 3.7.5"""', 'serial': '"""50:00:00:01:00:00"""', 'base_mac': '"""50:00:00:01:00:00"""', 'memory': '(951264)', 'vendor': '"""Cumulus Networks"""', 'model': '"""VX"""', 'interfaces_lst': "['swp5', 'swp7', 'swp2', 'swp3', 'swp1', 'swp6', 'swp4', 'eth0']", 'options': '{}'}), "(hostname='cumulus', domain=NOT_SET, version='3.7.5', build=\n 'Cumulus Linux 3.7.5', serial='50:00:00:01:00:00', base_mac=\n '50:00:00:01:00:00', memory=951264, vendor='Cumulus Networks', model=\n 'VX', interfaces_lst=['swp5', 'swp7', 'swp2', 'swp3', 'swp1', 'swp6',\n 'swp4', 'eth0'], options={})\n", (4180, 4486), False, 'from netests.protocols.facts import Facts\n'), ((4890, 5005), 'netests.tools.file.open_txt_file_as_bytes', 'open_txt_file_as_bytes', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/cumulus/api/cumulus_api_show_system.json"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/cumulus/api/cumulus_api_show_system.json'\n )\n", (4912, 5005), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((5086, 5208), 'netests.tools.file.open_txt_file_as_bytes', 'open_txt_file_as_bytes', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/cumulus/api/cumulus_api_show_interface_all.json"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/cumulus/api/cumulus_api_show_interface_all.json'\n )\n", (5108, 5208), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((5272, 5358), 
'netests.converters.facts.cumulus.api._cumulus_facts_api_converter', '_cumulus_facts_api_converter', ([], {'hostname': '"""leaf01"""', 'cmd_output': 'cmd_output', 'options': '{}'}), "(hostname='leaf01', cmd_output=cmd_output,\n options={})\n", (5300, 5358), False, 'from netests.converters.facts.cumulus.api import _cumulus_facts_api_converter\n'), ((5708, 5815), 'netests.tools.file.open_json_file', 'open_json_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/cumulus/ssh/cumulus_net_show_system.json"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/cumulus/ssh/cumulus_net_show_system.json'\n )\n", (5722, 5815), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((5896, 6010), 'netests.tools.file.open_json_file', 'open_json_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/cumulus/ssh/cumulus_net_show_interface_all.json"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/cumulus/ssh/cumulus_net_show_interface_all.json'\n )\n", (5910, 6010), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((6074, 6160), 'netests.converters.facts.cumulus.ssh._cumulus_facts_ssh_converter', '_cumulus_facts_ssh_converter', ([], {'hostname': '"""leaf01"""', 'cmd_output': 'cmd_output', 'options': '{}'}), "(hostname='leaf01', cmd_output=cmd_output,\n options={})\n", (6102, 6160), False, 'from netests.converters.facts.cumulus.ssh import _cumulus_facts_ssh_converter\n'), ((6311, 7258), 'netests.protocols.facts.Facts', 'Facts', ([], {'hostname': '"""spine02"""', 'domain': '"""dh.local"""', 'version': '"""8.1.0.0"""', 'build': 'NOT_SET', 'serial': '"""SDNIPM624B36"""', 'base_mac': '"""00:51:00:02:00:00"""', 'memory': '(2087444480)', 'vendor': '"""Extreme Networks"""', 'model': '"""8284XSQ"""', 'interfaces_lst': "['mgmt', '1/1', '1/2', '1/3', '1/4', '1/5', '1/6', '1/7', '1/8', '1/9',\n '1/10', '1/11', '1/12', '1/13', '1/14', '1/15', '1/16', '1/17', '1/18',\n 
'1/19', '1/20', '1/21', '1/22', '1/23', '1/24', '1/25', '1/26', '1/27',\n '1/28', '1/29', '1/30', '1/31', '1/32', '1/33', '1/34', '1/35', '1/36',\n '1/37', '1/38', '1/39', '1/40', '1/41', '1/42', '2/1', '2/2', '2/3',\n '2/4', '2/5', '2/6', '2/7', '2/8', '2/9', '2/10', '2/11', '2/12',\n '2/13', '2/14', '2/15', '2/16', '2/17', '2/18', '2/19', '2/20', '2/21',\n '2/22', '2/23', '2/24', '2/25', '2/26', '2/27', '2/28', '2/29', '2/30',\n '2/31', '2/32', '2/33', '2/34', '2/35', '2/36', '2/37', '2/38', '2/39',\n '2/40', '2/41', '2/42', 'Default']", 'options': '{}'}), "(hostname='spine02', domain='dh.local', version='8.1.0.0', build=\n NOT_SET, serial='SDNIPM624B36', base_mac='00:51:00:02:00:00', memory=\n 2087444480, vendor='Extreme Networks', model='8284XSQ', interfaces_lst=\n ['mgmt', '1/1', '1/2', '1/3', '1/4', '1/5', '1/6', '1/7', '1/8', '1/9',\n '1/10', '1/11', '1/12', '1/13', '1/14', '1/15', '1/16', '1/17', '1/18',\n '1/19', '1/20', '1/21', '1/22', '1/23', '1/24', '1/25', '1/26', '1/27',\n '1/28', '1/29', '1/30', '1/31', '1/32', '1/33', '1/34', '1/35', '1/36',\n '1/37', '1/38', '1/39', '1/40', '1/41', '1/42', '2/1', '2/2', '2/3',\n '2/4', '2/5', '2/6', '2/7', '2/8', '2/9', '2/10', '2/11', '2/12',\n '2/13', '2/14', '2/15', '2/16', '2/17', '2/18', '2/19', '2/20', '2/21',\n '2/22', '2/23', '2/24', '2/25', '2/26', '2/27', '2/28', '2/29', '2/30',\n '2/31', '2/32', '2/33', '2/34', '2/35', '2/36', '2/37', '2/38', '2/39',\n '2/40', '2/41', '2/42', 'Default'], options={})\n", (6316, 7258), False, 'from netests.protocols.facts import Facts\n'), ((9592, 9713), 'netests.tools.file.open_json_file', 'open_json_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/extreme_vsp/api/extreme_vsp_api_openconfig_system.json"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/extreme_vsp/api/extreme_vsp_api_openconfig_system.json'\n )\n", (9606, 9713), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((9794, 9919), 
'netests.tools.file.open_json_file', 'open_json_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/extreme_vsp/api/extreme_vsp_api_openconfig_interfaces.json"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/extreme_vsp/api/extreme_vsp_api_openconfig_interfaces.json'\n )\n", (9808, 9919), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((9984, 10075), 'netests.converters.facts.extreme_vsp.api._extreme_vsp_facts_api_converter', '_extreme_vsp_facts_api_converter', ([], {'hostname': '"""spine02"""', 'cmd_output': 'cmd_output', 'options': '{}'}), "(hostname='spine02', cmd_output=cmd_output,\n options={})\n", (10016, 10075), False, 'from netests.converters.facts.extreme_vsp.api import _extreme_vsp_facts_api_converter\n'), ((10442, 10549), 'netests.tools.file.open_txt_file', 'open_txt_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/extreme_vsp/ssh/extreme_vsp_show_tech.txt"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/extreme_vsp/ssh/extreme_vsp_show_tech.txt'\n )\n", (10455, 10549), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((10630, 10764), 'netests.tools.file.open_txt_file', 'open_txt_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/extreme_vsp/ssh/extreme_vsp_show_interfaces_gigabitethernet_name.txt"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/extreme_vsp/ssh/extreme_vsp_show_interfaces_gigabitethernet_name.txt'\n )\n", (10643, 10764), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((10848, 10958), 'netests.tools.file.open_txt_file', 'open_txt_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/extreme_vsp/ssh/extreme_vsp_show_sys_dns.txt"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/extreme_vsp/ssh/extreme_vsp_show_sys_dns.txt'\n )\n", (10861, 10958), False, 'from netests.tools.file import open_file, open_txt_file, 
open_json_file, open_txt_file_as_bytes\n'), ((11023, 11114), 'netests.converters.facts.extreme_vsp.ssh._extreme_vsp_facts_ssh_converter', '_extreme_vsp_facts_ssh_converter', ([], {'hostname': '"""spine02"""', 'cmd_output': 'cmd_output', 'options': '{}'}), "(hostname='spine02', cmd_output=cmd_output,\n options={})\n", (11055, 11114), False, 'from netests.converters.facts.extreme_vsp.ssh import _extreme_vsp_facts_ssh_converter\n'), ((11257, 11525), 'netests.protocols.facts.Facts', 'Facts', ([], {'hostname': '"""leaf05"""', 'domain': '"""dh.local"""', 'version': '"""16.8.1"""', 'build': '"""fc3"""', 'serial': '"""9YEI1T9ZCIY"""', 'base_mac': 'NOT_SET', 'memory': '"""8113376"""', 'vendor': '"""Cisco"""', 'model': '"""CSR1000V"""', 'interfaces_lst': "['GigabitEthernet1', 'GigabitEthernet2', 'GigabitEthernet3']", 'options': '{}'}), "(hostname='leaf05', domain='dh.local', version='16.8.1', build='fc3',\n serial='9YEI1T9ZCIY', base_mac=NOT_SET, memory='8113376', vendor=\n 'Cisco', model='CSR1000V', interfaces_lst=['GigabitEthernet1',\n 'GigabitEthernet2', 'GigabitEthernet3'], options={})\n", (11262, 11525), False, 'from netests.protocols.facts import Facts\n'), ((12557, 12652), 'netests.tools.file.open_txt_file', 'open_txt_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/ios/ssh/cisco_ios_show_version.txt"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/ios/ssh/cisco_ios_show_version.txt')\n", (12570, 12652), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((12738, 12844), 'netests.tools.file.open_txt_file', 'open_txt_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/ios/ssh/cisco_ios_ip_interface_brief.txt"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/ios/ssh/cisco_ios_ip_interface_brief.txt'\n )\n", (12751, 12844), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((12908, 12987), 
'netests.converters.facts.ios.ssh._ios_facts_ssh_converter', '_ios_facts_ssh_converter', ([], {'hostname': '"""spine02"""', 'cmd_output': 'cmd_output', 'options': '{}'}), "(hostname='spine02', cmd_output=cmd_output, options={})\n", (12932, 12987), False, 'from netests.converters.facts.ios.ssh import _ios_facts_ssh_converter\n'), ((13138, 13656), 'netests.protocols.facts.Facts', 'Facts', ([], {'hostname': '"""csr1000v"""', 'domain': '"""abc.inc"""', 'version': '"""16.9"""', 'build': 'NOT_SET', 'serial': '"""9KAAMNP24B9"""', 'base_mac': 'NOT_SET', 'memory': 'NOT_SET', 'vendor': '"""Cisco"""', 'model': '"""CSR1000V"""', 'interfaces_lst': "['GigabitEthernet1', 'GigabitEthernet2', 'GigabitEthernet3', 'Loopback12',\n 'Loopback101', 'Loopback854', 'Loopback1500', 'Loopback1501',\n 'Loopback1609', 'Loopback1974', 'Loopback1996', 'Loopback1997',\n 'Loopback1998', 'Loopback2000', 'Loopback2222', 'Loopback3000',\n 'Loopback4321', 'Loopback5263']", 'options': '{}'}), "(hostname='csr1000v', domain='abc.inc', version='16.9', build=NOT_SET,\n serial='9KAAMNP24B9', base_mac=NOT_SET, memory=NOT_SET, vendor='Cisco',\n model='CSR1000V', interfaces_lst=['GigabitEthernet1',\n 'GigabitEthernet2', 'GigabitEthernet3', 'Loopback12', 'Loopback101',\n 'Loopback854', 'Loopback1500', 'Loopback1501', 'Loopback1609',\n 'Loopback1974', 'Loopback1996', 'Loopback1997', 'Loopback1998',\n 'Loopback2000', 'Loopback2222', 'Loopback3000', 'Loopback4321',\n 'Loopback5263'], options={})\n", (13143, 13656), False, 'from netests.protocols.facts import Facts\n'), ((14635, 15153), 'netests.protocols.facts.Facts', 'Facts', ([], {'hostname': '"""spine03"""', 'domain': '"""dh.local"""', 'version': '"""6.5.3"""', 'build': 'NOT_SET', 'serial': 'NOT_SET', 'base_mac': 'NOT_SET', 'memory': 'NOT_SET', 'vendor': '"""Cisco"""', 'model': '"""IOS-XRv 9000"""', 'interfaces_lst': "['Bundle-Ether1', 'Bundle-Ether1.1234', 'Bundle-Ether1.4321', 'Loopback100',\n 'Loopback200', 'MgmtEth0/RP0/CPU0/0', 'GigabitEthernet0/0/0/0',\n 
'GigabitEthernet0/0/0/1', 'GigabitEthernet0/0/0/2',\n 'GigabitEthernet0/0/0/3', 'GigabitEthernet0/0/0/4',\n 'GigabitEthernet0/0/0/5', 'GigabitEthernet0/0/0/6']", 'options': '{}'}), "(hostname='spine03', domain='dh.local', version='6.5.3', build=NOT_SET,\n serial=NOT_SET, base_mac=NOT_SET, memory=NOT_SET, vendor='Cisco', model\n ='IOS-XRv 9000', interfaces_lst=['Bundle-Ether1', 'Bundle-Ether1.1234',\n 'Bundle-Ether1.4321', 'Loopback100', 'Loopback200',\n 'MgmtEth0/RP0/CPU0/0', 'GigabitEthernet0/0/0/0',\n 'GigabitEthernet0/0/0/1', 'GigabitEthernet0/0/0/2',\n 'GigabitEthernet0/0/0/3', 'GigabitEthernet0/0/0/4',\n 'GigabitEthernet0/0/0/5', 'GigabitEthernet0/0/0/6'], options={})\n", (14640, 15153), False, 'from netests.protocols.facts import Facts\n'), ((15961, 16060), 'netests.tools.file.open_txt_file', 'open_txt_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/iosxr/ssh/cisco_iosxr_show_version.txt"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/iosxr/ssh/cisco_iosxr_show_version.txt')\n", (15974, 16060), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((16146, 16261), 'netests.tools.file.open_txt_file', 'open_txt_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/iosxr/ssh/cisco_iosxr_show_ip_interface_brief.txt"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/iosxr/ssh/cisco_iosxr_show_ip_interface_brief.txt'\n )\n", (16159, 16261), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((16325, 16410), 'netests.converters.facts.iosxr.ssh._iosxr_facts_ssh_converter', '_iosxr_facts_ssh_converter', ([], {'hostname': '"""spine02"""', 'cmd_output': 'cmd_output', 'options': '{}'}), "(hostname='spine02', cmd_output=cmd_output,\n options={})\n", (16351, 16410), False, 'from netests.converters.facts.iosxr.ssh import _iosxr_facts_ssh_converter\n'), ((16870, 17076), 'netests.protocols.facts.Facts', 'Facts', ([], {'hostname': 
'"""leaf04"""', 'domain': '"""dh.local"""', 'version': '"""18.3R1.9"""', 'build': 'NOT_SET', 'serial': '"""VM5E983D143E"""', 'base_mac': 'NOT_SET', 'memory': '(2052008)', 'vendor': '"""Juniper"""', 'model': '"""VMX"""', 'interfaces_lst': '[]', 'options': '{}'}), "(hostname='leaf04', domain='dh.local', version='18.3R1.9', build=\n NOT_SET, serial='VM5E983D143E', base_mac=NOT_SET, memory=2052008,\n vendor='Juniper', model='VMX', interfaces_lst=[], options={})\n", (16875, 17076), False, 'from netests.protocols.facts import Facts\n'), ((17322, 17440), 'netests.tools.file.open_txt_file', 'open_txt_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/juniper/api/juniper_api_get_software_information.xml"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/juniper/api/juniper_api_get_software_information.xml'\n )\n", (17335, 17440), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((17521, 17646), 'netests.tools.file.open_txt_file', 'open_txt_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/juniper/api/juniper_api_get_interface_information_terse.xml"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/juniper/api/juniper_api_get_interface_information_terse.xml'\n )\n", (17534, 17646), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((17730, 17852), 'netests.tools.file.open_txt_file', 'open_txt_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/juniper/api/juniper_api_get_chassis_inventory_detail.xml"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/juniper/api/juniper_api_get_chassis_inventory_detail.xml'\n )\n", (17743, 17852), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((17936, 18059), 'netests.tools.file.open_txt_file', 'open_txt_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/juniper/api/juniper_api_get_system_memory_information.xml"""'}), "(path=\n 
f'{FEATURES_SRC_PATH}outputs/facts/juniper/api/juniper_api_get_system_memory_information.xml'\n )\n", (17949, 18059), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((18124, 18210), 'netests.converters.facts.juniper.api._juniper_facts_api_converter', '_juniper_facts_api_converter', ([], {'hostname': '"""leaf04"""', 'cmd_output': 'cmd_output', 'options': '{}'}), "(hostname='leaf04', cmd_output=cmd_output,\n options={})\n", (18152, 18210), False, 'from netests.converters.facts.juniper.api import _juniper_facts_api_converter\n'), ((18401, 18509), 'netests.tools.file.open_json_file', 'open_json_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/juniper/netconf/juniper_nc_get_facts.json"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/juniper/netconf/juniper_nc_get_facts.json'\n )\n", (18415, 18509), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((18590, 18703), 'netests.tools.file.open_file', 'open_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/juniper/netconf/juniper_nc_get_interfaces_terse.xml"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/juniper/netconf/juniper_nc_get_interfaces_terse.xml'\n )\n", (18599, 18703), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((18767, 18852), 'netests.converters.facts.juniper.nc._juniper_facts_nc_converter', '_juniper_facts_nc_converter', ([], {'hostname': '"""leaf04"""', 'cmd_output': 'cmd_output', 'options': '{}'}), "(hostname='leaf04', cmd_output=cmd_output,\n options={})\n", (18794, 18852), False, 'from netests.converters.facts.juniper.nc import _juniper_facts_nc_converter\n'), ((19039, 19138), 'netests.tools.file.open_json_file', 'open_json_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/juniper/ssh/juniper_show_version.json"""'}), "(path=\n 
f'{FEATURES_SRC_PATH}outputs/facts/juniper/ssh/juniper_show_version.json')\n", (19053, 19138), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((19224, 19337), 'netests.tools.file.open_json_file', 'open_json_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/juniper/ssh/juniper_show_interfaces_terse.json"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/juniper/ssh/juniper_show_interfaces_terse.json'\n )\n", (19238, 19337), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((19418, 19531), 'netests.tools.file.open_json_file', 'open_json_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/juniper/ssh/juniper_show_interfaces_terse.json"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/juniper/ssh/juniper_show_interfaces_terse.json'\n )\n", (19432, 19531), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((19615, 19725), 'netests.tools.file.open_json_file', 'open_json_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/juniper/ssh/juniper_show_system_memory.json"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/juniper/ssh/juniper_show_system_memory.json'\n )\n", (19629, 19725), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((19809, 19917), 'netests.tools.file.open_json_file', 'open_json_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/juniper/ssh/juniper_show_conf_system.json"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/juniper/ssh/juniper_show_conf_system.json'\n )\n", (19823, 19917), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((20001, 20113), 'netests.tools.file.open_json_file', 'open_json_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/juniper/ssh/juniper_show_hardware_detail.json"""'}), "(path=\n 
f'{FEATURES_SRC_PATH}outputs/facts/juniper/ssh/juniper_show_hardware_detail.json'\n )\n", (20015, 20113), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((20177, 20263), 'netests.converters.facts.juniper.ssh._juniper_facts_ssh_converter', '_juniper_facts_ssh_converter', ([], {'hostname': '"""leaf04"""', 'cmd_output': 'cmd_output', 'options': '{}'}), "(hostname='leaf04', cmd_output=cmd_output,\n options={})\n", (20205, 20263), False, 'from netests.converters.facts.juniper.ssh import _juniper_facts_ssh_converter\n'), ((20409, 20626), 'netests.protocols.facts.Facts', 'Facts', ([], {'hostname': '"""leaf03"""', 'domain': 'NOT_SET', 'version': 'NOT_SET', 'build': 'NOT_SET', 'serial': '"""9QXOX90PJ62"""', 'base_mac': 'NOT_SET', 'memory': 'NOT_SET', 'vendor': '"""Cisco"""', 'model': '"""Nexus9000 C9300v Chassis"""', 'interfaces_lst': '[]', 'options': '{}'}), "(hostname='leaf03', domain=NOT_SET, version=NOT_SET, build=NOT_SET,\n serial='9QXOX90PJ62', base_mac=NOT_SET, memory=NOT_SET, vendor='Cisco',\n model='Nexus9000 C9300v Chassis', interfaces_lst=[], options={})\n", (20414, 20626), False, 'from netests.protocols.facts import Facts\n'), ((21179, 21403), 'netests.protocols.facts.Facts', 'Facts', ([], {'hostname': '"""leaf02"""', 'domain': '"""dh.local"""', 'version': '"""9.3(3)"""', 'build': 'NOT_SET', 'serial': '"""9QXOX90PJ62"""', 'base_mac': 'NOT_SET', 'memory': '"""16409064"""', 'vendor': '"""Cisco Systems, Inc."""', 'model': '"""Nexus9000"""', 'interfaces_lst': '[]', 'options': '{}'}), "(hostname='leaf02', domain='dh.local', version='9.3(3)', build=NOT_SET,\n serial='9QXOX90PJ62', base_mac=NOT_SET, memory='16409064', vendor=\n 'Cisco Systems, Inc.', model='Nexus9000', interfaces_lst=[], options={})\n", (21184, 21403), False, 'from netests.protocols.facts import Facts\n'), ((21646, 21740), 'netests.tools.file.open_json_file', 'open_json_file', ([], {'path': 
'f"""{FEATURES_SRC_PATH}outputs/facts/nxos/api/nxos_api_get_facts.json"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/nxos/api/nxos_api_get_facts.json')\n", (21660, 21740), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((21826, 21925), 'netests.tools.file.open_json_file', 'open_json_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/nxos/api/nxos_api_get_interfaces.json"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/nxos/api/nxos_api_get_interfaces.json')\n", (21840, 21925), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((22014, 22109), 'netests.tools.file.open_json_file', 'open_json_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/nxos/api/nxos_api_get_domain.json"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/nxos/api/nxos_api_get_domain.json')\n", (22028, 22109), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((22179, 22258), 'netests.converters.facts.nxos.api._nxos_facts_api_converter', '_nxos_facts_api_converter', ([], {'hostname': '"""leaf03"""', 'cmd_output': 'cmd_output', 'options': '{}'}), "(hostname='leaf03', cmd_output=cmd_output, options={})\n", (22204, 22258), False, 'from netests.converters.facts.nxos.api import _nxos_facts_api_converter\n'), ((22594, 22687), 'netests.tools.file.open_json_file', 'open_json_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/nxos/ssh/nxos_show_version.json"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/nxos/ssh/nxos_show_version.json')\n", (22608, 22687), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((22773, 22869), 'netests.tools.file.open_json_file', 'open_json_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/nxos/ssh/nxos_show_interfaces.json"""'}), "(path=\n 
f'{FEATURES_SRC_PATH}outputs/facts/nxos/ssh/nxos_show_interfaces.json')\n", (22787, 22869), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((22958, 23052), 'netests.tools.file.open_json_file', 'open_json_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/nxos/ssh/nxos_show_hostname.json"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/nxos/ssh/nxos_show_hostname.json')\n", (22972, 23052), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((23126, 23205), 'netests.converters.facts.nxos.ssh._nxos_facts_ssh_converter', '_nxos_facts_ssh_converter', ([], {'hostname': '"""leaf03"""', 'cmd_output': 'cmd_output', 'options': '{}'}), "(hostname='leaf03', cmd_output=cmd_output, options={})\n", (23151, 23205), False, 'from netests.converters.facts.nxos.ssh import _nxos_facts_ssh_converter\n'), ((25692, 25819), 'netests.comparators.facts_compare._compare_facts', '_compare_facts', ([], {'host_keys': 'FACTS_DATA_HOST_KEY', 'hostname': '"""leaf03"""', 'groups': "['eos']", 'facts_host_data': 'context.o0002', 'test': '(True)'}), "(host_keys=FACTS_DATA_HOST_KEY, hostname='leaf03', groups=[\n 'eos'], facts_host_data=context.o0002, test=True)\n", (25706, 25819), False, 'from netests.comparators.facts_compare import _compare_facts\n'), ((26076, 26203), 'netests.comparators.facts_compare._compare_facts', '_compare_facts', ([], {'host_keys': 'FACTS_DATA_HOST_KEY', 'hostname': '"""leaf03"""', 'groups': "['eos']", 'facts_host_data': 'context.o0004', 'test': '(True)'}), "(host_keys=FACTS_DATA_HOST_KEY, hostname='leaf03', groups=[\n 'eos'], facts_host_data=context.o0004, test=True)\n", (26090, 26203), False, 'from netests.comparators.facts_compare import _compare_facts\n'), ((27723, 27852), 'netests.comparators.facts_compare._compare_facts', '_compare_facts', ([], {'host_keys': 'FACTS_DATA_HOST_KEY', 'hostname': '"""leaf01"""', 'groups': "['linux']", 
'facts_host_data': 'context.o0102', 'test': '(True)'}), "(host_keys=FACTS_DATA_HOST_KEY, hostname='leaf01', groups=[\n 'linux'], facts_host_data=context.o0102, test=True)\n", (27737, 27852), False, 'from netests.comparators.facts_compare import _compare_facts\n'), ((28128, 28257), 'netests.comparators.facts_compare._compare_facts', '_compare_facts', ([], {'host_keys': 'FACTS_DATA_HOST_KEY', 'hostname': '"""leaf01"""', 'groups': "['linux']", 'facts_host_data': 'context.o0104', 'test': '(True)'}), "(host_keys=FACTS_DATA_HOST_KEY, hostname='leaf01', groups=[\n 'linux'], facts_host_data=context.o0104, test=True)\n", (28142, 28257), False, 'from netests.comparators.facts_compare import _compare_facts\n'), ((34011, 34138), 'netests.comparators.facts_compare._compare_facts', '_compare_facts', ([], {'host_keys': 'FACTS_DATA_HOST_KEY', 'hostname': '"""leaf05"""', 'groups': "['ios']", 'facts_host_data': 'context.o0304', 'test': '(True)'}), "(host_keys=FACTS_DATA_HOST_KEY, hostname='leaf05', groups=[\n 'ios'], facts_host_data=context.o0304, test=True)\n", (34025, 34138), False, 'from netests.comparators.facts_compare import _compare_facts\n'), ((35690, 35820), 'netests.comparators.facts_compare._compare_facts', '_compare_facts', ([], {'host_keys': 'FACTS_DATA_HOST_KEY', 'hostname': '"""spine03"""', 'groups': "['iosxr']", 'facts_host_data': 'context.o0404', 'test': '(True)'}), "(host_keys=FACTS_DATA_HOST_KEY, hostname='spine03', groups=[\n 'iosxr'], facts_host_data=context.o0404, test=True)\n", (35704, 35820), False, 'from netests.comparators.facts_compare import _compare_facts\n'), ((38334, 38463), 'netests.comparators.facts_compare._compare_facts', '_compare_facts', ([], {'host_keys': 'FACTS_DATA_HOST_KEY', 'hostname': '"""leaf04"""', 'groups': "['junos']", 'facts_host_data': 'context.o0504', 'test': '(True)'}), "(host_keys=FACTS_DATA_HOST_KEY, hostname='leaf04', groups=[\n 'junos'], facts_host_data=context.o0504, test=True)\n", (38348, 38463), False, 'from 
netests.comparators.facts_compare import _compare_facts\n'), ((40245, 40373), 'netests.comparators.facts_compare._compare_facts', '_compare_facts', ([], {'host_keys': 'FACTS_DATA_HOST_KEY', 'hostname': '"""leaf02"""', 'groups': "['nxos']", 'facts_host_data': 'context.o0702', 'test': '(True)'}), "(host_keys=FACTS_DATA_HOST_KEY, hostname='leaf02', groups=[\n 'nxos'], facts_host_data=context.o0702, test=True)\n", (40259, 40373), False, 'from netests.comparators.facts_compare import _compare_facts\n'), ((40630, 40758), 'netests.comparators.facts_compare._compare_facts', '_compare_facts', ([], {'host_keys': 'FACTS_DATA_HOST_KEY', 'hostname': '"""leaf02"""', 'groups': "['nxos']", 'facts_host_data': 'context.o0704', 'test': '(True)'}), "(host_keys=FACTS_DATA_HOST_KEY, hostname='leaf02', groups=[\n 'nxos'], facts_host_data=context.o0704, test=True)\n", (40644, 40758), False, 'from netests.comparators.facts_compare import _compare_facts\n'), ((40918, 41228), 'netests.protocols.facts.Facts', 'Facts', ([], {'hostname': '"""leaf01"""', 'domain': 'NOT_SET', 'version': '"""4.0.0"""', 'build': '"""Cumulus Linux 4.0.0"""', 'serial': '"""50:00:00:02:00:00"""', 'base_mac': '"""50:00:00:02:00:00"""', 'memory': '(944388)', 'vendor': '"""Cumulus Networks"""', 'model': '"""VX"""', 'interfaces_lst': "['swp5', 'swp7', 'swp2', 'swp3', 'swp1', 'swp6', 'swp4', 'eth0']", 'options': '{}'}), "(hostname='leaf01', domain=NOT_SET, version='4.0.0', build=\n 'Cumulus Linux 4.0.0', serial='50:00:00:02:00:00', base_mac=\n '50:00:00:02:00:00', memory=944388, vendor='Cumulus Networks', model=\n 'VX', interfaces_lst=['swp5', 'swp7', 'swp2', 'swp3', 'swp1', 'swp6',\n 'swp4', 'eth0'], options={})\n", (40923, 41228), False, 'from netests.protocols.facts import Facts\n'), ((47552, 47935), 'netests.protocols.facts.Facts', 'Facts', ([], {'hostname': '"""leaf01"""', 'domain': '"""a_very_cool_domain.com"""', 'version': '"""4.0.0"""', 'build': '"""WRONG__BUILD__!!"""', 'serial': '"""WRONG__SERIAL__!!"""', 
'base_mac': '"""WRONG__BASE_MAC__!!"""', 'memory': '(0)', 'vendor': '"""NOT CUMULUS SO NOT GOOD"""', 'model': '"""NOT A CUMULUS MODEL"""', 'interfaces_lst': "['swp5', 'swp7', 'swp2', 'swp3', 'swp1', 'swp6', 'swp4', 'eth0',\n 'WHAT IS THIS INTERFACE ????']", 'options': 'options'}), "(hostname='leaf01', domain='a_very_cool_domain.com', version='4.0.0',\n build='WRONG__BUILD__!!', serial='WRONG__SERIAL__!!', base_mac=\n 'WRONG__BASE_MAC__!!', memory=0, vendor='NOT CUMULUS SO NOT GOOD',\n model='NOT A CUMULUS MODEL', interfaces_lst=['swp5', 'swp7', 'swp2',\n 'swp3', 'swp1', 'swp6', 'swp4', 'eth0', 'WHAT IS THIS INTERFACE ????'],\n options=options)\n", (47557, 47935), False, 'from netests.protocols.facts import Facts\n'), ((33419, 33546), 'netests.comparators.facts_compare._compare_facts', '_compare_facts', ([], {'host_keys': 'FACTS_DATA_HOST_KEY', 'hostname': '"""leaf05"""', 'groups': "['ios']", 'facts_host_data': 'context.o0302', 'test': '(True)'}), "(host_keys=FACTS_DATA_HOST_KEY, hostname='leaf05', groups=[\n 'ios'], facts_host_data=context.o0302, test=True)\n", (33433, 33546), False, 'from netests.comparators.facts_compare import _compare_facts\n'), ((33753, 33880), 'netests.comparators.facts_compare._compare_facts', '_compare_facts', ([], {'host_keys': 'FACTS_DATA_HOST_KEY', 'hostname': '"""leaf05"""', 'groups': "['ios']", 'facts_host_data': 'context.o0303', 'test': '(True)'}), "(host_keys=FACTS_DATA_HOST_KEY, hostname='leaf05', groups=[\n 'ios'], facts_host_data=context.o0303, test=True)\n", (33767, 33880), False, 'from netests.comparators.facts_compare import _compare_facts\n'), ((11842, 11945), 'netests.tools.file.open_file', 'open_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/ios/api/cisco_ios_api_get_facts_16.8.json"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/ios/api/cisco_ios_api_get_facts_16.8.json'\n )\n", (11851, 11945), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), 
((12218, 12313), 'netests.tools.file.open_file', 'open_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/ios/netconf/cisco_ios_nc_get_facts.xml"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/ios/netconf/cisco_ios_nc_get_facts.xml')\n", (12227, 12313), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((14323, 14426), 'netests.tools.file.open_file', 'open_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/ios/api/cisco_ios_api_get_facts_16.9.json"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/ios/api/cisco_ios_api_get_facts_16.9.json'\n )\n", (14332, 14426), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n'), ((20898, 20986), 'netests.tools.file.open_json_file', 'open_json_file', ([], {'path': 'f"""{FEATURES_SRC_PATH}outputs/facts/napalm/nxos_get_facts.json"""'}), "(path=\n f'{FEATURES_SRC_PATH}outputs/facts/napalm/nxos_get_facts.json')\n", (20912, 20986), False, 'from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes\n')]
|
#!/usr/bin/env python
# Copyright 2014-2018 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: <NAME>
#
from .util import meta, MetaArray, ltri_ix, p
from pyscf.lib.diis import DIIS
from pyscf.lib.linalg_helper import davidson_nosym1 as davidson
from pyscf.cc.uccsd_slow import _PhysicistsERIs as ERIS_uccsd_slow
from pyscf.cc.gccsd import _PhysicistsERIs as ERIS_gccsd
import numpy
import inspect
from numbers import Number
from collections import OrderedDict
from warnings import warn
import string
def res2amps(residuals, e_occ, e_vir, constant=None):
    """
    Converts residuals into amplitudes update.

    Each residual is divided element-wise by an energy denominator built
    from the orbital energies according to the residual's axes labels:
    'o' axes add ``e_occ``, 'v' axes subtract ``e_vir``.

    Args:
        residuals (iterable): a list of residuals; each entry is either a
            ``MetaArray`` carrying a "labels" metadata entry or the number 0
            (meaning "no update");
        e_occ (array): occupied energies;
        e_vir (array): virtual energies;
        constant (float): an optional constant added to the denominator;

    Returns:
        A list of updates to amplitudes.

    Raises:
        ValueError: on a missing/unknown axis label or an unsupported
            residual type.
    """
    result = []
    for res in residuals:
        if isinstance(res, Number) and res == 0:
            # Zero residual -> zero update.
            result.append(0)
        elif isinstance(res, MetaArray):
            diagonal = numpy.zeros_like(res)
            ix = [numpy.newaxis] * len(diagonal.shape)
            if "labels" not in res.metadata:
                raise ValueError("Missing metadata: axes labels")
            for j, s in enumerate(res.metadata["labels"]):
                # Broadcast the 1D energy vector along axis j only.
                ix[j] = slice(None)
                if s == 'o':
                    diagonal += e_occ[tuple(ix)]
                elif s == 'v':
                    diagonal -= e_vir[tuple(ix)]
                else:
                    # Bug fix: report the labels of the offending residual
                    # ('res'); the original referenced 'residuals.metadata',
                    # but 'residuals' is a plain iterable without metadata
                    # and the raise itself crashed with AttributeError.
                    raise ValueError("Unknown spec '{}' in {}".format(s, res.metadata["labels"]))
                ix[j] = numpy.newaxis
            if constant is not None:
                result.append(res / (constant + diagonal))
            else:
                result.append(res / diagonal)
        else:
            raise ValueError("Unsupported type: {}".format(type(res)))
    return result
def a2v(amplitudes):
    """Flatten a sequence of amplitude tensors into one 1D array."""
    return numpy.concatenate([numpy.reshape(tensor, -1) for tensor in amplitudes])
def v2a(vec, like):
    """Split a flat vector into a list of tensors shaped like ``like``.

    Shapes (and, for ``MetaArray`` templates, metadata) are copied from
    the template tensors.
    """
    result = []
    cursor = 0
    for template in like:
        n = template.size
        chunk = numpy.reshape(vec[cursor:cursor + n], template.shape)
        if isinstance(template, MetaArray):
            chunk = MetaArray(chunk, **template.metadata)
        result.append(chunk)
        cursor += n
    return result
def eris_hamiltonian(eris):
    """
    Retrieves Hamiltonian matrix elements from pyscf ERIS.
    Args:
        eris (pyscf.cc.ccsd.ERIS): pyscf ERIS;
    Returns:
        A dict with Hamiltonian matrix elements.

    Keys follow the 'ov' block notation: one- and two-letter keys are Fock
    blocks, four-letter keys are two-electron integral blocks.

    Raises:
        ValueError: if ``eris`` is neither a UCCSD-slow nor a GCCSD ERIS.
    """
    # TODO: decide on adding '**ov', 'vo**'
    nocc = eris.oooo.shape[0]
    if isinstance(eris, ERIS_uccsd_slow):
        def chess(a):
            # Interleave the first and second halves of every axis:
            # [0, d//2, 1, d//2+1, ...].  Presumably this converts the
            # spin-blocked (all-alpha then all-beta) layout into an
            # alternating spin-orbital order -- TODO confirm.
            ix = []
            for d in a.shape:
                ix.append(numpy.dstack((
                    numpy.arange(d // 2),
                    numpy.arange(d // 2, d),
                )).reshape(-1))
            return a[numpy.ix_(*ix)]
        # Minus signs below account for the antisymmetry of the
        # physicists'-notation integrals under transposition of the
        # last two indices.
        return {k: chess(v) for k, v in dict(
            ov=eris.fock[:nocc, nocc:],
            vo=eris.fock[nocc:, :nocc],
            oo=eris.fock[:nocc, :nocc],
            vv=eris.fock[nocc:, nocc:],
            oooo=eris.oooo,
            oovo=-numpy.transpose(eris.ooov, (0, 1, 3, 2)),
            oovv=eris.oovv,
            ovoo=eris.ovoo,
            ovvo=-numpy.transpose(eris.ovov, (0, 1, 3, 2)),
            ovvv=eris.ovvv,
            vvoo=numpy.transpose(eris.oovv, (2, 3, 0, 1)),
            vvvo=-numpy.transpose(eris.ovvv, (2, 3, 1, 0)),
            vvvv=eris.vvvv,
        ).items()}
    elif isinstance(eris, ERIS_gccsd):
        # GCCSD ERIS already exposes most blocks directly; only a few
        # need transposition to the key's index order.
        return dict(
            ov=eris.fock[:nocc, nocc:], #OK
            vo=eris.fock[nocc:, :nocc], #OK
            oo=eris.fock[:nocc, :nocc], #OK
            vv=eris.fock[nocc:, nocc:], #OK
            oooo=eris.oooo, #OK
            oovo=-numpy.transpose(eris.ooov, (0, 1, 3, 2)), #OK
            oovv=eris.oovv,
            # ovoo=eris.ovoo,
            ovoo=numpy.transpose(eris.ooov, (2, 3, 0, 1)), #OK
            # ovvo=-numpy.transpose(eris.ovov, (0, 1, 3, 2)),
            ovvo=eris.ovvo,
            ovvv=eris.ovvv,
            vvoo=numpy.transpose(eris.oovv, (2, 3, 0, 1)),
            vvvo=-numpy.transpose(eris.ovvv, (2, 3, 1, 0)),
            vvvv=eris.vvvv,
        )
    else:
        raise ValueError("Unknown object: {}".format(eris))
def oneshot(equations, *args):
    """
    A one-shot calculation.

    Calls ``equations`` with keyword arguments collected from the supplied
    dictionaries (later dictionaries override earlier ones).  Keys that
    ``equations`` does not accept are silently dropped.

    Args:
        equations (callable): coupled-cluster equations;
        args (iterable): amplitudes and hamiltonian matrix elements as dicts;

    Returns:
        Results of the calculation.

    Raises:
        ValueError: if ``equations`` requires an argument that none of the
            supplied dictionaries provides.
    """
    # Bug fix: inspect.getargspec was deprecated and removed in Python 3.11;
    # getfullargspec is the drop-in replacement (and also handles annotated
    # callables without raising).
    input_args = inspect.getfullargspec(equations).args
    fw_args = {}
    for i in args:
        fw_args.update(i)
    # Remove excess arguments from the Hamiltonian
    fw_args = {k: v for k, v in fw_args.items() if k in input_args}
    # Check missing arguments
    missing = set(input_args) - set(fw_args.keys())
    if len(missing) > 0:
        raise ValueError("Following arguments are missing: {}".format(', '.join(missing)))
    return equations(**fw_args)
def kernel_solve(hamiltonian, equations, initial_guess, tolerance=1e-9, debug=False, diis=True, equation_energy=None,
                 dim_spec=None, maxiter=50):
    """
    Coupled-cluster solver (linear systems).
    Args:
        hamiltonian (dict): hamiltonian matrix elements or pyscf ERIS;
        equations (callable): coupled-cluster equations;
        initial_guess (OrderedDict): starting amplitudes;
        tolerance (float): convergence criterion;
        debug (bool): prints iterations if True;
        diis (bool, DIIS): converger for iterations;
        equation_energy (callable): energy equation;
        dim_spec (iterable): if `initial_guess` is a dict, this parameter defines shapes of arrays in 'ov' notation
        (list of strings);
        maxiter (int): maximal number of iterations;
    Returns:
        Resulting coupled-cluster amplitudes and energy if specified.
    """
    # Convert ERIS to hamiltonian dict if needed
    if not isinstance(hamiltonian, dict):
        hamiltonian = eris_hamiltonian(hamiltonian)
    if isinstance(initial_guess, (tuple, list)):
        # Names only: start every amplitude at 0; shapes must come from dim_spec.
        initial_guess = OrderedDict((k, 0) for k in initial_guess)
        if dim_spec is None:
            raise ValueError("dim_spec is not specified")
    elif isinstance(initial_guess, OrderedDict):
        if dim_spec is None and any(not isinstance(i, MetaArray) for i in initial_guess.values()):
            raise ValueError("One or more of initial_guess values is not a MetaArray. Either specify dim_spec or use "
                             "MetaArrays to provide dimensions' labels in the 'ov' notation")
        # NOTE(review): this unconditionally overwrites a caller-supplied
        # dim_spec and reads .metadata from every value; passing an
        # OrderedDict of numeric zeros together with dim_spec would raise
        # AttributeError here -- confirm intended.
        dim_spec = tuple(i.metadata["labels"] for i in initial_guess.values())
    else:
        raise ValueError("OrderedDict expected for 'initial_guess'")
    tol = None
    # Orbital energies are taken from the diagonal Fock blocks.
    e_occ = numpy.diag(hamiltonian["oo"])
    e_vir = numpy.diag(hamiltonian["vv"])
    if diis is True:
        diis = DIIS()
    # NOTE(review): by operator precedence this reads as
    # 'tol is None or (tol > tolerance and maxiter > 0)', so one iteration
    # always runs even when maxiter <= 0 -- confirm intended.
    while tol is None or tol > tolerance and maxiter > 0:
        output = oneshot(equations, hamiltonian, initial_guess)
        if not isinstance(output, tuple):
            output = (output,)
        # Attach axes labels so that res2amps can build the denominators.
        output = tuple(MetaArray(i, labels=j) if isinstance(i, numpy.ndarray) else i for i, j in zip(output, dim_spec))
        dt = res2amps(output, e_occ, e_vir)
        # Convergence measure: the largest residual-update norm.
        tol = max(numpy.linalg.norm(i) for i in dt)
        for k, delta in zip(initial_guess, dt):
            initial_guess[k] = initial_guess[k] + delta
        # DIIS extrapolation once all amplitudes are arrays (not scalar 0s).
        if diis and not any(isinstance(i, Number) for i in initial_guess.values()):
            v = a2v(initial_guess.values())
            initial_guess = OrderedDict(zip(
                initial_guess.keys(),
                v2a(diis.update(v), initial_guess.values())
            ))
        maxiter -= 1
        if debug:
            if equation_energy is not None:
                e = oneshot(equation_energy, hamiltonian, initial_guess)
                print("E = {:.10f} delta={:.3e}".format(e, tol))
            else:
                print("delta={:.3e}".format(tol))
    if equation_energy is not None:
        return initial_guess, oneshot(equation_energy, hamiltonian, initial_guess)
    else:
        return initial_guess
def koopmans_guess_ip(nocc, nvir, amplitudes, n, **kwargs):
    """
    Koopman's guess for IP-EOM-CC amplitudes.

    Allocates zero tensors for every amplitude and seeds the first-order
    amplitude with a single 1 at the n-th occupied orbital from the top.

    Args:
        nocc (int): occupied space size;
        nvir (int): virtual space size;
        amplitudes (OrderedDict): an ordered dict with variable name-variable order pairs;
        n (int): the root number;
        kwargs: keyword arguments to `numpy.zeros`.
    Returns:
        An ordered dict with variable name-initial guess pairs.
    """
    result = OrderedDict()
    seeded = False
    for name, order in amplitudes.items():
        # IP amplitudes have one more occupied than virtual index.
        shape = (nocc,) * order + (nvir,) * (order - 1)
        axis_labels = 'o' * order + 'v' * (order - 1)
        result[name] = meta(numpy.zeros(shape, **kwargs), labels=axis_labels)
        if order == 1:
            if seeded:
                raise ValueError("Several first-order amplitudes encountered: {}".format(amplitudes))
            result[name][-n-1] = 1
            seeded = True
    if not seeded:
        raise ValueError("No first-order amplitudes found: {}".format(amplitudes))
    return result
def koopmans_guess_ea(nocc, nvir, amplitudes, n, **kwargs):
    """
    Koopman's guess for EA-EOM-CC amplitudes.

    Allocates zero tensors for every amplitude and seeds the first-order
    amplitude with a single 1 at the n-th virtual orbital.

    Args:
        nocc (int): occupied space size;
        nvir (int): virtual space size;
        amplitudes (OrderedDict): an ordered dict with variable name-variable order pairs;
        n (int): the root number;
        kwargs: keyword arguments to `numpy.zeros`.
    Returns:
        An ordered dict with variable name-initial guess pairs.
    """
    result = OrderedDict()
    seeded = False
    for name, order in amplitudes.items():
        # EA amplitudes have one more virtual than occupied index.
        shape = (nocc,) * (order - 1) + (nvir,) * order
        axis_labels = 'o' * (order - 1) + 'v' * order
        result[name] = meta(numpy.zeros(shape, **kwargs), labels=axis_labels)
        if order == 1:
            if seeded:
                raise ValueError("Several first-order amplitudes encountered: {}".format(amplitudes))
            result[name][n] = 1
            seeded = True
    if not seeded:
        raise ValueError("No first-order amplitudes found: {}".format(amplitudes))
    return result
def ltri_ix_amplitudes(a):
    """
    Collects lower-triangular indexes of antisymetric amplitudes.
    Args:
        a (MetaArray): amplitudes to process;
    Returns:
        Lower-triangular indexes.

    Raises:
        ValueError: if labels metadata is missing/inconsistent with the
            tensor shape, or if same-labelled axes differ in size.
    """
    if not isinstance(a, MetaArray) or "labels" not in a.metadata:
        raise ValueError("Labels metadata is missing")
    labels = a.metadata["labels"]
    if len(labels) != len(a.shape):
        raise ValueError("The length of 'labels' spec does not match the tensor rank")
    # Collect the dimension size per label, checking consistency among
    # axes sharing the same label ('o' or 'v').
    dim_sizes = OrderedDict()
    for label_i, label in enumerate(labels):
        dim_size = a.shape[label_i]
        if label in dim_sizes:
            if dim_sizes[label] != dim_size:
                raise ValueError("Dimensions of the same type '{}' do not match: {:d} vs {:d} in {}".format(
                    label,
                    dim_sizes[label],
                    dim_size,
                    repr(a.shape),
                ))
        else:
            dim_sizes[label] = dim_size
    # Per label: lower-triangular index arrays over the group of axes
    # carrying that label (from the project helper `ltri_ix`).
    ix = OrderedDict()
    ix_size = []
    for label, dim_size in dim_sizes.items():
        indexes = ltri_ix(dim_size, labels.count(label))
        ix[label] = iter(indexes)
        ix_size.append(len(indexes[0]))
    # Label order
    label_order = ''.join(ix.keys())
    # Combine per-label index lists into a full cartesian product:
    # each axis index array is repeated/tiled so that all label groups
    # are enumerated jointly.
    result = []
    for label in labels:
        x = next(ix[label])
        pos = label_order.index(label)
        # bf: product of index counts before this label group; ft: after.
        bf = numpy.prod([1] + ix_size[:pos])
        ft = numpy.prod([1] + ix_size[pos+1:])
        x = numpy.tile(numpy.repeat(x, ft), bf)
        result.append(x)
    return tuple(result)
def a2v_sym(amplitudes, ixs):
    """
    Symmetric amplitudes into vector.

    Only the unique (lower-triangular) elements selected by ``ixs`` are
    kept before flattening.

    Args:
        amplitudes (iterable): amplitudes to join;
        ixs (iterable): indexes of lower-triangle parts;
    Returns:
        A numpy array with amplitudes joined.
    """
    unique_parts = (tensor[ix] for tensor, ix in zip(amplitudes, ixs))
    return a2v(unique_parts)
def v2a_sym(a, labels, shapes, ixs):
    """
    Decompresses the antisymmetric array.
    Args:
        a (numpy.ndarray): array to decompress;
        labels (iterable): array's axes' labels;
        shapes (iterable): arrays' shapes;
        ixs (iterable): indexes of lower-triangle parts;
    Returns:
        Decompressed amplitude tensors.
    """
    result = []
    pos = 0
    for lbls, shape, ix in zip(labels, shapes, ixs):
        # Scatter the compressed slice into the lower triangle of a
        # freshly allocated tensor.
        ampl = numpy.zeros(shape, dtype=a.dtype)
        end = pos + len(ix[0])
        ampl[ix] = a[pos:end]
        pos = end
        # For each distinct label, mark its axes in the spec string and
        # apply the project helper `p` -- presumably antisymmetrizing over
        # the marked axes (TODO confirm against .util).
        for l in set(lbls):
            letters = iter(string.ascii_lowercase)
            str_spec = ''.join(next(letters) if i == l else '.' for i in lbls)
            ampl = p(str_spec, ampl)
        result.append(ampl)
    return result
def kernel_eig(hamiltonian, equations, amplitudes, tolerance=1e-9):
    """
    Coupled-cluster solver (eigenvalue problem).
    Args:
        hamiltonian (dict): hamiltonian matrix elements or pyscf ERIS;
        equations (callable): coupled-cluster equations;
        amplitudes (iterable): starting amplitudes (a list of OrderedDicts);
        tolerance (float): convergence criterion;
    Returns:
        Resulting coupled-cluster amplitudes and energy if specified.
    """
    # Convert ERIS to hamiltonian dict if needed
    if not isinstance(hamiltonian, dict):
        hamiltonian = eris_hamiltonian(hamiltonian)
    # Preconditioning
    e_occ = numpy.diag(hamiltonian["oo"])
    e_vir = numpy.diag(hamiltonian["vv"])
    # Antisymmetry data
    # Labels/shapes/lower-triangle indexes are taken from the first guess;
    # all guesses are assumed to share the same structure.
    sample = amplitudes[0].values()
    labels = list(i.metadata["labels"] for i in sample)
    ixs = list(ltri_ix_amplitudes(i) for i in sample)
    shapes = list(i.shape for i in sample)
    def matvec(vec):
        # Apply the CC equations to each compressed vector: decompress,
        # evaluate, re-compress.
        result = []
        for i in vec:
            a = v2a_sym(i, labels, shapes, ixs)
            a = OrderedDict(zip(amplitudes[0].keys(), a))
            r = oneshot(equations, hamiltonian, a)
            result.append(a2v_sym(r, ixs))
        return result
    def precond(res, e0, x0):
        # Davidson preconditioner: divide the residual by the shifted
        # orbital-energy denominator.
        a = v2a_sym(res, labels, shapes, ixs)
        a = list(MetaArray(i, **j.metadata) for i, j in zip(a, amplitudes[0].values()))
        a = res2amps(a, e_occ, e_vir, constant=e0)
        return a2v_sym(a, ixs)
    amplitudes_plain = tuple(a2v_sym(i.values(), ixs) for i in amplitudes)
    conv, values, vectors = davidson(matvec, amplitudes_plain, precond, tol=tolerance, nroots=len(amplitudes))
    if any(not i for i in conv):
        warn("Following eigenvalues did not converge: {}".format(list(
            i for i, x in enumerate(conv) if not x
        )))
    return values, list(v2a_sym(i, labels, shapes, ixs) for i in vectors)
|
[
"numpy.zeros_like",
"numpy.ix_",
"pyscf.lib.diis.DIIS",
"numpy.zeros",
"numpy.transpose",
"numpy.prod",
"inspect.getargspec",
"numpy.reshape",
"numpy.linalg.norm",
"numpy.arange",
"collections.OrderedDict",
"numpy.diag",
"numpy.concatenate",
"numpy.repeat"
] |
[((2682, 2707), 'numpy.concatenate', 'numpy.concatenate', (['result'], {}), '(result)\n', (2699, 2707), False, 'import numpy\n'), ((7597, 7626), 'numpy.diag', 'numpy.diag', (["hamiltonian['oo']"], {}), "(hamiltonian['oo'])\n", (7607, 7626), False, 'import numpy\n'), ((7639, 7668), 'numpy.diag', 'numpy.diag', (["hamiltonian['vv']"], {}), "(hamiltonian['vv'])\n", (7649, 7668), False, 'import numpy\n'), ((9450, 9463), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (9461, 9463), False, 'from collections import OrderedDict\n'), ((10455, 10468), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (10466, 10468), False, 'from collections import OrderedDict\n'), ((11486, 11499), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (11497, 11499), False, 'from collections import OrderedDict\n'), ((11979, 11992), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (11990, 11992), False, 'from collections import OrderedDict\n'), ((14330, 14359), 'numpy.diag', 'numpy.diag', (["hamiltonian['oo']"], {}), "(hamiltonian['oo'])\n", (14340, 14359), False, 'import numpy\n'), ((14372, 14401), 'numpy.diag', 'numpy.diag', (["hamiltonian['vv']"], {}), "(hamiltonian['vv'])\n", (14382, 14401), False, 'import numpy\n'), ((5346, 5375), 'inspect.getargspec', 'inspect.getargspec', (['equations'], {}), '(equations)\n', (5364, 5375), False, 'import inspect\n'), ((6916, 6958), 'collections.OrderedDict', 'OrderedDict', (['((k, 0) for k in initial_guess)'], {}), '((k, 0) for k in initial_guess)\n', (6927, 6958), False, 'from collections import OrderedDict\n'), ((7706, 7712), 'pyscf.lib.diis.DIIS', 'DIIS', ([], {}), '()\n', (7710, 7712), False, 'from pyscf.lib.diis import DIIS\n'), ((12365, 12396), 'numpy.prod', 'numpy.prod', (['([1] + ix_size[:pos])'], {}), '([1] + ix_size[:pos])\n', (12375, 12396), False, 'import numpy\n'), ((12410, 12445), 'numpy.prod', 'numpy.prod', (['([1] + ix_size[pos + 1:])'], {}), '([1] + ix_size[pos + 1:])\n', (12420, 12445), False, 
'import numpy\n'), ((13314, 13347), 'numpy.zeros', 'numpy.zeros', (['shape'], {'dtype': 'a.dtype'}), '(shape, dtype=a.dtype)\n', (13325, 13347), False, 'import numpy\n'), ((2649, 2669), 'numpy.reshape', 'numpy.reshape', (['v', '(-1)'], {}), '(v, -1)\n', (2662, 2669), False, 'import numpy\n'), ((2861, 2907), 'numpy.reshape', 'numpy.reshape', (['vec[offset:offset + s]', 'v.shape'], {}), '(vec[offset:offset + s], v.shape)\n', (2874, 2907), False, 'import numpy\n'), ((9543, 9597), 'numpy.zeros', 'numpy.zeros', (['((nocc,) * v + (nvir,) * (v - 1))'], {}), '((nocc,) * v + (nvir,) * (v - 1), **kwargs)\n', (9554, 9597), False, 'import numpy\n'), ((10548, 10602), 'numpy.zeros', 'numpy.zeros', (['((nocc,) * (v - 1) + (nvir,) * v)'], {}), '((nocc,) * (v - 1) + (nvir,) * v, **kwargs)\n', (10559, 10602), False, 'import numpy\n'), ((12467, 12486), 'numpy.repeat', 'numpy.repeat', (['x', 'ft'], {}), '(x, ft)\n', (12479, 12486), False, 'import numpy\n'), ((1639, 1660), 'numpy.zeros_like', 'numpy.zeros_like', (['res'], {}), '(res)\n', (1655, 1660), False, 'import numpy\n'), ((3637, 3651), 'numpy.ix_', 'numpy.ix_', (['*ix'], {}), '(*ix)\n', (3646, 3651), False, 'import numpy\n'), ((8091, 8111), 'numpy.linalg.norm', 'numpy.linalg.norm', (['i'], {}), '(i)\n', (8108, 8111), False, 'import numpy\n'), ((4665, 4705), 'numpy.transpose', 'numpy.transpose', (['eris.ooov', '(2, 3, 0, 1)'], {}), '(eris.ooov, (2, 3, 0, 1))\n', (4680, 4705), False, 'import numpy\n'), ((4846, 4886), 'numpy.transpose', 'numpy.transpose', (['eris.oovv', '(2, 3, 0, 1)'], {}), '(eris.oovv, (2, 3, 0, 1))\n', (4861, 4886), False, 'import numpy\n'), ((4544, 4584), 'numpy.transpose', 'numpy.transpose', (['eris.ooov', '(0, 1, 3, 2)'], {}), '(eris.ooov, (0, 1, 3, 2))\n', (4559, 4584), False, 'import numpy\n'), ((4906, 4946), 'numpy.transpose', 'numpy.transpose', (['eris.ovvv', '(2, 3, 1, 0)'], {}), '(eris.ovvv, (2, 3, 1, 0))\n', (4921, 4946), False, 'import numpy\n'), ((4108, 4148), 'numpy.transpose', 'numpy.transpose', 
(['eris.oovv', '(2, 3, 0, 1)'], {}), '(eris.oovv, (2, 3, 0, 1))\n', (4123, 4148), False, 'import numpy\n'), ((3517, 3537), 'numpy.arange', 'numpy.arange', (['(d // 2)'], {}), '(d // 2)\n', (3529, 3537), False, 'import numpy\n'), ((3559, 3582), 'numpy.arange', 'numpy.arange', (['(d // 2)', 'd'], {}), '(d // 2, d)\n', (3571, 3582), False, 'import numpy\n'), ((3905, 3945), 'numpy.transpose', 'numpy.transpose', (['eris.ooov', '(0, 1, 3, 2)'], {}), '(eris.ooov, (0, 1, 3, 2))\n', (3920, 3945), False, 'import numpy\n'), ((4021, 4061), 'numpy.transpose', 'numpy.transpose', (['eris.ovov', '(0, 1, 3, 2)'], {}), '(eris.ovov, (0, 1, 3, 2))\n', (4036, 4061), False, 'import numpy\n'), ((4168, 4208), 'numpy.transpose', 'numpy.transpose', (['eris.ovvv', '(2, 3, 1, 0)'], {}), '(eris.ovvv, (2, 3, 1, 0))\n', (4183, 4208), False, 'import numpy\n')]
|
import re
from pathlib import Path
class UsbId:
    """Lookup of human-readable names in the usb.ids database file."""

    # The usb.ids database is expected next to this module.
    file = Path(__file__).parent.joinpath('usb.ids')

    @staticmethod
    def get_usbid_names(vendorid, deviceid=None, interfaceid=None):
        """Resolve vendor/device/interface ids to their names.

        Args:
            vendorid: vendor id string as it appears in usb.ids.
            deviceid: optional device id, looked up after the vendor entry.
            interfaceid: optional interface id, looked up after the device
                entry.

        Returns:
            A (vendor, device, interface) tuple of names (entries are None
            when not found or not requested), or None when nothing was
            resolved at all.
        """
        vendor = None
        device = None
        interface = None
        with open(UsbId.file, encoding='iso-8859-1') as f:
            text = f.read()
        m = re.search(fr'\n{vendorid} (.*?)\n', text)
        if m:
            vendor = m.group(1)
            if deviceid:
                # Narrow the search window to the text after the vendor
                # entry so that subsequent match offsets stay consistent.
                text = text[m.end():]
                m = re.search(f'\t{deviceid} (.*?)\n', text)
                if m:
                    device = m.group(1)
                    if interfaceid:
                        # Bug fix: m.end() is relative to the vendor-sliced
                        # text.  The original sliced the full file with this
                        # offset, so the interface search could start too
                        # early and match an unrelated entry.
                        m = re.search(f'\t\t{interfaceid} (.*?)\n', text[m.end():])
                        if m:
                            interface = m.group(1)
        if not vendor and not device and not interface:
            return None
        else:
            return vendor, device, interface
|
[
"pathlib.Path",
"re.search"
] |
[((361, 404), 're.search', 're.search', (['f"""\\\\n{vendorid} (.*?)\\\\n"""', 'text'], {}), "(f'\\\\n{vendorid} (.*?)\\\\n', text)\n", (370, 404), False, 'import re\n'), ((60, 74), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (64, 74), False, 'from pathlib import Path\n')]
|
# -*- coding: utf-8 -*-
"""
"""
from __future__ import unicode_literals
import glob
from pathlib import Path
import os
import youtube_dl
import urllib
# FIXME: ganna be duplicated.
class Download(object):
    """Download YouTube audio and split it into caption-aligned clips."""

    def __init__(self):
        pass

    def run(self, video_ids: list, output_dir: Path = 'result'):
        """Entry point stub.

        NOTE(review): only errors out when output_dir already exists
        (errno 17 == EEXIST); the download loop is not implemented.
        """
        if os.path.exists(output_dir):
            raise OSError(17)

    def download(self, video_id: str, output_dir: Path):
        """Download one video's audio track as a .wav via youtube-dl."""
        base_url = 'https://www.youtube.com/watch?v='
        config = {
            "format": "bestaudio/best",
            "postprocessors": [{
                "key": "FFmpegExtractAudio",
                "preferredcodec": "wav",
                "preferredquality": "192"
            }]
        }
        # Output template: <output_dir>/<video id>.<extension>
        config['outtmpl'] = os.path.join(output_dir, '%(id)s.%(ext)s')
        youtube_link = base_url + video_id
        with youtube_dl.YoutubeDL(config) as ydl:
            ydl.download([youtube_link])

    def directory_init(self, output_dir: Path):
        """Create the output directory (including parents) if missing."""
        output_dir.mkdir(parents=True, exist_ok=True)

    # Fetch the captions for an already-saved clip.
    def mp3totext(self, content_id: str):
        """Fetch timed-text captions for a video from Google's endpoint.

        Returns three parallel lists: start times, durations, caption texts.

        NOTE(review): 'ET' (xml.etree.ElementTree) is never imported in this
        module, and 'import urllib' alone does not make 'urllib.request'
        available -- this method fails at runtime as written; confirm and
        add the imports.
        """
        # For English captions use hl=en.
        url = 'http://video.google.com/timedtext?hl=en&lang=en&name=&v=' + content_id
        with urllib.request.urlopen(url) as response:
            XmlData = response.read()
            print(XmlData)
            root = ET.fromstring(XmlData)
            start = []
            dur = []
            txt = []
            for i, child in enumerate(root):
                start.append(child.get('start'))
                dur.append(child.get('dur'))
                txt.append(child.text)
            return start, dur, txt

    # Write captions to text files and cut the matching audio spans.
    def write_data(self, start, dur, txt, filename):
        """Write one text file and one audio clip per caption entry.

        NOTE(review): 'librosa' and 'math' are referenced but never imported
        in this module -- this method raises NameError as written.
        """
        print(filename)
        y, sr = librosa.load(filename, sr=44100)
        #basename = os.path.splitext(filename)[0]
        basename = os.path.splitext(os.path.split(filename)[1])[0]
        for i in range(len(start)):
            # Write the caption text (newlines and spaces stripped).
            f = open('bigger_data/txt/' + basename+'_'+str(i)+'.txt', 'w')
            f.write(txt[i].replace('\n', '').replace(' ', ''))
            f.close()
            # Cut the matching audio span (sample offsets from seconds).
            time_start = math.floor(float(start[i])*sr)
            time_end = time_start + math.floor(float(dur[i])*sr)
            snd = y[time_start:time_end]
            # TODO: make this robust to variable-length file names
            # (currently assumes a fixed-length basename).
            librosa.output.write_wav('bigger_data/audio/' + basename + '_' + str(i) + '.wav', snd, sr=44100)

    def main(self, root):
        """Process every .wav under root: fetch captions, write clips."""
        filenames = glob.glob(os.path.join(root, '*.wav'))
        print(filenames)
        for filename in filenames:
            #content_id = filename[-15:-4]
            content_id = os.path.splitext(os.path.split(filename)[1])[0]
            print(content_id)
            start, dur, txt = self.mp3totext(content_id)
            self.write_data(start, dur, txt, filename)
if __name__ == '__main__':
    # Script mode: process every .wav file in the current directory.
    download = Download()
    download.main('.')
|
[
"os.path.exists",
"urllib.request.urlopen",
"youtube_dl.YoutubeDL",
"os.path.split",
"os.path.join"
] |
[((322, 348), 'os.path.exists', 'os.path.exists', (['output_dir'], {}), '(output_dir)\n', (336, 348), False, 'import os\n'), ((767, 809), 'os.path.join', 'os.path.join', (['output_dir', '"""%(id)s.%(ext)s"""'], {}), "(output_dir, '%(id)s.%(ext)s')\n", (779, 809), False, 'import os\n'), ((867, 895), 'youtube_dl.YoutubeDL', 'youtube_dl.YoutubeDL', (['config'], {}), '(config)\n', (887, 895), False, 'import youtube_dl\n'), ((1231, 1258), 'urllib.request.urlopen', 'urllib.request.urlopen', (['url'], {}), '(url)\n', (1253, 1258), False, 'import urllib\n'), ((2491, 2518), 'os.path.join', 'os.path.join', (['root', '"""*.wav"""'], {}), "(root, '*.wav')\n", (2503, 2518), False, 'import os\n'), ((1846, 1869), 'os.path.split', 'os.path.split', (['filename'], {}), '(filename)\n', (1859, 1869), False, 'import os\n'), ((2666, 2689), 'os.path.split', 'os.path.split', (['filename'], {}), '(filename)\n', (2679, 2689), False, 'import os\n')]
|
from collections import OrderedDict
import os.path as osp
import matplotlib.pyplot as plt
import numpy as np
from rlkit.torch.networks import ConcatMlp
from rlkit.torch.sets import set_vae_trainer as svt
from rlkit.torch.sets import models
from rlkit.torch.sets.discriminator import (
DiscriminatorDataset,
DiscriminatorTrainer,
)
from rlkit.torch.sets.set_vae_trainer import PriorModel, CustomDictLoader
from rlkit.torch.sets.batch_algorithm import (
BatchTorchAlgorithm,
)
from rlkit.torch.sets.parallel_algorithms import ParallelAlgorithms
from torch.utils import data
from rlkit.torch import pytorch_util as ptu
from rlkit.torch.vae.vae_torch_trainer import VAE
def create_circle_dataset(num_examples, radius=3, scale=0.5, origin=(0, 0)):
    """Sample 2-D points on a noisy circle.

    Angles are uniform on [0, 2*pi); radii are Gaussian around ``radius``
    with standard deviation ``scale``; the result is shifted by ``origin``.

    Returns:
        Array of shape (num_examples, 2): x in column 0, y in column 1.
    """
    theta = np.random.uniform(size=(num_examples, 1)) * 2 * np.pi
    rho = scale * np.random.randn(num_examples, 1) + radius
    ys = rho * np.sin(theta) + origin[1]
    xs = rho * np.cos(theta) + origin[0]
    return np.hstack([xs, ys])
def create_box_dataset(num_examples, xlim, ylim):
    """Sample 2-D points uniformly inside an axis-aligned box.

    The box is [xlim[0], xlim[1]] x [ylim[0], ylim[1]].

    Returns:
        Array of shape (num_examples, 2).
    """
    xs = np.random.uniform(xlim[0], xlim[1], size=(num_examples, 1))
    ys = np.random.uniform(ylim[0], ylim[1], size=(num_examples, 1))
    return np.hstack([xs, ys])
def create_datasets(create_set_kwargs_list=None):
    """Build a collection of 2-D point sets.

    Defaults to one circle set plus four box sets of 128 points each when
    no spec list is given.

    Returns:
        np.array stacking one set per spec.
    """
    if create_set_kwargs_list is None:
        create_set_kwargs_list = [
            dict(num_examples=128, version='circle'),
            dict(num_examples=128, version='box', xlim=(0, 2), ylim=(0, 2)),
            dict(num_examples=128, version='box', xlim=(-2, 0), ylim=(-2, 0)),
            dict(num_examples=128, version='box', xlim=(0, 2), ylim=(-2, 0)),
            dict(num_examples=128, version='box', xlim=(-2, 2), ylim=(0, 2)),
        ]
    sets = [create_set(**spec) for spec in create_set_kwargs_list]
    return np.array(sets)
def create_set(version, **kwargs):
    """Create one 2-D point set of the requested kind.

    Supported versions: 'circle', 'box'; kwargs are forwarded to the
    corresponding create_*_dataset helper.
    """
    if version == 'circle':
        return create_circle_dataset(**kwargs)
    if version == 'box':
        return create_box_dataset(**kwargs)
    raise NotImplementedError()
def setup_discriminator(
        vae: VAE,
        examples,
        prior,
        discriminator_kwargs=None,
        dataset_kwargs=None,
        trainer_kwargs=None,
        algo_kwargs=None,
        name='',
):
    """Wire up one set-membership discriminator over the VAE latent space.

    Builds the discriminator MLP, its dataset/loader over ``examples`` and
    ``prior``, the trainer, and a batch training algorithm.

    Returns:
        (algorithm, discriminator network, prior) triple.
    """
    discriminator_kwargs = {} if discriminator_kwargs is None else discriminator_kwargs
    dataset_kwargs = {} if dataset_kwargs is None else dataset_kwargs
    trainer_kwargs = {} if trainer_kwargs is None else trainer_kwargs
    algo_kwargs = {} if algo_kwargs is None else algo_kwargs
    # Binary classifier over latent codes.
    net = ConcatMlp(
        input_size=vae.representation_size,
        output_size=1,
        **discriminator_kwargs
    )
    loader = DiscriminatorDataset(vae, examples, prior, **dataset_kwargs)
    trainer = DiscriminatorTrainer(net, prior, name=name, **trainer_kwargs)
    algo = BatchTorchAlgorithm(trainer, loader, **algo_kwargs)
    return algo, net, prior
def train_2d_set_vae(
        create_set_vae_kwargs,
        vae_trainer_kwargs,
        vae_algo_kwargs,
        debug_kwargs,
        num_iters,
        x_depends_on_c=False,
        vae_data_loader_kwargs=None,
        create_train_dataset_kwargs=None,
        create_eval_dataset_kwargs=None,
        setup_discriminator_kwargs=None,
        set_dict_loader_kwargs=None,
):
    """Train a set-VAE (optionally with per-set discriminators) on 2-D toy data.

    Builds train/eval point sets, the set-VAE model, the data loaders, the
    VAE trainer, and -- when ``setup_discriminator_kwargs`` is truthy -- one
    discriminator algorithm per training set, then runs all of them in
    parallel for ``num_iters`` iterations.
    """
    if set_dict_loader_kwargs is None:
        set_dict_loader_kwargs = {}
    if vae_data_loader_kwargs is None:
        vae_data_loader_kwargs = {}
    if setup_discriminator_kwargs is None:
        setup_discriminator_kwargs = {}
    if create_eval_dataset_kwargs is None:
        # Default: evaluate on sets generated the same way as training.
        create_eval_dataset_kwargs = create_train_dataset_kwargs
    data_dim = 2
    eval_sets = create_datasets(**create_eval_dataset_kwargs)
    train_sets = create_datasets(**create_train_dataset_kwargs)
    # NOTE(review): scatter-plots the training sets onto the current
    # matplotlib figure without creating or saving one -- confirm intended.
    for set_ in train_sets:
        plt.scatter(*set_.T)
    all_obs = np.vstack(train_sets)
    # vae = models.create_vector_vae(
    #     data_dim=data_dim,
    #     **create_vae_kwargs,
    # )
    vae = models.create_vector_set_vae(
        data_dim=data_dim,
        x_depends_on_c=x_depends_on_c,
        **create_set_vae_kwargs,
    )
    # Dictionary keys used by the custom dict loader / trainer.
    data_key = 'data'
    set_key = 'set'
    set_index_key = 'set_index'
    train_sets_pt = [ptu.from_numpy(s) for s in train_sets]
    eval_sets_pt = [ptu.from_numpy(s) for s in eval_sets]
    all_obs_pt = ptu.from_numpy(all_obs)
    all_obs_iterator_pt = data.DataLoader(all_obs_pt, **vae_data_loader_kwargs)
    dict_loader = CustomDictLoader(
        data=all_obs_iterator_pt,
        sets=train_sets_pt,
        data_key=data_key,
        set_key=set_key,
        set_index_key=set_index_key,
        **set_dict_loader_kwargs
    )
    algos = OrderedDict()
    discriminator_algos = []
    discriminators = []
    if setup_discriminator_kwargs:
        # One learnable prior and one discriminator per training set.
        prior_models = [PriorModel(vae.representation_size) for _ in train_sets_pt]
        for i, examples in enumerate(train_sets_pt):
            discriminator_algo, discriminator, prior_m = setup_discriminator(
                vae,
                examples,
                prior_models[i],
                name='discriminator{}'.format(i),
                **setup_discriminator_kwargs
            )
            discriminator_algos.append(discriminator_algo)
            discriminators.append(discriminator)
    else:
        prior_models = None
    vae_trainer = svt.SetVAETrainer(
        vae=vae,
        set_key=set_key,
        data_key=data_key,
        train_sets=train_sets_pt,
        eval_sets=eval_sets_pt,
        prior_models=prior_models,
        discriminators=discriminators,
        **vae_trainer_kwargs)
    vae_algorithm = BatchTorchAlgorithm(
        vae_trainer,
        dict_loader,
        **vae_algo_kwargs,
    )
    algos['vae'] = vae_algorithm
    for i, algo in enumerate(discriminator_algos):
        algos['discriminator_{}'.format(i)] = algo
    # Interleave VAE and discriminator updates for num_iters iterations.
    algorithm = ParallelAlgorithms(algos, num_iters)
    algorithm.to(ptu.device)
    set_up_debugging(vae_algorithm, prior_models, discriminator_algos, **debug_kwargs)
    algorithm.run()
def set_up_debugging(
        vae_algorithm,
        prior_models,
        discriminator_algos,
        debug_period=10,
        num_samples=25,
        dump_posterior_and_prior_samples=False,
):
    """Register post-epoch hooks on ``vae_algorithm`` that save debug figures.

    Every ``debug_period`` epochs the hooks write, under the logger snapshot
    dir: unconditional VAE samples, per-set reconstruction quiver plots,
    set-conditioned samples, and (when ``dump_posterior_and_prior_samples``)
    scatter plots comparing the discriminators' posterior vs. prior points.

    Note: ``prior_models`` is not referenced inside this function; it appears
    to be kept only for interface symmetry with the caller.
    """
    from rlkit.core import logger
    logdir = logger.get_snapshot_dir()
    # NOTE(review): assigned but never used below -- candidate for removal.
    set_loss_version = vae_algorithm.trainer.set_loss_version
    # visualize the train/eval set once
    plt_colors = plt.rcParams['axes.prop_cycle'].by_key()['color']
    # Axis limits captured from the last visualization figure; reused so all
    # later plots share the same viewport.
    xmin = xmax = ymin = ymax = 0
    for name, list_of_sets in [
        ('train', vae_algorithm.trainer.train_sets),
        ('eval', vae_algorithm.trainer.eval_sets),
    ]:
        plt.figure()
        for i, set in enumerate(list_of_sets):
            set_examples = ptu.get_numpy(set)
            plt.scatter(*set_examples.T, color=plt_colors[i])
        xmin, xmax, ymin, ymax = plt.axis()
        plt.savefig(osp.join(logdir, '{}_set_visualization.png'.format(name)))
        plt.close()
    def dump_debug_images(
            algo,
            epoch,
            tag='',
    ):
        # Post-epoch hook: dump sample/reconstruction figures (no-op between
        # debug periods).
        trainer = algo.trainer
        trainer.vae.train()
        if debug_period <= 0 or epoch % debug_period != 0:
            return
        def draw_reconstruction(batch, color=None):
            # Draw an arrow from each input point to its VAE reconstruction.
            x_np = ptu.get_numpy(batch)
            x_hat_np = ptu.get_numpy(trainer.vae.reconstruct(batch))
            delta = x_hat_np - x_np
            plt.quiver(
                x_np[:, 0],
                x_np[:, 1],
                delta[:, 0],
                delta[:, 1],
                scale=1.,
                scale_units='xy',
                linewidth=0.5,
                alpha=0.5,
                color=color,
            )
        # batch = trainer.example_batch[trainer.data_key]
        # plt.figure()
        # draw_reconstruction(batch)
        # plt.savefig(osp.join(logdir, '{}_recon.png'.format(epoch)))
        #
        # Unconditional samples from the VAE prior.
        raw_samples = ptu.get_numpy(trainer.vae.sample(num_samples))
        plt.figure()
        plt.scatter(*raw_samples.T)
        plt.title('samples, epoch {}'.format(epoch))
        plt.savefig(osp.join(logdir, 'vae_samples_{epoch}.png'.format(
            epoch=epoch)))
        plt.close()
        # Reconstruction quiver plots for the eval sets.
        for prefix, list_of_sets in [
            ('eval', trainer.eval_sets),
        ]:
            name = prefix + tag
            plt.figure()
            for i, set in enumerate(list_of_sets):
                draw_reconstruction(set, color=plt_colors[i])
            plt.xlim((xmin, xmax))
            plt.ylim((ymin, ymax))
            plt.title('{}, epoch {}'.format(name, epoch))
            plt.savefig(
                osp.join(logdir, 'set_recons_{name}_{epoch}.png'.format(
                    epoch=epoch, name=name)))
            plt.close()
        # Set-conditioned samples (with and without fixed axes) plus another
        # reconstruction plot, for both train and eval sets.
        for prefix, list_of_sets in [
            ('train', trainer.train_sets),
            ('eval', trainer.eval_sets),
        ]:
            name = prefix + tag
            for fix_xy_lims in [True, False]:
                plt.figure()
                for set_i, set in enumerate(list_of_sets):
                    set_samples = ptu.get_numpy(
                        trainer.vae.set_sample(num_samples, set))
                    plt.scatter(*set_samples.T, color=plt_colors[set_i])
                if fix_xy_lims:
                    plt.xlim((xmin, xmax))
                    plt.ylim((ymin, ymax))
                    file_name = 'set_vae_samples_fixed_axes_{name}_{epoch}.png'.format(
                        epoch=epoch, name=name,
                    )
                else:
                    file_name = 'set_vae_samples_{name}_{epoch}.png'.format(
                        epoch=epoch, name=name,
                    )
                plt.title('{}, epoch {}'.format(name, epoch))
                plt.savefig(osp.join(logdir, file_name))
                plt.close()
            plt.figure()
            for i, set in enumerate(list_of_sets):
                draw_reconstruction(set, color=plt_colors[i])
            plt.xlim((xmin, xmax))
            plt.ylim((ymin, ymax))
            plt.title('{}, epoch {}'.format(name, epoch))
            plt.savefig(
                osp.join(logdir, 'set_recons_{name}_{epoch}.png'.format(
                    epoch=epoch, name=name)))
            plt.close()
    def dump_samples(
            algo,
            epoch,
    ):
        # Post-epoch hook: scatter the discriminators' last-batch points,
        # posterior (red) vs. prior (blue), one row per discriminator.
        if debug_period <= 0 or epoch % debug_period != 0:
            return
        # visualize the train/eval set once
        data_loaders = [algo.data_loader for algo in discriminator_algos]
        def get_last_batch(dl):
            # Exhaust the loader and keep only its final batch.
            batch = None
            for batch in dl:
                pass
            return batch
        batches = [get_last_batch(dl) for dl in data_loaders]
        nrows = len(batches)
        ncols = algo.trainer.vae.representation_size // 2
        fig, list_of_axes = plt.subplots(nrows=nrows, ncols=ncols, figsize=(2 * ncols, 2 * nrows))
        for batch, axes in zip(batches, list_of_axes):
            # y = batch['y']
            x = batch['x']
            xnp = x.cpu().detach().numpy()
            # Assumes the first 128 rows of the batch are posterior samples and
            # the remainder prior samples -- TODO(review): confirm against the
            # discriminator dataset construction.
            posterior_samples = xnp[:128]
            prior_samples = xnp[128:]
            for i, ax in enumerate(axes):
                post_x = posterior_samples[:, 2*i]
                post_y = posterior_samples[:, 2*i + 1]
                prior_x = prior_samples[:, 2*i]
                prior_y = prior_samples[:, 2*i + 1]
                ax.scatter(post_x, post_y, color='r')
                ax.scatter(prior_x, prior_y, color='b')
        # NOTE(review): 'name' here leaks from the set-visualization loop at
        # the top of set_up_debugging (its last value, 'eval') -- this looks
        # unintended; confirm before relying on the plot title.
        plt.title('{}, epoch {}'.format(name, epoch))
        plt.savefig(logdir + '/discriminator_samples_{epoch}.png'.format(
            epoch=epoch,
        ))
        plt.close()
    vae_algorithm.post_epoch_funcs.append(dump_debug_images)
    if dump_posterior_and_prior_samples:
        vae_algorithm.post_epoch_funcs.append(dump_samples)
    # if discriminator_algos:
    #     vae_algorithm.pre_train_funcs.append(
    #         functools.partial(dump_debug_images, tag='-pre-vae')
    #     )
|
[
"matplotlib.pyplot.quiver",
"rlkit.torch.sets.batch_algorithm.BatchTorchAlgorithm",
"rlkit.torch.pytorch_util.from_numpy",
"rlkit.torch.networks.ConcatMlp",
"matplotlib.pyplot.figure",
"numpy.sin",
"rlkit.torch.sets.parallel_algorithms.ParallelAlgorithms",
"rlkit.core.logger.get_snapshot_dir",
"os.path.join",
"rlkit.torch.sets.models.create_vector_set_vae",
"torch.utils.data.DataLoader",
"numpy.random.randn",
"rlkit.torch.sets.set_vae_trainer.SetVAETrainer",
"matplotlib.pyplot.close",
"rlkit.torch.sets.discriminator.DiscriminatorTrainer",
"matplotlib.pyplot.subplots",
"rlkit.torch.pytorch_util.get_numpy",
"matplotlib.pyplot.ylim",
"numpy.cos",
"rlkit.torch.sets.set_vae_trainer.CustomDictLoader",
"numpy.vstack",
"numpy.concatenate",
"numpy.random.uniform",
"matplotlib.pyplot.xlim",
"matplotlib.pyplot.scatter",
"rlkit.torch.sets.set_vae_trainer.PriorModel",
"matplotlib.pyplot.axis",
"rlkit.torch.sets.discriminator.DiscriminatorDataset",
"collections.OrderedDict"
] |
[((970, 1000), 'numpy.concatenate', 'np.concatenate', (['[x, y]'], {'axis': '(1)'}), '([x, y], axis=1)\n', (984, 1000), True, 'import numpy as np\n'), ((1061, 1120), 'numpy.random.uniform', 'np.random.uniform', (['xlim[0]', 'xlim[1]'], {'size': '(num_examples, 1)'}), '(xlim[0], xlim[1], size=(num_examples, 1))\n', (1078, 1120), True, 'import numpy as np\n'), ((1129, 1188), 'numpy.random.uniform', 'np.random.uniform', (['ylim[0]', 'ylim[1]'], {'size': '(num_examples, 1)'}), '(ylim[0], ylim[1], size=(num_examples, 1))\n', (1146, 1188), True, 'import numpy as np\n'), ((1200, 1230), 'numpy.concatenate', 'np.concatenate', (['[x, y]'], {'axis': '(1)'}), '([x, y], axis=1)\n', (1214, 1230), True, 'import numpy as np\n'), ((2536, 2625), 'rlkit.torch.networks.ConcatMlp', 'ConcatMlp', ([], {'input_size': 'vae.representation_size', 'output_size': '(1)'}), '(input_size=vae.representation_size, output_size=1, **\n discriminator_kwargs)\n', (2545, 2625), False, 'from rlkit.torch.networks import ConcatMlp\n'), ((2683, 2743), 'rlkit.torch.sets.discriminator.DiscriminatorDataset', 'DiscriminatorDataset', (['vae', 'examples', 'prior'], {}), '(vae, examples, prior, **dataset_kwargs)\n', (2703, 2743), False, 'from rlkit.torch.sets.discriminator import DiscriminatorDataset, DiscriminatorTrainer\n'), ((2781, 2852), 'rlkit.torch.sets.discriminator.DiscriminatorTrainer', 'DiscriminatorTrainer', (['discriminator', 'prior'], {'name': 'name'}), '(discriminator, prior, name=name, **trainer_kwargs)\n', (2801, 2852), False, 'from rlkit.torch.sets.discriminator import DiscriminatorDataset, DiscriminatorTrainer\n'), ((2917, 3006), 'rlkit.torch.sets.batch_algorithm.BatchTorchAlgorithm', 'BatchTorchAlgorithm', (['discriminator_trainer', 'discriminator_data_loader'], {}), '(discriminator_trainer, discriminator_data_loader, **\n algo_kwargs)\n', (2936, 3006), False, 'from rlkit.torch.sets.batch_algorithm import BatchTorchAlgorithm\n'), ((4019, 4040), 'numpy.vstack', 'np.vstack', (['train_sets'], {}), 
'(train_sets)\n', (4028, 4040), True, 'import numpy as np\n'), ((4158, 4266), 'rlkit.torch.sets.models.create_vector_set_vae', 'models.create_vector_set_vae', ([], {'data_dim': 'data_dim', 'x_depends_on_c': 'x_depends_on_c'}), '(data_dim=data_dim, x_depends_on_c=\n x_depends_on_c, **create_set_vae_kwargs)\n', (4186, 4266), False, 'from rlkit.torch.sets import models\n'), ((4503, 4526), 'rlkit.torch.pytorch_util.from_numpy', 'ptu.from_numpy', (['all_obs'], {}), '(all_obs)\n', (4517, 4526), True, 'from rlkit.torch import pytorch_util as ptu\n'), ((4553, 4606), 'torch.utils.data.DataLoader', 'data.DataLoader', (['all_obs_pt'], {}), '(all_obs_pt, **vae_data_loader_kwargs)\n', (4568, 4606), False, 'from torch.utils import data\n'), ((4625, 4788), 'rlkit.torch.sets.set_vae_trainer.CustomDictLoader', 'CustomDictLoader', ([], {'data': 'all_obs_iterator_pt', 'sets': 'train_sets_pt', 'data_key': 'data_key', 'set_key': 'set_key', 'set_index_key': 'set_index_key'}), '(data=all_obs_iterator_pt, sets=train_sets_pt, data_key=\n data_key, set_key=set_key, set_index_key=set_index_key, **\n set_dict_loader_kwargs)\n', (4641, 4788), False, 'from rlkit.torch.sets.set_vae_trainer import PriorModel, CustomDictLoader\n'), ((4846, 4859), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (4857, 4859), False, 'from collections import OrderedDict\n'), ((5516, 5717), 'rlkit.torch.sets.set_vae_trainer.SetVAETrainer', 'svt.SetVAETrainer', ([], {'vae': 'vae', 'set_key': 'set_key', 'data_key': 'data_key', 'train_sets': 'train_sets_pt', 'eval_sets': 'eval_sets_pt', 'prior_models': 'prior_models', 'discriminators': 'discriminators'}), '(vae=vae, set_key=set_key, data_key=data_key, train_sets=\n train_sets_pt, eval_sets=eval_sets_pt, prior_models=prior_models,\n discriminators=discriminators, **vae_trainer_kwargs)\n', (5533, 5717), True, 'from rlkit.torch.sets import set_vae_trainer as svt\n'), ((5794, 5858), 'rlkit.torch.sets.batch_algorithm.BatchTorchAlgorithm', 'BatchTorchAlgorithm', 
(['vae_trainer', 'dict_loader'], {}), '(vae_trainer, dict_loader, **vae_algo_kwargs)\n', (5813, 5858), False, 'from rlkit.torch.sets.batch_algorithm import BatchTorchAlgorithm\n'), ((6041, 6077), 'rlkit.torch.sets.parallel_algorithms.ParallelAlgorithms', 'ParallelAlgorithms', (['algos', 'num_iters'], {}), '(algos, num_iters)\n', (6059, 6077), False, 'from rlkit.torch.sets.parallel_algorithms import ParallelAlgorithms\n'), ((6461, 6486), 'rlkit.core.logger.get_snapshot_dir', 'logger.get_snapshot_dir', ([], {}), '()\n', (6484, 6486), False, 'from rlkit.core import logger\n'), ((3984, 4004), 'matplotlib.pyplot.scatter', 'plt.scatter', (['*set_.T'], {}), '(*set_.T)\n', (3995, 4004), True, 'import matplotlib.pyplot as plt\n'), ((4389, 4406), 'rlkit.torch.pytorch_util.from_numpy', 'ptu.from_numpy', (['s'], {}), '(s)\n', (4403, 4406), True, 'from rlkit.torch import pytorch_util as ptu\n'), ((4448, 4465), 'rlkit.torch.pytorch_util.from_numpy', 'ptu.from_numpy', (['s'], {}), '(s)\n', (4462, 4465), True, 'from rlkit.torch import pytorch_util as ptu\n'), ((6842, 6854), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (6852, 6854), True, 'import matplotlib.pyplot as plt\n'), ((7044, 7054), 'matplotlib.pyplot.axis', 'plt.axis', ([], {}), '()\n', (7052, 7054), True, 'import matplotlib.pyplot as plt\n'), ((7142, 7153), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (7151, 7153), True, 'import matplotlib.pyplot as plt\n'), ((8156, 8168), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (8166, 8168), True, 'import matplotlib.pyplot as plt\n'), ((8177, 8204), 'matplotlib.pyplot.scatter', 'plt.scatter', (['*raw_samples.T'], {}), '(*raw_samples.T)\n', (8188, 8204), True, 'import matplotlib.pyplot as plt\n'), ((8364, 8375), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (8373, 8375), True, 'import matplotlib.pyplot as plt\n'), ((11021, 11091), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'nrows': 'nrows', 'ncols': 'ncols', 'figsize': 
'(2 * ncols, 2 * nrows)'}), '(nrows=nrows, ncols=ncols, figsize=(2 * ncols, 2 * nrows))\n', (11033, 11091), True, 'import matplotlib.pyplot as plt\n'), ((11857, 11868), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (11866, 11868), True, 'import matplotlib.pyplot as plt\n'), ((771, 812), 'numpy.random.uniform', 'np.random.uniform', ([], {'size': '(num_examples, 1)'}), '(size=(num_examples, 1))\n', (788, 812), True, 'import numpy as np\n'), ((841, 873), 'numpy.random.randn', 'np.random.randn', (['num_examples', '(1)'], {}), '(num_examples, 1)\n', (856, 873), True, 'import numpy as np\n'), ((895, 908), 'numpy.sin', 'np.sin', (['angle'], {}), '(angle)\n', (901, 908), True, 'import numpy as np\n'), ((933, 946), 'numpy.cos', 'np.cos', (['angle'], {}), '(angle)\n', (939, 946), True, 'import numpy as np\n'), ((4972, 5007), 'rlkit.torch.sets.set_vae_trainer.PriorModel', 'PriorModel', (['vae.representation_size'], {}), '(vae.representation_size)\n', (4982, 5007), False, 'from rlkit.torch.sets.set_vae_trainer import PriorModel, CustomDictLoader\n'), ((6929, 6947), 'rlkit.torch.pytorch_util.get_numpy', 'ptu.get_numpy', (['set'], {}), '(set)\n', (6942, 6947), True, 'from rlkit.torch import pytorch_util as ptu\n'), ((6960, 7009), 'matplotlib.pyplot.scatter', 'plt.scatter', (['*set_examples.T'], {'color': 'plt_colors[i]'}), '(*set_examples.T, color=plt_colors[i])\n', (6971, 7009), True, 'import matplotlib.pyplot as plt\n'), ((7455, 7475), 'rlkit.torch.pytorch_util.get_numpy', 'ptu.get_numpy', (['batch'], {}), '(batch)\n', (7468, 7475), True, 'from rlkit.torch import pytorch_util as ptu\n'), ((7593, 7725), 'matplotlib.pyplot.quiver', 'plt.quiver', (['x_np[:, 0]', 'x_np[:, 1]', 'delta[:, 0]', 'delta[:, 1]'], {'scale': '(1.0)', 'scale_units': '"""xy"""', 'linewidth': '(0.5)', 'alpha': '(0.5)', 'color': 'color'}), "(x_np[:, 0], x_np[:, 1], delta[:, 0], delta[:, 1], scale=1.0,\n scale_units='xy', linewidth=0.5, alpha=0.5, color=color)\n", (7603, 7725), True, 'import 
matplotlib.pyplot as plt\n'), ((8511, 8523), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (8521, 8523), True, 'import matplotlib.pyplot as plt\n'), ((8649, 8671), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(xmin, xmax)'], {}), '((xmin, xmax))\n', (8657, 8671), True, 'import matplotlib.pyplot as plt\n'), ((8684, 8706), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(ymin, ymax)'], {}), '((ymin, ymax))\n', (8692, 8706), True, 'import matplotlib.pyplot as plt\n'), ((8921, 8932), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (8930, 8932), True, 'import matplotlib.pyplot as plt\n'), ((10025, 10037), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (10035, 10037), True, 'import matplotlib.pyplot as plt\n'), ((10163, 10185), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(xmin, xmax)'], {}), '((xmin, xmax))\n', (10171, 10185), True, 'import matplotlib.pyplot as plt\n'), ((10198, 10220), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(ymin, ymax)'], {}), '((ymin, ymax))\n', (10206, 10220), True, 'import matplotlib.pyplot as plt\n'), ((10435, 10446), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (10444, 10446), True, 'import matplotlib.pyplot as plt\n'), ((9160, 9172), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (9170, 9172), True, 'import matplotlib.pyplot as plt\n'), ((10000, 10011), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (10009, 10011), True, 'import matplotlib.pyplot as plt\n'), ((9367, 9419), 'matplotlib.pyplot.scatter', 'plt.scatter', (['*set_samples.T'], {'color': 'plt_colors[set_i]'}), '(*set_samples.T, color=plt_colors[set_i])\n', (9378, 9419), True, 'import matplotlib.pyplot as plt\n'), ((9472, 9494), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(xmin, xmax)'], {}), '((xmin, xmax))\n', (9480, 9494), True, 'import matplotlib.pyplot as plt\n'), ((9515, 9537), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(ymin, ymax)'], {}), '((ymin, ymax))\n', (9523, 9537), True, 'import matplotlib.pyplot as plt\n'), ((9955, 
9982), 'os.path.join', 'osp.join', (['logdir', 'file_name'], {}), '(logdir, file_name)\n', (9963, 9982), True, 'import os.path as osp\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = "MPZinke"
########################################################################################################################
# #
# created by: MPZinke #
# on 2020.12.29 #
# #
# DESCRIPTION: #
# BUGS: #
# FUTURE: #
# #
########################################################################################################################
from datetime import datetime, timedelta;
from json import dumps as json_dumps; # use as to be specific, but do not import too much from json
from threading import Lock;
from Other.Class.ZWidget import ZWidget;
from Other.DB.DBCredentials import *;
from Other.DB.DBFunctions import __CLOSE__, __CONNECT__;
from Other.DB.DBFunctions import SELECT_Curtains, SELECT_Options, UPDATE_all_prior_CurtainsEvents_is_activated;
from Other.Global import *;
from Other.Global import tomorrow_00_00;
from System.Curtain import Curtain;
from System.Option import Option;
class System(ZWidget):
    """In-memory mirror of the curtain/option state stored in the DB.

    Holds three caches rebuilt by refresh(): curtains by id, options by id,
    and options by name.  Runs as a ZWidget loop that re-reads the DB once
    per day (shortly after midnight).
    """
    def __init__(self):
        ZWidget.__init__(self, "System", self)
        # Guards refresh() so concurrent callers never observe half-built caches.
        self._mutex = Lock()
        self._Curtains = None
        self._Options = None
        self._Options_names = None
        self.refresh()
    def refresh(self) -> None:
        """Reload all curtains and options from the DB, clearing stale events."""
        self._mutex.acquire()  # just to ensure things are executed properly
        try:
            cnx, cursor = __CONNECT__(DB_USER, DB_PASSWORD, DATABASE)
            print(f"{UPDATE_all_prior_CurtainsEvents_is_activated(cnx, cursor)} old events cleared")
            curtain_rows = SELECT_Curtains(cursor)
            self._Curtains = {
                row["id"]: Curtain(**{**row, "System": self})
                for row in curtain_rows
            }
            self._Options = {
                row["id"]: Option(**row) for row in SELECT_Options(cursor)
            }
            self._Options_names = {
                option.name(): option for option in self._Options.values()
            }
            __CLOSE__(cnx, cursor)
        finally:
            self._mutex.release()
    # Compliments of https://jacobbridges.github.io/post/how-many-seconds-until-midnight/
    def sleep_time(self) -> int:
        """Seconds until just after midnight (30 s grace for event creators)."""
        until_midnight = (tomorrow_00_00() - datetime.now()).seconds
        return until_midnight + 30
    def _loop_process(self) -> None:
        # One ZWidget iteration: simply re-sync with the DB.
        self.refresh()
    # ———————————————————————————————————————————————————— GETTERS ————————————————————————————————————————————————————
    def Curtain(self, Curtain_id: int):
        """Curtain for the given id, or None."""
        return self._Curtains.get(Curtain_id)
    def Curtains(self) -> dict:
        return self._Curtains
    def Event_Curtain(self, CurtainEvent_id: int):
        """Curtain owning the given event id, or None if no curtain has it."""
        for curtain in self._Curtains.values():
            if curtain.CurtainEvent(CurtainEvent_id):
                return curtain
        return None
    def Option(self, Options_id: int):
        """Option for the given id, or None."""
        return self._Options.get(Options_id)
    def Option_name(self, Options_name: str):
        """Option matching the given name, or None."""
        return self._Options_names.get(Options_name)
    def Options(self) -> dict:
        return self._Options
    # ———————————————————————————————————————————————————— UTILITY ————————————————————————————————————————————————————
    def dict(self):
        """Recursively serialize curtains and options to plain dicts."""
        return {
            "_Curtains": {key: crt.dict() for key, crt in self._Curtains.items()},
            "_Options": {key: opt.dict() for key, opt in self._Options.items()},
        }
    def print(self, tab=0, next_tab=0):
        """Pretty-print the cached state, indenting nested items by next_tab."""
        print('\t'*tab, "_Curtains: ")
        for curtain in self._Curtains.values():
            curtain.print(tab+next_tab, next_tab)
        print('\t'*tab, "_Options: ")
        for option in self._Options.values():
            option.print(tab+next_tab, next_tab)
|
[
"Other.DB.DBFunctions.SELECT_Curtains",
"Other.DB.DBFunctions.__CLOSE__",
"Other.DB.DBFunctions.__CONNECT__",
"Other.Global.tomorrow_00_00",
"Other.Class.ZWidget.ZWidget.__init__",
"System.Option.Option",
"threading.Lock",
"Other.DB.DBFunctions.UPDATE_all_prior_CurtainsEvents_is_activated",
"Other.DB.DBFunctions.SELECT_Options",
"datetime.datetime.now",
"System.Curtain.Curtain"
] |
[((1890, 1928), 'Other.Class.ZWidget.ZWidget.__init__', 'ZWidget.__init__', (['self', '"""System"""', 'self'], {}), "(self, 'System', self)\n", (1906, 1928), False, 'from Other.Class.ZWidget import ZWidget\n'), ((1946, 1952), 'threading.Lock', 'Lock', ([], {}), '()\n', (1950, 1952), False, 'from threading import Lock\n'), ((2177, 2220), 'Other.DB.DBFunctions.__CONNECT__', '__CONNECT__', (['DB_USER', 'DB_PASSWORD', 'DATABASE'], {}), '(DB_USER, DB_PASSWORD, DATABASE)\n', (2188, 2220), False, 'from Other.DB.DBFunctions import __CLOSE__, __CONNECT__\n'), ((2339, 2362), 'Other.DB.DBFunctions.SELECT_Curtains', 'SELECT_Curtains', (['cursor'], {}), '(cursor)\n', (2354, 2362), False, 'from Other.DB.DBFunctions import SELECT_Curtains, SELECT_Options, UPDATE_all_prior_CurtainsEvents_is_activated\n'), ((2667, 2689), 'Other.DB.DBFunctions.__CLOSE__', '__CLOSE__', (['cnx', 'cursor'], {}), '(cnx, cursor)\n', (2676, 2689), False, 'from Other.DB.DBFunctions import __CLOSE__, __CONNECT__\n'), ((2400, 2438), 'System.Curtain.Curtain', 'Curtain', ([], {}), "(**{**curtain, 'System': self})\n", (2407, 2438), False, 'from System.Curtain import Curtain\n'), ((2508, 2524), 'System.Option.Option', 'Option', ([], {}), '(**option)\n', (2514, 2524), False, 'from System.Option import Option\n'), ((2539, 2561), 'Other.DB.DBFunctions.SELECT_Options', 'SELECT_Options', (['cursor'], {}), '(cursor)\n', (2553, 2561), False, 'from Other.DB.DBFunctions import SELECT_Curtains, SELECT_Options, UPDATE_all_prior_CurtainsEvents_is_activated\n'), ((2854, 2870), 'Other.Global.tomorrow_00_00', 'tomorrow_00_00', ([], {}), '()\n', (2868, 2870), False, 'from Other.Global import tomorrow_00_00\n'), ((2873, 2887), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2885, 2887), False, 'from datetime import datetime, timedelta\n'), ((2235, 2292), 'Other.DB.DBFunctions.UPDATE_all_prior_CurtainsEvents_is_activated', 'UPDATE_all_prior_CurtainsEvents_is_activated', (['cnx', 'cursor'], {}), '(cnx, cursor)\n', 
(2279, 2292), False, 'from Other.DB.DBFunctions import SELECT_Curtains, SELECT_Options, UPDATE_all_prior_CurtainsEvents_is_activated\n')]
|
import calendar
# Return value of itermonthdays:
#   format: d
#   d: the day of the month, or 0 for padding slots that belong to the
#      previous/next month.
def itermonthdays(cal):
    """Yield the padded day numbers of September 2017 for the given Calendar."""
    return cal.itermonthdays(2017, 9)
# Compare week-starts-Monday (0) with week-starts-Sunday (6).
for cal in (calendar.Calendar(firstweekday=0), calendar.Calendar(firstweekday=6)):
    print('-----', cal, '-----')
    print(itermonthdays(cal))
    for weekday in itermonthdays(cal):
        print(weekday)
|
[
"calendar.Calendar",
"calendar.itermonthdays"
] |
[((113, 144), 'calendar.itermonthdays', 'calendar.itermonthdays', (['(2017)', '(9)'], {}), '(2017, 9)\n', (135, 144), False, 'import calendar\n'), ((162, 195), 'calendar.Calendar', 'calendar.Calendar', ([], {'firstweekday': '(0)'}), '(firstweekday=0)\n', (179, 195), False, 'import calendar\n'), ((197, 230), 'calendar.Calendar', 'calendar.Calendar', ([], {'firstweekday': '(6)'}), '(firstweekday=6)\n', (214, 230), False, 'import calendar\n')]
|
import pytest
from gtd.log import Metadata, SyncedMetadata
class TestMetadata(object):
    """Unit tests for gtd.log.Metadata's plain, dotted and name-scoped access."""
    @pytest.fixture
    def m(self):
        # Fixture: a Metadata populated via plain setitem, name_scope setitem,
        # and dotted-key setitem, so each access style can be exercised.
        m = Metadata()
        m['a'] = 10  # this is overwritten
        m['b'] = 'test'
        # namescope setitem
        with m.name_scope('c'):
            m['foo'] = 140
        # nested setitem
        m['a.foo'] = 120
        m['c.bar'] = 'what'
        return m
    def test_getitem(self, m):
        # Plain keys round-trip unchanged.
        assert m['b'] == 'test'
    def test_nested_getitem(self, m):
        # Dotted keys reach values set via either dotted or scoped setitem.
        assert m['a.foo'] == 120
        assert m['c.foo'] == 140
    def test_namescope_getitem(self, m):
        # Reads inside a name_scope resolve relative to that scope.
        with m.name_scope('c'):
            assert m['bar'] == 'what'
    def test_nested_metadata(self, m):
        # Fetching a prefix yields a nested Metadata view.
        m_sub = m['a']
        assert isinstance(m_sub, Metadata)
        assert m_sub['foo'] == 120
    def test_contains(self, m):
        # Membership works for both plain and dotted keys.
        assert 'b' in m
        assert 'bar' not in m
        assert 'c.bar' in m
class TestSyncedMetadata(TestMetadata):  # run all the metadata tests
    """Re-runs every TestMetadata case against SyncedMetadata, plus syncing."""
    def test_syncing(self, tmpdir):
        # Writes made through one SyncedMetadata must be visible to a fresh
        # instance backed by the same file.
        meta_path = str(tmpdir.join('meta.txt'))
        s = SyncedMetadata(meta_path)
        with s.name_scope('job'):
            s['memory'] = 128
        s2 = SyncedMetadata(meta_path)  # reload the file
        assert s2['job.memory'] == 128
|
[
"gtd.log.Metadata",
"gtd.log.SyncedMetadata"
] |
[((139, 149), 'gtd.log.Metadata', 'Metadata', ([], {}), '()\n', (147, 149), False, 'from gtd.log import Metadata, SyncedMetadata\n'), ((1108, 1133), 'gtd.log.SyncedMetadata', 'SyncedMetadata', (['meta_path'], {}), '(meta_path)\n', (1122, 1133), False, 'from gtd.log import Metadata, SyncedMetadata\n'), ((1213, 1238), 'gtd.log.SyncedMetadata', 'SyncedMetadata', (['meta_path'], {}), '(meta_path)\n', (1227, 1238), False, 'from gtd.log import Metadata, SyncedMetadata\n')]
|
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import sys
import os
import string
import subprocess
import time
import glob
import re
# Number of timed runs averaged per test file.
iterations = 1
# Output CSV; appended to if it already exists.
resultsFile = "benchmarkResults.csv"
# testFile -> average wall-clock seconds, one dict per JIT mode.
nanojitResults = {}
halfmoonResults= {}
def usage():
    """Print command-line usage for this benchmark script."""
    print("Usage")
    print("timeTestSuite.py avmshell asc.jar builtinAbc testFile")
    # Bug fix: the trailing bare `print` was a no-op expression under
    # Python 3 (it only evaluated the function object); `print()` restores
    # the intended blank line.
    print()
def runSomething(shellExec, testFile, vmArgs):
    """Run the shell on testFile `iterations` times; return the mean wall time.

    `vmArgs` is accepted for interface compatibility but is not used yet.
    """
    shellPath = os.path.abspath(shellExec)
    command = [shellPath, "-Dnodebugger", testFile]
    elapsed = 0
    for _ in range(iterations):
        started = time.time()
        subprocess.call(command)
        elapsed += time.time() - started
    return elapsed / iterations
def runShell(shellExec, testFile, vmArgs):
    """Time testFile with the baseline (NanoJIT) mode (MODE=0)."""
    # Fix: assign os.environ instead of os.putenv -- putenv bypasses
    # os.environ, so the change was invisible to this process; environ
    # assignment is still inherited by the subprocess.
    os.environ["MODE"] = "0"
    return runSomething(shellExec, testFile, vmArgs)
def runHalfmoon(shellExec, testFile, vmArgs):
    """Time testFile with the Halfmoon mode (MODE=4)."""
    # Fix: assign os.environ instead of os.putenv (see runShell).
    os.environ["MODE"] = "4"
    return runSomething(shellExec, testFile, vmArgs)
def compileAbc(ascJarFile, builtinAbc, testFile):
    """Compile an ActionScript test to .abc with asc.jar; return the .abc path.

    Fix: the original left the os.popen pipe unread and unclosed, so the
    compiler could still be running when the caller tried to execute the
    .abc (a race) and the file handle leaked.  Reading the pipe inside a
    `with` block waits for the compiler to finish and closes the handle.
    """
    ascJarFile = os.path.abspath(ascJarFile)
    builtinAbc = os.path.abspath(builtinAbc)
    ascCommand = "java -jar " + ascJarFile + " -import " + builtinAbc + " " + testFile
    with os.popen(ascCommand) as compileToAbcProcess:
        compileToAbcProcess.read()  # block until the compiler exits
    compiledFileName = testFile.replace(".as", ".abc")
    return compiledFileName
def writeResultsToFile(testFile):
    """Append one CSV row (test, nanojit time, halfmoon time, ratio).

    Reads the module-level nanojitResults/halfmoonResults dicts and writes
    to the module-level csvFile.
    """
    print("looking at file: " + testFile)
    avmshellResult = nanojitResults[testFile]
    halfmoonResult = halfmoonResults[testFile]
    halfmoonComparison = str(avmshellResult / halfmoonResult)
    # Fix: removed a dead branch that computed a "typed" twin path via
    # re.sub('\\untyped\\' -> '\\typed\\') into a variable that was never
    # used.
    csvFile.write(testFile + "," + str(avmshellResult) + "," + str(halfmoonResult) + "," + str(halfmoonComparison) + "\n")
def runTest(shellExec, ascJarFile, builtinAbc, testFile):
    """Compile one .as test and record its timing for both JIT modes."""
    print("Running test: " + testFile)
    abcFile = compileAbc(ascJarFile, builtinAbc, testFile)
    # Last argument is vmargs (e.g. -Dverbose=); currently unsupported,
    # hence the empty string.
    nanojitTime = runShell(shellExec, abcFile, "")
    halfmoonTime = runHalfmoon(shellExec, abcFile, "")
    nanojitResults[testFile] = nanojitTime
    halfmoonResults[testFile] = halfmoonTime
def writeFileHeader(openedFile):
    """Write the CSV column-header row to an already-open file object."""
    header = "Benchmark, NanoJIT, Halfmoon, Difference\n"
    openedFile.write(header)
# Validate the CLI: expects avmshell, asc.jar, builtin.abc, test directory.
numberOfArgs = len(sys.argv)
if (numberOfArgs != 5):
    usage()
    exit(0)
# Append to an existing results CSV, or create it with a header row.
csvFile = None
if (os.path.exists(resultsFile)):
    csvFile = open(resultsFile, "a")
else:
    csvFile = open(resultsFile, "w")
    writeFileHeader(csvFile)
def runSuite(shellExec, ascJarFile, builtinAbc, testDirectory):
    """Recursively compile and time every .as test under testDirectory."""
    for file in os.listdir(testDirectory):
        fullPath = os.path.normpath(os.path.join(testDirectory, file))
        if (os.path.isdir(fullPath)):
            runSuite(shellExec, ascJarFile, builtinAbc, fullPath)
        # Fix: anchor the extension match.  The unanchored '\.as' also hit
        # names like foo.asm or foo.as.bak; '\.as$' matches the filter used
        # in writeResults().
        elif (re.search(r'\.as$', fullPath)):
            runTest(shellExec, ascJarFile, builtinAbc, fullPath)
def writeResults(testDirectory):
    """Recursively emit one CSV row per timed .as test under testDirectory."""
    for entry in os.listdir(testDirectory):
        path = os.path.normpath(os.path.join(testDirectory, entry))
        if (os.path.isdir(path)):
            writeResults(path)
            continue
        # don't want .abc files
        if (re.search('\.as$', path)):
            writeResultsToFile(path)
# Time the whole suite, then write one CSV row per test and flush the file.
runSuite(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4])
writeResults(sys.argv[4])
csvFile.close()
|
[
"os.path.abspath",
"os.putenv",
"os.path.join",
"os.path.isdir",
"os.popen",
"os.path.exists",
"time.time",
"subprocess.call",
"re.search",
"os.listdir",
"re.sub"
] |
[((2656, 2683), 'os.path.exists', 'os.path.exists', (['resultsFile'], {}), '(resultsFile)\n', (2670, 2683), False, 'import os\n'), ((598, 624), 'os.path.abspath', 'os.path.abspath', (['shellExec'], {}), '(shellExec)\n', (613, 624), False, 'import os\n'), ((965, 987), 'os.putenv', 'os.putenv', (['"""MODE"""', '"""0"""'], {}), "('MODE', '0')\n", (974, 987), False, 'import os\n'), ((1092, 1114), 'os.putenv', 'os.putenv', (['"""MODE"""', '"""4"""'], {}), "('MODE', '4')\n", (1101, 1114), False, 'import os\n'), ((1236, 1263), 'os.path.abspath', 'os.path.abspath', (['ascJarFile'], {}), '(ascJarFile)\n', (1251, 1263), False, 'import os\n'), ((1281, 1308), 'os.path.abspath', 'os.path.abspath', (['builtinAbc'], {}), '(builtinAbc)\n', (1296, 1308), False, 'import os\n'), ((1422, 1442), 'os.popen', 'os.popen', (['ascCommand'], {}), '(ascCommand)\n', (1430, 1442), False, 'import os\n'), ((1767, 1805), 're.search', 're.search', (['"""\\\\\\\\untyped\\\\\\\\"""', 'testFile'], {}), "('\\\\\\\\untyped\\\\\\\\', testFile)\n", (1776, 1805), False, 'import re\n'), ((2876, 2901), 'os.listdir', 'os.listdir', (['testDirectory'], {}), '(testDirectory)\n', (2886, 2901), False, 'import os\n'), ((3238, 3263), 'os.listdir', 'os.listdir', (['testDirectory'], {}), '(testDirectory)\n', (3248, 3263), False, 'import os\n'), ((736, 747), 'time.time', 'time.time', ([], {}), '()\n', (745, 747), False, 'import time\n'), ((775, 800), 'subprocess.call', 'subprocess.call', (['callArgs'], {}), '(callArgs)\n', (790, 800), False, 'import subprocess\n'), ((815, 826), 'time.time', 'time.time', ([], {}), '()\n', (824, 826), False, 'import time\n'), ((1832, 1884), 're.sub', 're.sub', (['"""\\\\\\\\untyped\\\\\\\\"""', '"""\\\\\\\\typed\\\\\\\\"""', 'testFile'], {}), "('\\\\\\\\untyped\\\\\\\\', '\\\\\\\\typed\\\\\\\\', testFile)\n", (1838, 1884), False, 'import re\n'), ((2986, 3009), 'os.path.isdir', 'os.path.isdir', (['fullPath'], {}), '(fullPath)\n', (2999, 3009), False, 'import os\n'), ((3348, 3371), 
'os.path.isdir', 'os.path.isdir', (['fullPath'], {}), '(fullPath)\n', (3361, 3371), False, 'import os\n'), ((2939, 2972), 'os.path.join', 'os.path.join', (['testDirectory', 'file'], {}), '(testDirectory, file)\n', (2951, 2972), False, 'import os\n'), ((3092, 3120), 're.search', 're.search', (['"""\\\\.as"""', 'fullPath'], {}), "('\\\\.as', fullPath)\n", (3101, 3120), False, 'import re\n'), ((3301, 3334), 'os.path.join', 'os.path.join', (['testDirectory', 'file'], {}), '(testDirectory, file)\n', (3313, 3334), False, 'import os\n'), ((3455, 3484), 're.search', 're.search', (['"""\\\\.as$"""', 'fullPath'], {}), "('\\\\.as$', fullPath)\n", (3464, 3484), False, 'import re\n')]
|
import numpy as np
def avoid_backward_action(action):
    """Zero the first two action components when every component is positive.

    Mutates `action` in place and returns it.
    NOTE(review): the original comment called this "backward movement" even
    though the test is all-components-positive -- confirm the action-space
    convention with the environment.
    """
    if not np.all(action > 0.0):
        return action
    action[0] = 0.0
    action[1] = 0.0
    return action
def reward_path_divergence(position_history, pos_ptr, reward_multiplier):
    """Reward based on the turn between the last two movement steps.

    Uses the cosine similarity between the previous step and the current
    step: straight-ahead motion (cos ~ 1) earns ~0, gentle turns earn a
    positive reward, and reversals (cos < 0) are penalized.  Standing
    still for both steps yields a flat -reward_multiplier penalty.
    """
    prev_step = position_history[pos_ptr - 1] - position_history[pos_ptr - 2]
    curr_step = position_history[pos_ptr] - position_history[pos_ptr - 1]
    prev_len = np.linalg.norm(prev_step)
    curr_len = np.linalg.norm(curr_step)
    if prev_len == 0 and curr_len == 0:
        return -1.0 * reward_multiplier
    ## L2 normalize (skip zero-length vectors to avoid division by zero)
    if prev_len > 0:
        prev_step = prev_step / prev_len
    if curr_len > 0:
        curr_step = curr_step / curr_len
    cos_sim = np.sum(prev_step * curr_step)
    if cos_sim > 0.0:
        return reward_multiplier * (1.0 - cos_sim)
    return reward_multiplier * cos_sim
|
[
"numpy.linalg.norm",
"numpy.sum",
"numpy.all"
] |
[((114, 134), 'numpy.all', 'np.all', (['(action > 0.0)'], {}), '(action > 0.0)\n', (120, 134), True, 'import numpy as np\n'), ((421, 439), 'numpy.linalg.norm', 'np.linalg.norm', (['v1'], {}), '(v1)\n', (435, 439), True, 'import numpy as np\n'), ((449, 467), 'numpy.linalg.norm', 'np.linalg.norm', (['v2'], {}), '(v2)\n', (463, 467), True, 'import numpy as np\n'), ((640, 655), 'numpy.sum', 'np.sum', (['(v1 * v2)'], {}), '(v1 * v2)\n', (646, 655), True, 'import numpy as np\n')]
|
import pandas as pd
def process_raw_df(df_raw, name='Value', copy=False):
    """Reshape a wide name-by-year table into a long one-column frame.

    Drops the 'Total' column, stacks year columns into the index, and
    returns a frame indexed by ('Name', 'Year') with a single column called
    `name`.  The `copy` parameter is accepted for backward compatibility
    but is unused.
    """
    stacked = df_raw.drop('Total', axis=1).stack()
    return stacked.rename_axis(['Name', 'Year']).to_frame(name=name)
def printif(msg, verbose=True):
    """Print `msg` only when `verbose` is truthy."""
    if not verbose:
        return
    print(msg)
def merge_data(girls_file, boys_file, verbose=True):
    """Load the girls'/boys' name CSVs and outer-join them into one frame.

    Each input is a wide name-by-year table with a 'Total' column.  The
    result has one row per (name, year) with 'Girls Count' / 'Boys Count'
    columns (missing counts filled with 0) and 'Year' as an int column.

    Bug fix: every progress message now honours `verbose` -- three of the
    printif calls previously omitted the flag and always printed.
    """
    printif(f'Loading {girls_file}', verbose)
    girl_names = pd.read_csv(girls_file, index_col=0)
    printif(f'Loading {boys_file}', verbose)
    boy_names = pd.read_csv(boys_file, index_col=0)
    printif('Processing raw data', verbose)
    girl_data = process_raw_df(girl_names, name='Girls Count')
    boy_data = process_raw_df(boy_names, name='Boys Count')
    printif('Merging data', verbose)
    data = girl_data.join(boy_data, how='outer').fillna(0)
    data = data.reset_index(level='Year')
    data['Year'] = data['Year'].astype(int)
    return data
def get_names_data(girls_file='data/bc-popular-girls-names.csv',
                   boys_file='data/bc-popular-boys-names.csv',
                   verbose=True):
    """Build the full derived name-popularity table.

    Merges the girls' and boys' CSVs, keeps only (name, year) rows with at
    least one birth, and derives yearly-percentage shares, gender fractions
    and first/last-letter features from the name index.
    """
    df = merge_data(girls_file, boys_file, verbose=verbose)
    printif('Performing calculations', verbose)
    df['Both Count'] = df[['Girls Count', 'Boys Count']].sum(axis=1)
    df = df[df['Both Count'] > 0]
    groups = ['Girls', 'Boys', 'Both']
    total_cols = {f'{g} Count': f'{g} Yearly Total (All Names)' for g in groups}
    # Per-year totals across all names, merged back onto every row.
    per_year = df.groupby('Year').sum().rename(columns=total_cols)
    df = df.merge(per_year, how='left', left_on='Year',
                  right_index=True)
    existing = list(df.columns)
    # Letter-based features derived from the name itself (the index).
    df['First Letter'] = df.index.str[0]
    df['Last Letter'] = df.index.str[-1]
    df['Last 3 Letters'] = df.index.str[-3:]
    df = df[['First Letter', 'Last Letter', 'Last 3 Letters'] + existing]
    for g in groups:
        df[f'% of {g}'] = 100 * df[f'{g} Count'] / df[f'{g} Yearly Total (All Names)']
    for g in ('Girls', 'Boys'):
        df[f'{g} Fraction'] = df[f'{g} Count'] / df['Both Count']
    # The intermediate yearly-total columns are only needed for the ratios.
    return df.drop(total_cols.values(), axis=1)
|
[
"pandas.read_csv"
] |
[((391, 427), 'pandas.read_csv', 'pd.read_csv', (['girls_file'], {'index_col': '(0)'}), '(girls_file, index_col=0)\n', (402, 427), True, 'import pandas as pd\n'), ((480, 515), 'pandas.read_csv', 'pd.read_csv', (['boys_file'], {'index_col': '(0)'}), '(boys_file, index_col=0)\n', (491, 515), True, 'import pandas as pd\n')]
|
import numpy as np
from gym import spaces
from brs_envs.base_envs import BaseURDFBulletEnv
from brs_envs.base_envs import parse_collision
from brs_envs.rocket_landing_scene import RocketLandingScene
from brs_envs.martlet9.martlet9_robot import Martlet9Robot
class RocketLanderEnv(BaseURDFBulletEnv):
  """PyBullet environment: land a Martlet 9 rocket on a pad surrounded by water.

  step() returns the negative of (control cost + collision cost); the
  constants below shape the collision cost computed in processCollisions().
  """
  # Cost multiplier applied to touchdown speed.
  LANDING_SPEED_PENALTY = 5
  # Touching down faster than this ends the episode as a crash.
  LANDING_SPEED_SURVIVE_THRESH = 10
  DEATH_PENALTY = 500
  # At or below this speed with all feet down the rocket counts as landed.
  LANDED_SPEED_THRESH = 1e-1
  LANDED_BONUS = DEATH_PENALTY
  ACCURACY_BONUS = DEATH_PENALTY / 10
  HIT_WATER_PENALTY = DEATH_PENALTY
  # Max lateral distance from the pad centre before the episode is aborted.
  POSITION_THRESH = 20
  def __init__(self,
               render=False,
               gravity=9.8,
               timestep=1/60,
               sticky=1,
               max_lateral_offset=10,
               max_vertical_offset=10,
               max_roll_offset=0.5,
               max_pitch_offset=0.5,
               max_yaw_offset=0.1,
               mean_robot_start_height=100):
    """Create the environment.

    The ``max_*_offset`` arguments bound the uniform randomisation of the
    rocket's start pose (see robotStartPos/robotStartOri);
    ``mean_robot_start_height`` is the centre of the sampled start
    altitude.  ``gravity``/``timestep``/``sticky`` are forwarded to the
    physics scene.
    """
    BaseURDFBulletEnv.__init__(self, render)
    # Foot link ids that were in contact with the pad on the previous step.
    self._feet_landed = set()
    self._gravity = gravity
    self._timestep = timestep
    self._sticky = sticky
    self._max_lateral_offset = max_lateral_offset
    self._max_vertical_offset = max_vertical_offset
    self._max_roll_offset = max_roll_offset
    self._max_pitch_offset = max_pitch_offset
    self._max_yaw_offset = max_yaw_offset
    self._mean_robot_start_height = mean_robot_start_height
    self.observation_space = Martlet9Robot.observation_space
    self.action_space = Martlet9Robot.action_space
  def step(self, a):
    """Apply action ``a``, advance physics one tick, and score the result.

    Returns ``(state, reward, done, info)`` where reward is the negative
    of control cost plus collision cost.
    """
    control_cost = self.robot.applyControls(self.p, a)
    self.scene.step()
    state = self.robot.getState(self.p)
    if self.renderable:
      self.moveCamera(state)
      self.drawArtifacts(a)
    # Contact points against the water plane and the landing pad.
    collisions_in_water = self.p.getContactPoints(bodyA=self.scene.plane)
    collisions_on_pad = self.p.getContactPoints(bodyA=self.scene.pad)
    collision_cost, done = self.processCollisions(state,
                                                  self._prev_state,
                                                  collisions_in_water,
                                                  collisions_on_pad)
    self._prev_state = state
    return state, -(control_cost + collision_cost), done, {}
  def processCollisions(self, state, prev_state, water_col, pad_col):
    """Turn this step's contact points into a ``(cost, done)`` pair.

    Episode-ending cases: splashdown in water; drifting above the start
    ceiling or beyond POSITION_THRESH laterally; any non-foot contact with
    the pad; touching down faster than LANDING_SPEED_SURVIVE_THRESH; or a
    slow three-footed touchdown (success, rewarded with LANDED_BONUS).
    """
    if len(water_col) > 0:
      return RocketLanderEnv.HIT_WATER_PENALTY + RocketLanderEnv.DEATH_PENALTY, True
    num_landed_feet = 0
    new_landed_feet = 0
    allowable_contacts = [self.robot.foot1_link, self.robot.foot2_link, self.robot.foot3_link]
    feet_landed = set()
    prev_state_desc = Martlet9Robot.describeState(prev_state)
    state_desc = Martlet9Robot.describeState(state)
    # Flying above the start ceiling means the rocket is escaping upward.
    if state_desc['position'][-1] >= self._mean_robot_start_height + self._max_vertical_offset:
      return RocketLanderEnv.DEATH_PENALTY, True
    lateral_dist_to_center = np.linalg.norm(state_desc['position'][:2])
    if lateral_dist_to_center > RocketLanderEnv.POSITION_THRESH:
      return RocketLanderEnv.DEATH_PENALTY, True
    # Any pad contact that is not one of the three feet is a crash.
    for collision in map(parse_collision, pad_col):
      other_link = collision['linkIndexB']
      if other_link not in allowable_contacts:
        return RocketLanderEnv.DEATH_PENALTY, True
      num_landed_feet += 1
      feet_landed.add(other_link)
      if other_link not in self._feet_landed:
        new_landed_feet += 1
    self._feet_landed = feet_landed
    if num_landed_feet == 0: # No collisions, no penalties
      # Note: multiplier is 0.00, so this is currently a zero cost.
      return 0.00 * np.linalg.norm(state_desc['position']), False
    # Use the faster of the current/previous speed so a hard impact is not
    # masked by the post-impact velocity.
    speed = np.linalg.norm(state_desc['velocity'])
    prev_speed = np.linalg.norm(prev_state_desc['velocity'])
    if max(speed, prev_speed) > RocketLanderEnv.LANDING_SPEED_SURVIVE_THRESH:
      speed_overshoot = max(speed, prev_speed) - RocketLanderEnv.LANDING_SPEED_SURVIVE_THRESH
      speed_penalty = RocketLanderEnv.LANDING_SPEED_PENALTY * (speed_overshoot**2)
      # Crashing near the pad centre is penalised less than crashing far away.
      center_bonus = max(1, 10 - lateral_dist_to_center) * RocketLanderEnv.ACCURACY_BONUS
      # print("Landed too fast! Speed was {}".format(max(speed, prev_speed)))
      return RocketLanderEnv.DEATH_PENALTY + speed_penalty - center_bonus, True
    landing_cost = max(speed, prev_speed) * RocketLanderEnv.LANDING_SPEED_PENALTY * min(1, new_landed_feet)
    if (num_landed_feet < 3) or (speed > RocketLanderEnv.LANDED_SPEED_THRESH):
      # return landing_cost, False
      return 0, False
    # print("Smooth landing!")
    return landing_cost - RocketLanderEnv.LANDED_BONUS, True
  def reset(self):
    """Reset the scene and respawn the rocket at a randomised start pose."""
    state = BaseURDFBulletEnv.reset(self)
    self.robot.addToScene(self.scene, self.robotStartPos(), self.robotStartOri())
    self._prev_state = self.robot.getState(self.p)
    return self._prev_state
  def drawArtifacts(self, control):
    """Tint the (visual-only) thruster flame and steering smoke from the
    current control input; only called when rendering."""
    if self.robot.thruster_fire_id is not None:
      r = 1.0
      g = 0.8 * control[0]
      b = 0.3
      a = min(1.0, 0.9 * control[0])
      self.p.changeVisualShape(self.robot.uid,
                               self.robot.thruster_fire_id,
                               rgbaColor=[r, g, b, a])
    if self.robot.steer_smoke_id is not None:
      r = 0.4
      g = 0.4
      b = 0.4
      a = min(1.0, 0.2 * control[2])
      self.p.changeVisualShape(self.robot.uid,
                               self.robot.steer_smoke_id,
                               rgbaColor=[r, g, b, a])
  def moveCamera(self, state):
    """Follow the rocket with the debug camera, zooming out with altitude."""
    target = state[:3]
    ori = self.p.getEulerFromQuaternion(state[3:7])
    yaw = 20
    pitch = state[2] / 100
    distance = 0.3 * state[2] + 50
    self.p.resetDebugVisualizerCamera(distance, yaw, pitch, target)
  def initializeScene(self):
    """Build the water + landing-pad physics scene."""
    return RocketLandingScene(self.p, gravity=self._gravity, timestep=self._timestep, sticky=self._sticky)
  def initializeRobot(self):
    """Instantiate the Martlet 9 rocket model."""
    return Martlet9Robot()
  def robotStartPos(self):
    """Sample a random start position around the configured mean height."""
    max_lateral_offset = float(self._max_lateral_offset)
    max_vertical_offset = float(self._max_vertical_offset)
    mean_robot_start_height = float(self._mean_robot_start_height)
    x = np.random.uniform(-max_lateral_offset, max_lateral_offset)
    y = np.random.uniform(-max_lateral_offset, max_lateral_offset)
    z = mean_robot_start_height + np.random.uniform(-max_vertical_offset, max_vertical_offset)
    return [x, y, z]
  def robotStartOri(self):
    """Sample a random start orientation (quaternion) within the offsets."""
    max_roll_offset = float(self._max_roll_offset)
    max_pitch_offset = float(self._max_pitch_offset)
    max_yaw_offset = float(self._max_yaw_offset)
    roll = np.random.uniform(-max_roll_offset, max_roll_offset)
    pitch = np.random.uniform(-max_pitch_offset, max_pitch_offset)
    yaw = np.random.uniform(-max_yaw_offset, max_yaw_offset)
    return self.p.getQuaternionFromEuler([roll, pitch, yaw])
|
[
"brs_envs.rocket_landing_scene.RocketLandingScene",
"numpy.random.uniform",
"brs_envs.base_envs.BaseURDFBulletEnv.__init__",
"brs_envs.martlet9.martlet9_robot.Martlet9Robot",
"numpy.linalg.norm",
"brs_envs.martlet9.martlet9_robot.Martlet9Robot.describeState",
"brs_envs.base_envs.BaseURDFBulletEnv.reset"
] |
[((955, 995), 'brs_envs.base_envs.BaseURDFBulletEnv.__init__', 'BaseURDFBulletEnv.__init__', (['self', 'render'], {}), '(self, render)\n', (981, 995), False, 'from brs_envs.base_envs import BaseURDFBulletEnv\n'), ((2750, 2789), 'brs_envs.martlet9.martlet9_robot.Martlet9Robot.describeState', 'Martlet9Robot.describeState', (['prev_state'], {}), '(prev_state)\n', (2777, 2789), False, 'from brs_envs.martlet9.martlet9_robot import Martlet9Robot\n'), ((2811, 2845), 'brs_envs.martlet9.martlet9_robot.Martlet9Robot.describeState', 'Martlet9Robot.describeState', (['state'], {}), '(state)\n', (2838, 2845), False, 'from brs_envs.martlet9.martlet9_robot import Martlet9Robot\n'), ((3034, 3076), 'numpy.linalg.norm', 'np.linalg.norm', (["state_desc['position'][:2]"], {}), "(state_desc['position'][:2])\n", (3048, 3076), True, 'import numpy as np\n'), ((3775, 3813), 'numpy.linalg.norm', 'np.linalg.norm', (["state_desc['velocity']"], {}), "(state_desc['velocity'])\n", (3789, 3813), True, 'import numpy as np\n'), ((3835, 3878), 'numpy.linalg.norm', 'np.linalg.norm', (["prev_state_desc['velocity']"], {}), "(prev_state_desc['velocity'])\n", (3849, 3878), True, 'import numpy as np\n'), ((4815, 4844), 'brs_envs.base_envs.BaseURDFBulletEnv.reset', 'BaseURDFBulletEnv.reset', (['self'], {}), '(self)\n', (4838, 4844), False, 'from brs_envs.base_envs import BaseURDFBulletEnv\n'), ((6059, 6158), 'brs_envs.rocket_landing_scene.RocketLandingScene', 'RocketLandingScene', (['self.p'], {'gravity': 'self._gravity', 'timestep': 'self._timestep', 'sticky': 'self._sticky'}), '(self.p, gravity=self._gravity, timestep=self._timestep,\n sticky=self._sticky)\n', (6077, 6158), False, 'from brs_envs.rocket_landing_scene import RocketLandingScene\n'), ((6202, 6217), 'brs_envs.martlet9.martlet9_robot.Martlet9Robot', 'Martlet9Robot', ([], {}), '()\n', (6215, 6217), False, 'from brs_envs.martlet9.martlet9_robot import Martlet9Robot\n'), ((6455, 6513), 'numpy.random.uniform', 'np.random.uniform', 
(['(-max_lateral_offset)', 'max_lateral_offset'], {}), '(-max_lateral_offset, max_lateral_offset)\n', (6472, 6513), True, 'import numpy as np\n'), ((6526, 6584), 'numpy.random.uniform', 'np.random.uniform', (['(-max_lateral_offset)', 'max_lateral_offset'], {}), '(-max_lateral_offset, max_lateral_offset)\n', (6543, 6584), True, 'import numpy as np\n'), ((6919, 6971), 'numpy.random.uniform', 'np.random.uniform', (['(-max_roll_offset)', 'max_roll_offset'], {}), '(-max_roll_offset, max_roll_offset)\n', (6936, 6971), True, 'import numpy as np\n'), ((6988, 7042), 'numpy.random.uniform', 'np.random.uniform', (['(-max_pitch_offset)', 'max_pitch_offset'], {}), '(-max_pitch_offset, max_pitch_offset)\n', (7005, 7042), True, 'import numpy as np\n'), ((7057, 7107), 'numpy.random.uniform', 'np.random.uniform', (['(-max_yaw_offset)', 'max_yaw_offset'], {}), '(-max_yaw_offset, max_yaw_offset)\n', (7074, 7107), True, 'import numpy as np\n'), ((6623, 6683), 'numpy.random.uniform', 'np.random.uniform', (['(-max_vertical_offset)', 'max_vertical_offset'], {}), '(-max_vertical_offset, max_vertical_offset)\n', (6640, 6683), True, 'import numpy as np\n'), ((3713, 3751), 'numpy.linalg.norm', 'np.linalg.norm', (["state_desc['position']"], {}), "(state_desc['position'])\n", (3727, 3751), True, 'import numpy as np\n')]
|
import Resources.Usuario.querys_constants as qc
import Resources.Usuario.params_constants as pc
from Utils.crypto import Crypto
from Querys.query import Query
from pymysql import Error
class UsuarioQuery(Query):
    """Data-access queries for the usuario (user) table."""

    def insert_usuario(self, usuario):
        """Insert a new user, hashing its password first.

        ``usuario`` is a sequence whose positions are used as:
        [0] username, [1] plain-text password (replaced in place by its
        hash), [4] e-mail.  Returns an HTTP-like status code:
        201 created, 202 username/e-mail already taken, 500 database error.
        """
        usuario[1] = Crypto.get_crypto(usuario[1])
        try:
            # The ``with`` block closes the cursor on exit; the old
            # ``finally: cursor.close()`` was redundant and raised
            # NameError whenever get_cursor() itself failed.
            with self.get_cursor() as cursor:
                if self.check_user(usuario[0], usuario[4]):
                    query = self.get_insert_query(pc.PARAMS, qc.USER_TABLE)
                    cursor.execute(query, usuario)
                    id_usuario = cursor.lastrowid
                    # Link the default set of subjects to the new user.
                    cursor.execute(qc.USUARIO_ASIGNATURA_ALL, [id_usuario])
                    self.get_connection().commit()
                    return 201
                else:
                    return 202
        except Error as e:
            print("Error %d: %s" % (e.args[0], e.args[1]))
            self.get_connection().rollback()
            return 500

    def check_user(self, usuario, correo):
        """Return True when neither the username nor the e-mail is taken."""
        check = self.execute_custom_query(qc.UNIQUE_USER, [usuario, correo])
        # ``== 0`` instead of ``is 0``: identity comparison on ints is a
        # CPython implementation accident and a SyntaxWarning on 3.8+.
        return len(check) == 0

    def add_coins(self, id_usuario, monedas):
        """Add ``monedas`` coins to the given user's balance."""
        return self.execute_update(qc.COINS_COLUMN, qc.USER_TABLE, [
            monedas], qc.USERID_WHERE_COLUMN, [id_usuario])
|
[
"Utils.crypto.Crypto.get_crypto"
] |
[((275, 304), 'Utils.crypto.Crypto.get_crypto', 'Crypto.get_crypto', (['usuario[1]'], {}), '(usuario[1])\n', (292, 304), False, 'from Utils.crypto import Crypto\n')]
|
# -*- coding: utf-8 -*-
"""
Defines a convolutional neural network with residual connections.
Based on the architecture described in:
<NAME>, <NAME>, <NAME>, <NAME>. "Deep residual learning
for image recognition". https://arxiv.org/abs/1512.03385
With batch normalization as described in:
<NAME>, <NAME>. "Batch normalization: Accelerating
deep network training by reducing internal covariate shift".
https://arxiv.org/abs/1502.03167
And parametric ReLU activations as described in:
<NAME>, <NAME>, <NAME>, <NAME>. "Delving Deep into
Rectifiers: Surpassing Human-Level Performance on ImageNet Classification".
https://arxiv.org/abs/1502.01852
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from nnetmaker.model import *
from nnetmaker.util import *
class ConvNetClassifierModel0(BaseModel):
  """Residual convolutional image classifier (see module docstring).

  Convolutional trunk with zero-padded residual connections, followed by
  "fully connected" layers implemented as square convolutions, a global
  average pool over both spatial axes, and a softmax over the classes.
  """
  def _process_args(self, model_args_validator, **kwargs):
    """Read this model's hyperparameters from the validated config."""
    self._learn_alpha = model_args_validator.get("learn_alpha", ATYPE_BOOL, True)
    self._alpha = model_args_validator.get("alpha", ATYPE_FLOAT, True)
    self._dropout_rate = model_args_validator.get("dropout_rate", ATYPE_FLOAT, True)
    self._conv_layer_sizes = model_args_validator.get("conv_layer_sizes", ATYPE_INTS_LIST, True)
    self._conv_layer_dims = model_args_validator.get("conv_layer_dims", ATYPE_INTS_LIST, True)
    self._fc_layer_dims = model_args_validator.get("fc_layer_dims", ATYPE_INTS_LIST, True)
    self._num_input_channels = model_args_validator.get("num_input_channels", ATYPE_INT, True)
    self._input_size = model_args_validator.get("input_size", ATYPE_INT, True)
    self._output_size = model_args_validator.get("output_size", ATYPE_INT, True)
    self._add_biases = model_args_validator.get("add_biases", ATYPE_BOOL, True)
  def _get_input_var_names(self, **kwargs):
    """Single model input: the image batch."""
    return ["img"]
  def _get_target_var_names(self, **kwargs):
    """Single training target: the class-probability vector."""
    return ["predictions"]
  def _build_cost_targets(self, in_vars, target_vars, out_vars, **kwargs):
    """Pair target and predicted distributions for the loss (no mask)."""
    cost_targets = []
    cost_targets.append((target_vars["predictions"], out_vars["predictions"], None))
    return cost_targets
  def _build_metrics(self, in_vars, target_vars, out_vars, **kwargs):
    """Report classification accuracy from the argmax of each distribution."""
    metrics = {}
    targets = tf.argmax(target_vars["predictions"], axis=1)
    predicted = tf.argmax(out_vars["predictions"], axis=1)
    metrics["accuracy"] = tf.metrics.accuracy(targets, predicted)
    return metrics
  def _build_prediction_network(self, input_vars, is_training, **kwargs):
    """Build the residual conv trunk and the conv-as-FC head.

    Returns ``(out_vars, weight_vars, weight_init_tups)`` as expected by
    the BaseModel machinery.
    """
    weight_vars = []
    weight_init_tups = []
    # Build convolutional layers.
    prev_var = input_vars["img"]
    prev_dims = self._num_input_channels
    for i in range(len(self._conv_layer_sizes)):
      size = self._conv_layer_sizes[i]
      cur_dims = self._conv_layer_dims[i]
      h_var = self._add_op_square_conv2d(prev_var, "conv%d" % i, weight_vars,
                                         weight_init_tups, self._add_biases,
                                         prev_dims, cur_dims, size)
      h_var = self._add_op_batch_norm(h_var, "norm%d" % i, 3, is_training)
      h_var = self._add_op_relu(h_var, "relu%d" % i, alpha=self._alpha,
                                is_variable=self._learn_alpha)
      # Add zero padded residual connection.
      if cur_dims > prev_dims:
        # Zero-pad the last axis of prev_var so shapes match for the add.
        num_zeros = cur_dims - prev_dims
        paddings = np.zeros((4, 2), dtype=int)
        paddings[3, 1] = num_zeros
        h_var = h_var + tf.pad(prev_var, paddings)
      else:
        # NOTE(review): prev_var[:cur_dims] slices axis 0 (the batch axis),
        # not the channel axis; a channel-wise residual would be
        # prev_var[..., :cur_dims] — confirm this is intended.
        h_var = h_var + prev_var[:cur_dims]
      prev_dims = cur_dims
      prev_var = h_var
    # Build fully connected and output layers.
    h_var = prev_var
    h_size = self._input_size
    for i, cur_dims in enumerate(self._fc_layer_dims + [self._output_size]):
      if self._dropout_rate > 0:
        h_var = self._add_op_dropout(h_var, "dropout%d" % i, self._dropout_rate,
                                     is_training)
      # A conv the size of the remaining feature map acts as an FC layer;
      # after the first one the spatial size collapses to 1.
      h_var = self._add_op_square_conv2d(h_var, "fc%d" % i, weight_vars,
                                         weight_init_tups, self._add_biases,
                                         prev_dims, cur_dims, h_size, pad=False)
      h_size = 1
      prev_dims = cur_dims
      if i < len(self._fc_layer_dims): # Hidden fully connected layer.
        h_var = self._add_op_batch_norm(h_var, "fc_norm%d" % i, 3, is_training)
        h_var = self._add_op_relu(h_var, "fc_relu%d" % i, alpha=self._alpha,
                                  is_variable=self._learn_alpha)
      else: # Final output layer.
        # Global average pool over both spatial axes, then class softmax.
        h_var = tf.reduce_mean(h_var, axis=1)
        h_var = tf.reduce_mean(h_var, axis=1)
        h_var = tf.nn.softmax(h_var)
    out_vars = {}
    out_vars["predictions"] = h_var
    return out_vars, weight_vars, weight_init_tups
|
[
"tensorflow.nn.softmax",
"tensorflow.metrics.accuracy",
"tensorflow.argmax",
"tensorflow.pad",
"numpy.zeros",
"tensorflow.reduce_mean"
] |
[((2294, 2339), 'tensorflow.argmax', 'tf.argmax', (["target_vars['predictions']"], {'axis': '(1)'}), "(target_vars['predictions'], axis=1)\n", (2303, 2339), True, 'import tensorflow as tf\n'), ((2356, 2398), 'tensorflow.argmax', 'tf.argmax', (["out_vars['predictions']"], {'axis': '(1)'}), "(out_vars['predictions'], axis=1)\n", (2365, 2398), True, 'import tensorflow as tf\n'), ((2426, 2465), 'tensorflow.metrics.accuracy', 'tf.metrics.accuracy', (['targets', 'predicted'], {}), '(targets, predicted)\n', (2445, 2465), True, 'import tensorflow as tf\n'), ((3443, 3470), 'numpy.zeros', 'np.zeros', (['(4, 2)'], {'dtype': 'int'}), '((4, 2), dtype=int)\n', (3451, 3470), True, 'import numpy as np\n'), ((4634, 4663), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['h_var'], {'axis': '(1)'}), '(h_var, axis=1)\n', (4648, 4663), True, 'import tensorflow as tf\n'), ((4680, 4709), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['h_var'], {'axis': '(1)'}), '(h_var, axis=1)\n', (4694, 4709), True, 'import tensorflow as tf\n'), ((4726, 4746), 'tensorflow.nn.softmax', 'tf.nn.softmax', (['h_var'], {}), '(h_var)\n', (4739, 4746), True, 'import tensorflow as tf\n'), ((3530, 3556), 'tensorflow.pad', 'tf.pad', (['prev_var', 'paddings'], {}), '(prev_var, paddings)\n', (3536, 3556), True, 'import tensorflow as tf\n')]
|
import argparse
import gevent.monkey
from closeio_api import APIError, Client as CloseIO_API
from gevent.pool import Pool
# Make blocking stdlib I/O cooperative so the gevent Pool below can overlap
# the API calls made by each greenlet.
gevent.monkey.patch_all()
parser = argparse.ArgumentParser(
    description='Restore an array of deleted leads by ID. This CANNOT restore status changes or call recordings.'
)
parser.add_argument('--api-key', '-k', required=True, help='API Key')
# Exactly one of --leads / --leads-file must be supplied.
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument(
    '--leads', help='List of lead IDs in a form of a comma separated list'
)
group.add_argument(
    '--leads-file',
    help='List of lead IDs in a form of a textual file with single column of lead IDs',
)
args = parser.parse_args()
api = CloseIO_API(args.api_key)
# Array of Lead IDs. Add the IDs you want to restore here.
if args.leads:
    lead_ids = args.leads.split(",")
elif args.leads_file:
    with open(args.leads_file) as f:
        lines = f.readlines()
    lead_ids = [el.strip() for el in lines]  # Strip new lines
    lead_ids = list(filter(None, lead_ids))  # Strip empty lines
# Create a list of active users for the sake of posting opps.
org_id = api.get('me')['organizations'][0]['id']
memberships = api.get(
    'organization/' + org_id, params={'_fields': 'memberships'}
)['memberships']
active_users = [i['user_id'] for i in memberships]
# Array to keep track of number of leads restored. Because we use gevent, we can't have a standard counter variable.
total_leads_restored = []
# This is a list of object types you want to restore on the lead. We can also add activity.email, but in this script
# it's assumed that email sync will take care of all of the emails that were deleted, assuming the same email accounts
# are connected to Close.
object_types = [
    'contact',
    'opportunity',
    'task.lead',
    'activity.call',
    'activity.note',
    'activity.sms',
]
# This is a dictionary that stores a mapping between old contact ids and new contact ids for restoration purposes.
contact_id_mapping = {}
def restore_objects(object_type, old_lead_id, new_lead_id):
    """Re-post every deleted object of *object_type* from the old lead onto
    the restored lead, paging through the deletion events.

    Relies on the module-level ``api``, ``active_users`` and
    ``contact_id_mapping`` globals; contact IDs are remapped as contacts
    are recreated so later child objects can point at the new contacts.
    """
    has_more = True
    cursor = ''
    while has_more:
        resp_objects = api.get(
            'event',
            params={
                'object_type': object_type,
                'action': 'deleted',
                '_cursor': cursor,
                'lead_id': old_lead_id,
            },
        )
        for event in resp_objects['data']:
            if 'previous_data' in event:
                prev = event['previous_data']
                if 'id' in prev:
                    del prev['id']
                # Map old contact ID to new contact ID (drop it when the
                # contact has not been recreated).
                if 'contact_id' in prev:
                    if prev['contact_id'] in contact_id_mapping:
                        prev['contact_id'] = contact_id_mapping[
                            prev['contact_id']
                        ]
                    else:
                        del prev['contact_id']
                # Delete quality_info when posting a call
                if 'quality_info' in prev:
                    del prev['quality_info']
                # Set call source to External
                if object_type == 'activity.call':
                    prev['source'] = 'External'
                # If the user assigned to the opp is no longer in the organization, we still want to post the opp, we just
                # can't have it assigned to that user_id.
                if (
                    object_type == 'opportunity'
                    and 'user_id' in prev
                    and prev['user_id'] not in active_users
                ):
                    del prev['user_id']
                # If anything was in outbox or scheduled, switch it to draft so it doesn't send accidentally at the wrong time.
                # BUG FIX: this used to read ``prev['status'] == 'draft'`` —
                # a no-op comparison — so outbox/scheduled items kept their
                # status and could send.  (.get also avoids a KeyError when
                # 'status' is absent.)
                if object_type in ['activity.sms', 'activity.call'] and prev.get(
                    'status'
                ) in ['outbox', 'scheduled']:
                    prev['status'] = 'draft'
                # Set endpoint for posting. We need to change the activity and task object types to match the post endpoint
                # for their respective types.
                endpoint = object_type
                if 'activity' in endpoint:
                    endpoint = endpoint.replace('.', '/')
                elif '.lead' in endpoint:
                    endpoint = endpoint.replace('.lead', '')
                prev['lead_id'] = new_lead_id
                # Post the object to the new lead.
                try:
                    post_request = api.post(endpoint, data=prev)
                    # If we posted a contact, add the new contact id to the dictionary.
                    if object_type == 'contact':
                        contact_id_mapping[event['object_id']] = post_request[
                            'id'
                        ]
                except APIError as e:
                    print(
                        f"ERROR: Could not post {object_type} {event['object_id']} because {str(e)}"
                    )
        cursor = resp_objects['cursor_next']
        has_more = bool(resp_objects['cursor_next'])
def remove_task_completed_activities(new_lead_id):
    """Delete every task-completed activity on the restored lead.

    Restored "task completed" events would all appear at the top of the
    new lead's timeline regardless of when they actually happened, so they
    are stripped.  Pages through the endpoint with ``_skip`` and collects
    all ids before deleting.
    """
    has_more = True
    offset = 0
    task_completed_ids = []
    while has_more:
        resp_task_completed = api.get(
            'activity/task_completed',
            params={'_skip': offset, 'lead_id': new_lead_id, '_fields': 'id'},
        )
        # BUG FIX: this list used to be reassigned on every page, so only
        # the final page of activities was ever deleted; accumulate instead.
        task_completed_ids += [i['id'] for i in resp_task_completed['data']]
        offset += len(resp_task_completed['data'])
        has_more = resp_task_completed['has_more']
    for completed_id in task_completed_ids:
        try:
            api.delete('activity/task_completed/' + completed_id)
        except APIError as e:
            print(
                f"Cannot delete completed task activity {completed_id} because {str(e)}"
            )
def restore_lead(old_lead_id):
    """Recreate a deleted lead (and its child objects) from its deletion event.

    Looks up the lead's 'deleted' event, re-posts its previous data, then
    restores each child object type and strips restored task-completed
    activities.  Progress is tracked in the shared total_leads_restored list.
    """
    resp_lead = api.get(
        'event',
        params={
            'object_type': 'lead',
            'action': 'deleted',
            'lead_id': old_lead_id,
        },
    )
    events = resp_lead['data']
    # Guard: nothing to restore without a deletion event carrying data.
    if not events or not events[0].get('previous_data'):
        print(
            f"{old_lead_id} could not be restored because there is no data to restore"
        )
        return
    prev = events[0]['previous_data']
    prev.pop('id', None)
    # Post New Lead.
    try:
        post_lead = api.post('lead', data=prev)
        if 'id' in post_lead:
            new_lead_id = post_lead['id']
            # Restore all objects on the lead.
            for object_type in object_types:
                restore_objects(object_type, old_lead_id, new_lead_id)
            # Completed-task activities would be posted at the top of the new
            # activity timeline regardless of when they were completed.
            remove_task_completed_activities(new_lead_id)
            total_leads_restored.append(1)
            print(f"{len(total_leads_restored)}: Restored {old_lead_id}")
    except APIError as e:
        print(f"{old_lead_id}: Lead could not be posted because {str(e)}")
print(f"Total leads being restored: {len(lead_ids)}")
# Restore leads concurrently, five greenlets at a time.
pool = Pool(5)
pool.map(restore_lead, lead_ids)
print(f"Total leads restored {len(total_leads_restored)}")
print(
    f"Total leads not restored {(len(lead_ids) - len(total_leads_restored))}"
)
|
[
"argparse.ArgumentParser",
"gevent.pool.Pool",
"closeio_api.Client"
] |
[((160, 304), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Restore an array of deleted leads by ID. This CANNOT restore status changes or call recordings."""'}), "(description=\n 'Restore an array of deleted leads by ID. This CANNOT restore status changes or call recordings.'\n )\n", (183, 304), False, 'import argparse\n'), ((690, 715), 'closeio_api.Client', 'CloseIO_API', (['args.api_key'], {}), '(args.api_key)\n', (701, 715), True, 'from closeio_api import APIError, Client as CloseIO_API\n'), ((7371, 7378), 'gevent.pool.Pool', 'Pool', (['(5)'], {}), '(5)\n', (7375, 7378), False, 'from gevent.pool import Pool\n')]
|
import sys
sys.path.append("./")
import shiftnet_cuda
import numpy as np
import torch
import torch.cuda
def main():
  """Smoke-test the generic modulo-shift CUDA kernel on a fixed ramp pattern.

  Requires a CUDA device and the shiftnet_cuda extension.
  """
  # Fill every (batch, channel) slice with the same 18x18 ramp 0..323.
  pattern = np.arange(18 * 18).reshape(18, 18)
  src_buf = np.zeros((32, 64, 18, 18), dtype=np.float32)
  src_buf[...] = pattern
  x_hin = torch.from_numpy(src_buf).clone().float()
  y_hin = torch.zeros(32, 64, 18, 18).type(torch.FloatTensor)
  x = x_hin.cuda()
  y = y_hin.cuda()
  # args (7, 2, -1): kernel semantics defined inside shiftnet_cuda —
  # presumably size/stride/direction; TODO confirm against the extension.
  ret = shiftnet_cuda.moduloshiftgeneric_nchw(x, y, 7, 2, -1)
  assert ret == 1  # the kernel signals success with 1
  x_hout = x.cpu()
  y_hout = y.cpu()
  print(x_hout[0, 0, :18, :18])
  for ch in range(9):
    print(y_hout[0, ch, :18, :18])
if __name__ == "__main__":
  # Manual smoke test; needs a CUDA device and the shiftnet_cuda extension.
  main()
|
[
"sys.path.append",
"numpy.zeros",
"numpy.arange",
"torch.zeros",
"shiftnet_cuda.moduloshiftgeneric_nchw",
"torch.from_numpy"
] |
[((11, 32), 'sys.path.append', 'sys.path.append', (['"""./"""'], {}), "('./')\n", (26, 32), False, 'import sys\n'), ((601, 654), 'shiftnet_cuda.moduloshiftgeneric_nchw', 'shiftnet_cuda.moduloshiftgeneric_nchw', (['x', 'y', '(7)', '(2)', '(-1)'], {}), '(x, y, 7, 2, -1)\n', (638, 654), False, 'import shiftnet_cuda\n'), ((414, 439), 'torch.from_numpy', 'torch.from_numpy', (['src_buf'], {}), '(src_buf)\n', (430, 439), False, 'import torch\n'), ((131, 149), 'numpy.arange', 'np.arange', (['(18 * 18)'], {}), '(18 * 18)\n', (140, 149), True, 'import numpy as np\n'), ((178, 204), 'numpy.zeros', 'np.zeros', (['(32, 64, 18, 18)'], {}), '((32, 64, 18, 18))\n', (186, 204), True, 'import numpy as np\n'), ((320, 347), 'torch.zeros', 'torch.zeros', (['(32)', '(64)', '(18)', '(18)'], {}), '(32, 64, 18, 18)\n', (331, 347), False, 'import torch\n'), ((452, 479), 'torch.zeros', 'torch.zeros', (['(32)', '(64)', '(18)', '(18)'], {}), '(32, 64, 18, 18)\n', (463, 479), False, 'import torch\n')]
|
# -*- coding: UTF8 -*-
# vim: set expandtab tabstop=2 shiftwidth=2 softtabstop=2 foldmethod=marker: #
import json
def get_config():
  """Return the RocksDB node configuration as a JSON-encoded string."""
  rocksdb_nodes = {
    "group-a#1": "default",
    "group-a#2": "default",
  }
  return json.dumps({"RocksdbNodeConfigs": rocksdb_nodes})
|
[
"json.dumps"
] |
[((255, 273), 'json.dumps', 'json.dumps', (['config'], {}), '(config)\n', (265, 273), False, 'import json\n')]
|
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from .anchor_head_template import AnchorHeadTemplate
class GradReverse(torch.autograd.Function):
    # NOTE(review): legacy (pre-0.4) autograd.Function API — instance-level
    # __init__/forward/backward rather than static methods with .apply.
    # Confirm the installed torch version still supports instantiation-style
    # Functions.
    def __init__(self, lambd):
        # Scale factor applied to the gradient on the backward pass
        # (a negative lambd flips the gradient sign).
        self.lambd = lambd
    def forward(self, x):
        # Identity on the forward pass.
        return x.view_as(x)
    def backward(self, grad_output):
        # Scaled (possibly reversed) gradient.
        return (grad_output * self.lambd)
def grad_reverse(x, lambd):
    """Gradient-reversal op: identity forward, gradient scaled by ``lambd``."""
    reverse_op = GradReverse(lambd)
    return reverse_op(x)
class RangeIntervalAttentionLayer(nn.Module):
    """Row-band (range) attention.

    The kernel height is split into ``division`` equal horizontal bands;
    every row in a band is scaled by that band's learnable weight and the
    result is squashed by a sigmoid.  With ``prior=True`` the band weights
    start at 0, (division-1), 2*(division-1), ... instead of random values.
    """

    def __init__(self, num_channels, kernel_size, division=6, prior=False):
        super(RangeIntervalAttentionLayer, self).__init__()
        if prior:
            init = torch.cat(
                [torch.tensor([[[band * (division - 1)]]], dtype=torch.float32)
                 for band in range(division)],
                dim=-2)
        else:
            init = torch.randn(1, division, 1)
        self.patch_param = nn.Parameter(init, requires_grad=True)
        self.sigmoid = nn.Sigmoid()
        self.kernel_size = kernel_size
        self.division = division
        # Number of rows covered by each band.
        self.elem = int(self.kernel_size / division)

    def forward(self, input_tensor):
        bt, c, h, w = input_tensor.size()
        flat = input_tensor.view(-1, h, w)
        # Expand the (1, division, 1) band weights to one value per row,
        # then tile over every (batch, channel) slice and every column.
        self.patch_matrix = self.patch_param.repeat(1, 1, self.elem).view(1, -1, 1)
        self.patch_matrix = self.patch_matrix.repeat(bt * c, 1, w)
        weighted = flat * self.patch_matrix
        return self.sigmoid(weighted).view(bt, c, h, w)
class RoadIntervalAttentionLayer(nn.Module):
    """Column-band (road) attention.

    The kernel width is split into ``division`` equal vertical bands; every
    column in a band is scaled by that band's learnable weight and the
    result is squashed by a sigmoid.  With ``prior=True`` the band weights
    start at 0, (division-1), 2*(division-1), ... instead of random values.
    """

    def __init__(self, num_channels, kernel_size, division=6, prior=False):
        super(RoadIntervalAttentionLayer, self).__init__()
        if prior:
            init = torch.cat(
                [torch.tensor([[[band * (division - 1)]]], dtype=torch.float32)
                 for band in range(division)],
                dim=-1)
        else:
            init = torch.randn(1, 1, division)
        self.patch_param = nn.Parameter(init, requires_grad=True)
        self.sigmoid = nn.Sigmoid()
        self.kernel_size = kernel_size
        self.division = division
        # Number of columns covered by each band.
        self.elem = int(self.kernel_size / division)

    def forward(self, input_tensor):
        bt, c, h, w = input_tensor.size()
        flat = input_tensor.view(-1, h, w)
        # Expand the (1, 1, division) band weights to one value per column,
        # then tile over every (batch, channel) slice and every row.
        self.patch_matrix = self.patch_param.repeat(1, self.elem, 1).permute(0, 2, 1).contiguous().view(1, -1, 1).permute(0, 2, 1)
        self.patch_matrix = self.patch_matrix.repeat(bt * c, h, 1)
        weighted = flat * self.patch_matrix
        return self.sigmoid(weighted).view(bt, c, h, w)
class LocationAttentionLayer(nn.Module):
    """Per-pixel spatial attention.

    A learnable H x W weight map (shared across batch and channels)
    multiplies the input element-wise; the product is passed through a
    sigmoid.  ``prior`` is accepted for signature compatibility but unused.
    """

    def __init__(self, num_channels, kernel_size, prior=False):
        super(LocationAttentionLayer, self).__init__()
        # One weight per spatial location.
        self.patch_matrix = nn.Parameter(torch.randn(1, kernel_size, kernel_size),
                                         requires_grad=True)
        self.sigmoid = nn.Sigmoid()

    def forward(self, input_tensor):
        bt, c, h, w = input_tensor.size()
        flat = input_tensor.view(-1, h, w)
        scaled = flat * self.patch_matrix.repeat(bt * c, 1, 1)
        return self.sigmoid(scaled).view(bt, c, h, w)
class SpatialSELayer(nn.Module):
    """Spatial squeeze-and-excitation (sSE) block.

    Squeezes the channel axis with a 1x1 convolution and re-weights every
    spatial position of the input by the resulting sigmoid map.
    Re-implementation of the block from Roy et al., "Concurrent Spatial and
    Channel Squeeze & Excitation in Fully Convolutional Networks",
    MICCAI 2018.
    """

    def __init__(self, num_channels):
        """:param num_channels: number of input channels"""
        super(SpatialSELayer, self).__init__()
        self.conv = nn.Conv2d(num_channels, 1, 1)
        self.sigmoid = nn.Sigmoid()

    def forward(self, input_tensor, weights=None):
        """Apply the spatial gate.

        :param input_tensor: X, shape (batch_size, num_channels, H, W)
        :param weights: optional externally supplied conv weights
            (few-shot case); averaged over their first axis.
        :return: re-weighted tensor of the same shape as the input
        """
        batch_size, channel, a, b = input_tensor.size()
        if weights is None:
            squeezed = self.conv(input_tensor)
        else:
            # Average the provided kernels and run a bias-free 1x1 conv.
            mean_w = torch.mean(weights, dim=0).view(1, channel, 1, 1)
            squeezed = F.conv2d(input_tensor, mean_w)
        attention = self.sigmoid(squeezed).view(batch_size, 1, a, b)
        return torch.mul(input_tensor, attention)
class ChannelSELayer(nn.Module):
    """Channel squeeze-and-excitation (cSE) block.

    Global-average-pools each channel, passes the result through a
    two-layer bottleneck MLP, and rescales the input channels by the
    resulting sigmoid gates.  Re-implementation of Hu et al.,
    "Squeeze-and-Excitation Networks".
    """

    def __init__(self, num_channels, reduction_ratio=2):
        """:param num_channels: number of input channels
        :param reduction_ratio: bottleneck shrink factor for the MLP
        """
        super(ChannelSELayer, self).__init__()
        reduced = num_channels // reduction_ratio
        self.reduction_ratio = reduction_ratio
        self.fc1 = nn.Linear(num_channels, reduced, bias=True)
        self.fc2 = nn.Linear(reduced, num_channels, bias=True)
        self.relu = nn.ReLU()
        self.sigmoid = nn.Sigmoid()

    def forward(self, input_tensor):
        """:param input_tensor: X, shape (batch_size, num_channels, H, W)
        :return: channel-gated tensor of the same shape
        """
        batch_size, num_channels, H, W = input_tensor.size()
        # Global average pool: one scalar per (batch, channel).
        pooled = input_tensor.view(batch_size, num_channels, -1).mean(dim=2)
        gates = self.sigmoid(self.fc2(self.relu(self.fc1(pooled))))
        return torch.mul(input_tensor,
                         gates.view(batch_size, num_channels, 1, 1))
class LocalDomainClassifier(nn.Module):
    """Pixel-wise (local) domain classifier.

    Three 1x1 convolutions map the input feature map to a single-channel
    domain-probability map in (0, 1).  With ``context=True``, ``forward``
    additionally returns the globally pooled 128-channel intermediate
    feature, used by the caller as a context vector.
    """
    def __init__(self, input_channels=256, context=False):
        """
        :param input_channels: number of channels of the input feature map
        :param context: if True, also return the pooled context feature
        """
        super(LocalDomainClassifier, self).__init__()
        self.conv1 = nn.Conv2d(input_channels, 256, kernel_size=1, stride=1,
                               padding=0, bias=False)
        self.conv2 = nn.Conv2d(256, 128, kernel_size=1, stride=1,
                               padding=0, bias=False)
        self.conv3 = nn.Conv2d(128, 1, kernel_size=1, stride=1,
                               padding=0, bias=False)
        self.context = context
        self._init_weights()
    def _init_weights(self):
        def normal_init(m, mean, stddev, truncated=False):
            """
            weight initializer: truncated normal and random normal.
            """
            if truncated:
                m.weight.data.normal_().fmod_(2).mul_(stddev).add_(mean) # not a perfect approximation
            else:
                m.weight.data.normal_(mean, stddev)
            #m.bias.data.zero_()
        normal_init(self.conv1, 0, 0.01)
        normal_init(self.conv2, 0, 0.01)
        normal_init(self.conv3, 0, 0.01)
    def forward(self, x):
        """Return the sigmoid domain map (and the context feature if enabled)."""
        x = F.relu(self.conv1(x))
        x = F.relu(self.conv2(x))
        if self.context:
            # Global average over the spatial extent -> (N, 128, 1, 1).
            feat = F.avg_pool2d(x, (x.size(2), x.size(3)))
            x = self.conv3(x)
            # torch.sigmoid replaces the deprecated F.sigmoid (removed in
            # recent PyTorch releases); the values are identical.
            return torch.sigmoid(x), feat
        else:
            x = self.conv3(x)
            return torch.sigmoid(x)
class AnchorHeadSingleRangeNewConvDom(AnchorHeadTemplate):
    """Single anchor head with range-guided domain-adaptation branches.

    Extends AnchorHeadTemplate with optional attention modules
    (spatial/channel SE, patch, range-/road-interval) and several
    mutually exclusive domain-classifier variants, all selected via
    flags in ``model_cfg``.

    NOTE(review): several attributes read below (``range_guidance``,
    ``range_da``, ``interval_da``, ``dom_squeeze``, ``remove_near_range``,
    ``range_guidance_*`` flags) are presumably set by
    ``AnchorHeadTemplate.__init__`` — not visible here, confirm.
    """
    def __init__(self, model_cfg, input_channels, num_class, class_names, grid_size, point_cloud_range,
                 predict_boxes_when_training=True, nusc=False, fpn_layers=[], **kwargs):
        """Build detection heads, optional attention and domain classifiers.

        :param model_cfg: head config; attention/domain options are read
            via ``model_cfg.get(...)`` with defaults of False below.
        :param input_channels: channels of the incoming BEV feature map.
        """
        super().__init__(
            model_cfg=model_cfg, num_class=num_class, class_names=class_names, grid_size=grid_size, point_cloud_range=point_cloud_range,
            predict_boxes_when_training=predict_boxes_when_training, nusc=nusc, fpn_layers=fpn_layers
        )
        self.num_anchors_per_location = sum(self.num_anchors_per_location)
        # Attention-module switches from the config (all default to off).
        self.voxel_det_seconv_attention = self.model_cfg.get('VOXEL_DET_SECONV_ATTENTION', False)
        self.voxel_det_se_attention = self.model_cfg.get('VOXEL_DET_SE_ATTENTION', False)
        self.voxel_det_patch_attention = self.model_cfg.get('VOXEL_DET_PATCH_ATTENTION', False)
        self.voxel_dom_seconv_attention = self.model_cfg.get('VOXEL_DOM_SECONV_ATTENTION', False)
        self.voxel_dom_se_attention = self.model_cfg.get('VOXEL_DOM_SE_ATTENTION', False)
        self.voxel_dom_patch_attention = self.model_cfg.get('VOXEL_DOM_PATCH_ATTENTION', False)
        self.voxel_dom_rangeinterval_attention = self.model_cfg.get('VOXEL_DOM_RANGEINTERVAL_ATTENTION', False)
        self.voxel_dom_roadinterval_attention = self.model_cfg.get('VOXEL_DOM_ROADINTERVAL_ATTENTION', False)
        self.joint_attention = self.model_cfg.get('VOXEL_DETDOM_JOINT_ATTENTION', False)
        self.dom_patch_first = self.model_cfg.get('DOM_PATCH_FIRST', False)
        # Range guidance appends 2 coordinate channels (|x|, |y|) to the
        # features; either only for the domain branch or for both branches.
        if self.range_guidance:
            if self.range_guidance_dom_only:
                input_channels_dom = input_channels + 2
            else:
                input_channels = input_channels + 2
                input_channels_dom = input_channels
        else:
            input_channels_dom = input_channels
        # 1x1 detection heads: classification and box regression.
        self.conv_cls = nn.Conv2d(
            input_channels, self.num_anchors_per_location * self.num_class,
            kernel_size=1
        )
        self.conv_box = nn.Conv2d(
            input_channels, self.num_anchors_per_location * self.box_coder.code_size,
            kernel_size=1
        )
        self.rangeinv = self.model_cfg.get('RANGE_INV', False)
        self.keep_x = self.model_cfg.get('KEEP_X', False)
        self.keep_y = self.model_cfg.get('KEEP_Y', False)
        self.keep_xy = self.model_cfg.get('KEEP_XY', False)
        self.center_xy = self.model_cfg.get('CENTER_XY', False)
        self.zeroone_prior = self.model_cfg.get('ZEROONE_PRIOR', False)
        self.rm_thresh = self.model_cfg.get('RM_THRESH', 0)
        if self.rangeinv:
            # Binary near/far range discriminator used in forward().
            self.conv_range = nn.Conv2d(
                input_channels, 1,
                kernel_size=1
            )
        #nn.Sequential(
        # Detection-branch attention (only when not shared with dom branch).
        if self.voxel_det_seconv_attention and not self.joint_attention:
            self.att_spatial_se_layer_det = SpatialSELayer(512)
        if self.voxel_det_se_attention and not self.joint_attention:
            self.att_se_layer_det = ChannelSELayer(512)
        if self.voxel_det_patch_attention and not self.joint_attention:
            self.att_patch_layer_det = LocationAttentionLayer(512, self.model_cfg.PATCH_SIZE, prior=self.zeroone_prior)
        ###################
        # Domain-branch attention modules.
        if self.voxel_dom_seconv_attention:
            self.att_spatial_se_layer = SpatialSELayer(512)
        if self.voxel_dom_se_attention:
            self.att_se_layer = ChannelSELayer(512)
        if self.voxel_dom_patch_attention:
            self.att_patch_layer = LocationAttentionLayer(512, self.model_cfg.PATCH_SIZE, prior=self.zeroone_prior)
        if self.voxel_dom_rangeinterval_attention:
            self.att_rangeinterval_layer = RangeIntervalAttentionLayer(512, self.model_cfg.PATCH_SIZE, division=self.model_cfg.get('RANGE_INTERVAL_DIVISION', 6), prior=self.zeroone_prior)
        if self.voxel_dom_roadinterval_attention:
            self.att_roadinterval_layer = RoadIntervalAttentionLayer(512, self.model_cfg.PATCH_SIZE, division=self.model_cfg.get('ROAD_INTERVAL_DIVISION', 6), prior=self.zeroone_prior)
        # Optional direction classifier for heading-bin prediction.
        if self.model_cfg.get('USE_DIRECTION_CLASSIFIER', None) is not None:
            self.conv_dir_cls = nn.Conv2d(
                input_channels,
                self.num_anchors_per_location * self.model_cfg.NUM_DIR_BINS,
                kernel_size=1
            )
        else:
            self.conv_dir_cls = None
        dom_fc1, dom_fc2 = self.model_cfg.get('DOM_FC', [1024, 1024])
        # print("dom_fc ", dom_fc1, dom_fc2)
        # if self.model_cfg.get('USE_DOMAIN_CLASSIFIER', None) is not None:
        # Exactly one domain-classifier variant is built, chosen by the
        # first matching flag below.
        if self.range_da > 0:
            # One MLP domain classifier per range band.
            self.domain_pool = nn.AdaptiveAvgPool2d(1)
            self.domain_classifier_range = nn.ModuleDict()
            for n in range(0+self.remove_near_range, self.range_da-self.remove_far_range):
                self.domain_classifier_range[str(n)] = nn.Sequential(nn.Linear(input_channels, dom_fc1),
                                                nn.ReLU(True), nn.Dropout(),
                                                nn.Linear(dom_fc1, dom_fc2), nn.ReLU(True),
                                                nn.Dropout(), nn.Linear(dom_fc2, 1))
            if self.keep_xy:
                # Second per-band set for the other (y) axis.
                self.domain_classifier_range2 = nn.ModuleDict()
                for n in range(0+self.remove_near_range2, self.range_da-self.remove_far_range2):
                    self.domain_classifier_range2[str(n)] = nn.Sequential(nn.Linear(input_channels, dom_fc1),
                                                    nn.ReLU(True), nn.Dropout(),
                                                    nn.Linear(dom_fc1, dom_fc2), nn.ReLU(True),
                                                    nn.Dropout(), nn.Linear(dom_fc2, 1))
        elif self.interval_da > 0:
            # One MLP domain classifier per interval.
            self.domain_pool = nn.AdaptiveAvgPool2d(1)
            self.domain_classifier_interval = nn.ModuleDict()
            for n in range(self.interval_da):
                self.domain_classifier_interval[str(n)] = nn.Sequential(nn.Linear(input_channels, dom_fc1),
                                                nn.ReLU(True), nn.Dropout(),
                                                nn.Linear(dom_fc1, dom_fc2), nn.ReLU(True),
                                                nn.Dropout(), nn.Linear(dom_fc2, 1))
        elif self.range_guidance_conv_dom:
            # Fully convolutional (pixel-wise) domain classifier.
            self.conv_dom_layers = self.make_conv_layers(
                conv_cfg=self.model_cfg.LOCAL_DOM_FC,
                input_channels=input_channels_dom,
                output_channels=1
            )
            if self.range_guidance_double_dom:
                # Additional global (pooled) domain classifier on top.
                self.domain_pool = nn.AdaptiveAvgPool2d(1)
                self.domain_classifier = nn.Sequential(nn.Linear(input_channels_dom, dom_fc1),
                                                nn.ReLU(True), nn.Dropout(),
                                                nn.Linear(dom_fc1, dom_fc2), nn.ReLU(True),
                                                nn.Dropout(), nn.Linear(dom_fc2, 1))
        elif self.range_guidance_new_conv_dom:
            print("input_channels_dom", input_channels_dom)
            self.conv_dom_layers = LocalDomainClassifier(input_channels=input_channels_dom, context=self.range_guidance_new_conv_dom_context) #
        # elif self.range_guidance_pixelfc_dom:
        #     # for i in range()
        #     self.pixelfc_layers = nn.ModuleList()
        #     # for i in range(self.model_cfg.PATCH_SIZE):
        #     self.make_fc_layers(
        #         conv_cfg=self.model_cfg.LOCAL_DOM_FC,
        #         input_channels=input_channels_dom,
        #         output_channels=1
        #     )
        else:
            # Default: single global domain classifier on pooled features.
            self.domain_pool = nn.AdaptiveAvgPool2d(1)
            self.domain_classifier = nn.Sequential(nn.Linear(input_channels_dom, dom_fc1),
                                            nn.ReLU(True), nn.Dropout(),
                                            nn.Linear(dom_fc1, dom_fc2), nn.ReLU(True),
                                            nn.Dropout(), nn.Linear(dom_fc2, 1))
        self.init_weights()
    def init_weights(self):
        """Init detection heads: cls bias set so the initial foreground
        probability is ``pi`` (the -log((1-pi)/pi) prior), box weights
        drawn from a small normal distribution."""
        pi = 0.01
        nn.init.constant_(self.conv_cls.bias, -np.log((1 - pi) / pi))
        nn.init.normal_(self.conv_box.weight, mean=0, std=0.001)
    def local_attention(self, features, d):
        """Entropy-based re-weighting of features by domain confidence.

        ``H`` is the binary entropy of the per-pixel domain prediction
        ``d``; pixels where the domain is easy to tell (low entropy) get
        up-weighted by ``1 + (1 - H)``.
        """
        # features.size() = [1, 256, h, w]
        # d.size() = [1, 1, h, w] after sigmoid
        d = d.clamp(1e-6, 1)
        H = - ( d * d.log() + (1-d) * (1-d).log() )
        w = 1 - H
        features_new = (1 + w) * features
        return features_new
    def forward(self, data_dict):
        """Run the detection head and, depending on ``t_mode``, the
        domain-classification branch.

        Reads ``spatial_features_2d``, ``t_mode``, ``l`` (presumably the
        gradient-reversal coefficient — confirm against grad_reverse) and,
        when training, ``gt_boxes``; returns ``data_dict`` with
        predictions added and stores targets in ``self.forward_ret_dict``.
        """
        t_mode = data_dict['t_mode']
        l = data_dict['l']
        if 'pseudo' in t_mode:
            pseudo = True
        else:
            pseudo = False
        spatial_features_2d = data_dict['spatial_features_2d']
        # t-SNE mode: only export pooled per-range-band features and exit.
        if t_mode == 'tsne':
            self.range_da = 2
            mid_dim = int(spatial_features_2d.shape[-1]/2.)
            range_interval = int(spatial_features_2d.shape[-1]/(2*self.range_da))
            start_dim = {}
            mid1_dim = {}
            mid2_dim = {}
            end_dim = {}
            interval_idx = {}
            interval_feat = {}
            if self.keep_xy:
                interval_feat2 = {}
            # for each range 0,1,2,3 (4)
            for n in range(0+self.remove_near_range, self.range_da-self.remove_far_range): # no0,1
                start_dim[n] = mid_dim - range_interval*(n+1) # 2-1=1, 2-2=0
                mid1_dim[n] = mid_dim - range_interval*n # 2-0=2 2-1=1 #int(spatial_features_2d.shape[-1]/2.)
                mid2_dim[n] = mid_dim + range_interval*n # 2+0=2 2+1=3
                end_dim[n] = mid_dim + range_interval*(n+1) # 2+1=3 2+2=4
                # Symmetric band around the centre column/row.
                interval_idx[n] = torch.LongTensor([i for i in range(start_dim[n], mid1_dim[n])]+[i for i in range(mid2_dim[n], end_dim[n])])
                feat1 = spatial_features_2d[:,:,:,interval_idx[n]]
                feat1 = self.domain_pool(feat1).view(feat1.size(0), -1)
                data_dict[f'spatial_features_2d_x_{n}'] = feat1
                feat2 = spatial_features_2d[:,:,interval_idx[n],:]
                feat2 = self.domain_pool(feat2).view(feat2.size(0), -1)
                data_dict[f'spatial_features_2d_y_{n}'] = feat2
            return data_dict
        ###########################
        # Append |x|,|y| coordinate channels for both branches.
        if self.range_guidance and not self.range_guidance_dom_only:
            total_range = spatial_features_2d.shape[-1]
            half_range = int(spatial_features_2d.shape[-1] * 0.5)
            x_range = torch.abs(torch.arange(-half_range, half_range, 1).float() + 0.5).unsqueeze(0).unsqueeze(0).unsqueeze(0).repeat(spatial_features_2d.shape[0],1, total_range, 1).cuda()
            # print("x_range", x_range)
            y_range = torch.abs(torch.arange(-half_range, half_range, 1).float() + 0.5).unsqueeze(-1).unsqueeze(0).unsqueeze(0).repeat(spatial_features_2d.shape[0],1,1,total_range).cuda()
            spatial_features_2d = torch.cat((spatial_features_2d, x_range, y_range), dim=1)
            # print("spatial_features_2d", spatial_features_2d.shape)
        # Domain-classification branch.
        if 'dom_img' in t_mode:
            if t_mode == 'dom_img_src':
                dom_src = True
            elif t_mode == 'dom_img_tgt':
                dom_src = False
            else:
                dom_src = None
            #################### PATCH EARLY
            if self.voxel_dom_patch_attention and self.dom_patch_first:
                spatial_features_2d = self.att_patch_layer(spatial_features_2d)
            if self.voxel_dom_rangeinterval_attention and self.dom_patch_first:
                spatial_features_2d = self.att_rangeinterval_layer(spatial_features_2d)
            if self.voxel_dom_roadinterval_attention and self.dom_patch_first:
                spatial_features_2d = self.att_roadinterval_layer(spatial_features_2d)
            #################### PATCH LATE
            if self.voxel_dom_patch_attention and not self.dom_patch_first:
                spatial_features_2d = self.att_patch_layer(spatial_features_2d)
            if self.voxel_dom_rangeinterval_attention and not self.dom_patch_first:
                spatial_features_2d = self.att_rangeinterval_layer(spatial_features_2d)
            if self.voxel_dom_roadinterval_attention and not self.dom_patch_first:
                spatial_features_2d = self.att_roadinterval_layer(spatial_features_2d)
            ####################
            # Coordinate channels appended for the domain branch only.
            if self.range_guidance and self.range_guidance_dom_only:
                total_range = spatial_features_2d.shape[-1]
                half_range = int(spatial_features_2d.shape[-1] * 0.5)
                x_range = torch.abs(torch.arange(-half_range, half_range, 1).float() + 0.5).unsqueeze(0).unsqueeze(0).unsqueeze(0).repeat(spatial_features_2d.shape[0],1, total_range, 1).cuda()
                y_range = torch.abs(torch.arange(-half_range, half_range, 1).float() + 0.5).unsqueeze(-1).unsqueeze(0).unsqueeze(0).repeat(spatial_features_2d.shape[0],1,1,total_range).cuda()
                spatial_features_2d = torch.cat((spatial_features_2d, x_range, y_range), dim=1)
            if self.range_guidance_conv_dom or self.range_guidance_new_conv_dom:
                # x_pool = self.domain_pool().view(spatial_features_2d.size(0), -1)
                # print('t_mode', t_mode)
                # print("l", l)
                if self.range_guidance_new_conv_dom_attention:
                    x_reverse = grad_reverse(spatial_features_2d, l*-1)
                    if self.range_guidance_new_conv_dom_context:
                        dom_img_preds, _ = self.conv_dom_layers(x_reverse)
                        #print(d_pixel)
                        # if not target:
                        _, feat_pixel = self.conv_dom_layers(spatial_features_2d.detach())
                    else:
                        dom_img_preds = self.conv_dom_layers(x_reverse)
                        # Entropy-based re-weighting by the (detached) preds.
                        spatial_features_2d = self.local_attention(spatial_features_2d, dom_img_preds.detach())
                else:
                    x_reverse = grad_reverse(spatial_features_2d, l*-1)
                    dom_img_preds = self.conv_dom_layers(x_reverse)
                if self.range_guidance_double_dom:
                    x_pool2 = self.domain_pool(spatial_features_2d).view(spatial_features_2d.size(0), -1)
                    x_reverse2 = grad_reverse(x_pool2, l*-1)
                    # print("x_reverse2", x_reverse2.shape)
                    dom_img_preds2 = self.domain_classifier(x_reverse2)#.squeeze(-1)
            else:
                # Global classifier on pooled, gradient-reversed features.
                x_pool = self.domain_pool(spatial_features_2d).view(spatial_features_2d.size(0), -1)
                x_reverse = grad_reverse(x_pool, l*-1)
                dom_img_preds = self.domain_classifier(x_reverse)#.squeeze(-1)
                # print("dom_img_preds", dom_img_preds.shape)
            if self.dom_squeeze:
                dom_img_preds = dom_img_preds.squeeze(-1)
                if self.range_guidance_double_dom:
                    dom_img_preds2 = dom_img_preds2.squeeze(-1)
            self.forward_ret_dict['dom_img_preds'] = dom_img_preds
            if self.range_guidance_double_dom:
                self.forward_ret_dict['dom_img_preds2'] = dom_img_preds2
            if self.training:
                targets_dict_dom = self.assign_targets(
                    gt_boxes=data_dict['gt_boxes'],
                    dom_src=dom_src,
                    pseudo=pseudo
                )
                # if self.range_guidance_conv_dom:
                #     targets_dict_dom['dom_labels']
                self.forward_ret_dict.update(targets_dict_dom)
            if 'det' not in t_mode:
                return data_dict
        # Detection branch: pick the (possibly attended) feature map.
        if self.joint_attention:
            if self.voxel_det_seconv_attention and self.voxel_det_se_attention:
                spatial_features_2d = torch.max(self.att_spatial_se_layer(spatial_features_2d), self.att_se_layer(spatial_features_2d))
                # spatial_features_2d_det = spatial_features_2d
            elif self.voxel_det_seconv_attention:
                # print("spatial_features_2d before", spatial_features_2d.shape)
                spatial_features_2d = self.att_spatial_se_layer(spatial_features_2d)
                # spatial_features_2d_det = spatial_features_2d
            elif self.voxel_det_se_attention:
                spatial_features_2d = self.att_se_layer(spatial_features_2d)
                # spatial_features_2d_det = spatial_features_2d
            # else:
            spatial_features_2d_det = spatial_features_2d
        else:
            if self.voxel_det_seconv_attention and self.voxel_det_se_attention:
                spatial_features_2d_out = torch.max(self.att_spatial_se_layer_det(spatial_features_2d), self.att_se_layer_det(spatial_features_2d))
                spatial_features_2d_det = spatial_features_2d_out
            elif self.voxel_det_seconv_attention:
                # print("spatial_features_2d before", spatial_features_2d.shape)
                spatial_features_2d_det = self.att_spatial_se_layer_det(spatial_features_2d)
            elif self.voxel_det_se_attention:
                spatial_features_2d_det = self.att_se_layer_det(spatial_features_2d)
            else:
                spatial_features_2d_det = spatial_features_2d
        # print("spatial_features_2d", spatial_features_2d.shape)
        cls_preds = self.conv_cls(spatial_features_2d_det)
        box_preds = self.conv_box(spatial_features_2d_det)
        cls_preds = cls_preds.permute(0, 2, 3, 1).contiguous()  # [N, H, W, C]
        box_preds = box_preds.permute(0, 2, 3, 1).contiguous()  # [N, H, W, C]
        self.forward_ret_dict['cls_preds'] = cls_preds
        self.forward_ret_dict['box_preds'] = box_preds
        if self.conv_dir_cls is not None:
            dir_cls_preds = self.conv_dir_cls(spatial_features_2d_det)
            dir_cls_preds = dir_cls_preds.permute(0, 2, 3, 1).contiguous()
            self.forward_ret_dict['dir_cls_preds'] = dir_cls_preds
        else:
            dir_cls_preds = None
        if self.training:
            if pseudo:
                pseudo_weights = data_dict['pseudo_weights']
            else:
                pseudo_weights = None
            # print("gt_classes", data_dict['gt_classes'].shape)
            # print("gt_classes", data_dict['gt_classes'])
            # print("pseudo_weights", pseudo_weights)
            targets_dict = self.assign_targets(
                gt_boxes=data_dict['gt_boxes'],
                pseudo=pseudo,
                pseudo_weights=pseudo_weights
            )
            self.forward_ret_dict.update(targets_dict)
        if not self.training or self.predict_boxes_when_training:
            batch_cls_preds, batch_box_preds = self.generate_predicted_boxes(
                batch_size=data_dict['batch_size'],
                cls_preds=cls_preds, box_preds=box_preds, dir_cls_preds=dir_cls_preds
            )
            data_dict['batch_cls_preds'] = batch_cls_preds
            data_dict['batch_box_preds'] = batch_box_preds
            data_dict['cls_preds_normalized'] = False
        # Range-invariance auxiliary task: adversarially predict whether a
        # column/row band is near or far from the sensor.
        if self.rangeinv:
            # print("spatial_features_2d", spatial_features_2d.shape) #512,128,128
            thresh = self.rm_thresh
            start_dim = int(spatial_features_2d.shape[-1]/4.)
            mid_dim = int(spatial_features_2d.shape[-1]/2.)
            end_dim = start_dim+int(spatial_features_2d.shape[-1]/2.)
            near_idx = torch.LongTensor([i for i in range(start_dim, mid_dim-thresh)]+[i for i in range(mid_dim+thresh, end_dim)])
            far_idx = torch.LongTensor([i for i in range(start_dim)]+[i for i in range(end_dim, spatial_features_2d.shape[-1])])
            if self.keep_x:
                near_feat_2d = spatial_features_2d[:,:,:,near_idx]
                far_feat_2d = spatial_features_2d[:,:,:, far_idx]
            elif self.keep_y:
                near_feat_2d = spatial_features_2d[:,:,near_idx,:]
                far_feat_2d = spatial_features_2d[:,:,far_idx,:]
            near_feat_2d_reverse = grad_reverse(near_feat_2d, l*-1)
            range_pred_near = self.conv_range(near_feat_2d_reverse)
            # print("near_range_pred", near_range_pred.shape)
            far_feat_2d_reverse = grad_reverse(far_feat_2d, l*-1)
            range_pred_far = self.conv_range(far_feat_2d_reverse)
            # print("far_range_pred", far_range_pred.shape)
            range_labels_near = torch.ones((range_pred_near.shape), dtype=torch.float32, device=spatial_features_2d.device)
            range_labels_far = torch.zeros((range_pred_far.shape), dtype=torch.float32, device=spatial_features_2d.device)
            targets_dict_range = {
                'range_pred_near': range_pred_near,
                'range_pred_far': range_pred_far,
                'range_labels_near': range_labels_near,
                'range_labels_far': range_labels_far,
            }
            self.forward_ret_dict.update(targets_dict_range)
        return data_dict
|
[
"torch.nn.Dropout",
"torch.cat",
"torch.randn",
"torch.nn.ModuleDict",
"torch.arange",
"torch.nn.functional.sigmoid",
"torch.ones",
"torch.nn.Linear",
"torch.zeros",
"torch.nn.Parameter",
"torch.mean",
"torch.nn.Conv2d",
"torch.nn.functional.conv2d",
"torch.mul",
"torch.nn.Sigmoid",
"torch.nn.AdaptiveAvgPool2d",
"torch.nn.ReLU",
"numpy.log",
"torch.nn.init.normal_",
"torch.tensor"
] |
[((941, 980), 'torch.nn.Parameter', 'nn.Parameter', (['param'], {'requires_grad': '(True)'}), '(param, requires_grad=True)\n', (953, 980), True, 'import torch.nn as nn\n'), ((1096, 1108), 'torch.nn.Sigmoid', 'nn.Sigmoid', ([], {}), '()\n', (1106, 1108), True, 'import torch.nn as nn\n'), ((2171, 2210), 'torch.nn.Parameter', 'nn.Parameter', (['param'], {'requires_grad': '(True)'}), '(param, requires_grad=True)\n', (2183, 2210), True, 'import torch.nn as nn\n'), ((2234, 2246), 'torch.nn.Sigmoid', 'nn.Sigmoid', ([], {}), '()\n', (2244, 2246), True, 'import torch.nn as nn\n'), ((3220, 3232), 'torch.nn.Sigmoid', 'nn.Sigmoid', ([], {}), '()\n', (3230, 3232), True, 'import torch.nn as nn\n'), ((4426, 4455), 'torch.nn.Conv2d', 'nn.Conv2d', (['num_channels', '(1)', '(1)'], {}), '(num_channels, 1, 1)\n', (4435, 4455), True, 'import torch.nn as nn\n'), ((4479, 4491), 'torch.nn.Sigmoid', 'nn.Sigmoid', ([], {}), '()\n', (4489, 4491), True, 'import torch.nn as nn\n'), ((5514, 5553), 'torch.mul', 'torch.mul', (['input_tensor', 'squeeze_tensor'], {}), '(input_tensor, squeeze_tensor)\n', (5523, 5553), False, 'import torch\n'), ((6316, 6372), 'torch.nn.Linear', 'nn.Linear', (['num_channels', 'num_channels_reduced'], {'bias': '(True)'}), '(num_channels, num_channels_reduced, bias=True)\n', (6325, 6372), True, 'import torch.nn as nn\n'), ((6392, 6448), 'torch.nn.Linear', 'nn.Linear', (['num_channels_reduced', 'num_channels'], {'bias': '(True)'}), '(num_channels_reduced, num_channels, bias=True)\n', (6401, 6448), True, 'import torch.nn as nn\n'), ((6469, 6478), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (6476, 6478), True, 'import torch.nn as nn\n'), ((6502, 6514), 'torch.nn.Sigmoid', 'nn.Sigmoid', ([], {}), '()\n', (6512, 6514), True, 'import torch.nn as nn\n'), ((7356, 7434), 'torch.nn.Conv2d', 'nn.Conv2d', (['input_channels', '(256)'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)', 'bias': '(False)'}), '(input_channels, 256, kernel_size=1, stride=1, padding=0, 
bias=False)\n', (7365, 7434), True, 'import torch.nn as nn\n'), ((7474, 7541), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256)', '(128)'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)', 'bias': '(False)'}), '(256, 128, kernel_size=1, stride=1, padding=0, bias=False)\n', (7483, 7541), True, 'import torch.nn as nn\n'), ((7594, 7659), 'torch.nn.Conv2d', 'nn.Conv2d', (['(128)', '(1)'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)', 'bias': '(False)'}), '(128, 1, kernel_size=1, stride=1, padding=0, bias=False)\n', (7603, 7659), True, 'import torch.nn as nn\n'), ((10551, 10643), 'torch.nn.Conv2d', 'nn.Conv2d', (['input_channels', '(self.num_anchors_per_location * self.num_class)'], {'kernel_size': '(1)'}), '(input_channels, self.num_anchors_per_location * self.num_class,\n kernel_size=1)\n', (10560, 10643), True, 'import torch.nn as nn\n'), ((10698, 10801), 'torch.nn.Conv2d', 'nn.Conv2d', (['input_channels', '(self.num_anchors_per_location * self.box_coder.code_size)'], {'kernel_size': '(1)'}), '(input_channels, self.num_anchors_per_location * self.box_coder.\n code_size, kernel_size=1)\n', (10707, 10801), True, 'import torch.nn as nn\n'), ((16972, 17028), 'torch.nn.init.normal_', 'nn.init.normal_', (['self.conv_box.weight'], {'mean': '(0)', 'std': '(0.001)'}), '(self.conv_box.weight, mean=0, std=0.001)\n', (16987, 17028), True, 'import torch.nn as nn\n'), ((821, 850), 'torch.cat', 'torch.cat', (['param_list'], {'dim': '(-2)'}), '(param_list, dim=-2)\n', (830, 850), False, 'import torch\n'), ((885, 912), 'torch.randn', 'torch.randn', (['(1)', 'division', '(1)'], {}), '(1, division, 1)\n', (896, 912), False, 'import torch\n'), ((2051, 2080), 'torch.cat', 'torch.cat', (['param_list'], {'dim': '(-1)'}), '(param_list, dim=-1)\n', (2060, 2080), False, 'import torch\n'), ((2115, 2142), 'torch.randn', 'torch.randn', (['(1)', '(1)', 'division'], {}), '(1, 1, division)\n', (2126, 2142), False, 'import torch\n'), ((3054, 3094), 'torch.randn', 'torch.randn', (['(1)', 
'kernel_size', 'kernel_size'], {}), '(1, kernel_size, kernel_size)\n', (3065, 3094), False, 'import torch\n'), ((4942, 4968), 'torch.mean', 'torch.mean', (['weights'], {'dim': '(0)'}), '(weights, dim=0)\n', (4952, 4968), False, 'import torch\n'), ((5040, 5071), 'torch.nn.functional.conv2d', 'F.conv2d', (['input_tensor', 'weights'], {}), '(input_tensor, weights)\n', (5048, 5071), True, 'import torch.nn.functional as F\n'), ((8633, 8645), 'torch.nn.functional.sigmoid', 'F.sigmoid', (['x'], {}), '(x)\n', (8642, 8645), True, 'import torch.nn.functional as F\n'), ((11325, 11368), 'torch.nn.Conv2d', 'nn.Conv2d', (['input_channels', '(1)'], {'kernel_size': '(1)'}), '(input_channels, 1, kernel_size=1)\n', (11334, 11368), True, 'import torch.nn as nn\n'), ((12873, 12979), 'torch.nn.Conv2d', 'nn.Conv2d', (['input_channels', '(self.num_anchors_per_location * self.model_cfg.NUM_DIR_BINS)'], {'kernel_size': '(1)'}), '(input_channels, self.num_anchors_per_location * self.model_cfg.\n NUM_DIR_BINS, kernel_size=1)\n', (12882, 12979), True, 'import torch.nn as nn\n'), ((13342, 13365), 'torch.nn.AdaptiveAvgPool2d', 'nn.AdaptiveAvgPool2d', (['(1)'], {}), '(1)\n', (13362, 13365), True, 'import torch.nn as nn\n'), ((13409, 13424), 'torch.nn.ModuleDict', 'nn.ModuleDict', ([], {}), '()\n', (13422, 13424), True, 'import torch.nn as nn\n'), ((19772, 19829), 'torch.cat', 'torch.cat', (['(spatial_features_2d, x_range, y_range)'], {'dim': '(1)'}), '((spatial_features_2d, x_range, y_range), dim=1)\n', (19781, 19829), False, 'import torch\n'), ((29305, 29399), 'torch.ones', 'torch.ones', (['range_pred_near.shape'], {'dtype': 'torch.float32', 'device': 'spatial_features_2d.device'}), '(range_pred_near.shape, dtype=torch.float32, device=\n spatial_features_2d.device)\n', (29315, 29399), False, 'import torch\n'), ((29429, 29523), 'torch.zeros', 'torch.zeros', (['range_pred_far.shape'], {'dtype': 'torch.float32', 'device': 'spatial_features_2d.device'}), '(range_pred_far.shape, dtype=torch.float32, 
device=\n spatial_features_2d.device)\n', (29440, 29523), False, 'import torch\n'), ((8552, 8564), 'torch.nn.functional.sigmoid', 'F.sigmoid', (['x'], {}), '(x)\n', (8561, 8564), True, 'import torch.nn.functional as F\n'), ((13964, 13979), 'torch.nn.ModuleDict', 'nn.ModuleDict', ([], {}), '()\n', (13977, 13979), True, 'import torch.nn as nn\n'), ((14532, 14555), 'torch.nn.AdaptiveAvgPool2d', 'nn.AdaptiveAvgPool2d', (['(1)'], {}), '(1)\n', (14552, 14555), True, 'import torch.nn as nn\n'), ((14602, 14617), 'torch.nn.ModuleDict', 'nn.ModuleDict', ([], {}), '()\n', (14615, 14617), True, 'import torch.nn as nn\n'), ((16941, 16962), 'numpy.log', 'np.log', (['((1 - pi) / pi)'], {}), '((1 - pi) / pi)\n', (16947, 16962), True, 'import numpy as np\n'), ((21865, 21922), 'torch.cat', 'torch.cat', (['(spatial_features_2d, x_range, y_range)'], {'dim': '(1)'}), '((spatial_features_2d, x_range, y_range), dim=1)\n', (21874, 21922), False, 'import torch\n'), ((742, 801), 'torch.tensor', 'torch.tensor', (['[[[i * (division - 1)]]]'], {'dtype': 'torch.float32'}), '([[[i * (division - 1)]]], dtype=torch.float32)\n', (754, 801), False, 'import torch\n'), ((1972, 2031), 'torch.tensor', 'torch.tensor', (['[[[i * (division - 1)]]]'], {'dtype': 'torch.float32'}), '([[[i * (division - 1)]]], dtype=torch.float32)\n', (1984, 2031), False, 'import torch\n'), ((13585, 13619), 'torch.nn.Linear', 'nn.Linear', (['input_channels', 'dom_fc1'], {}), '(input_channels, dom_fc1)\n', (13594, 13619), True, 'import torch.nn as nn\n'), ((13673, 13686), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (13680, 13686), True, 'import torch.nn as nn\n'), ((13688, 13700), 'torch.nn.Dropout', 'nn.Dropout', ([], {}), '()\n', (13698, 13700), True, 'import torch.nn as nn\n'), ((13754, 13781), 'torch.nn.Linear', 'nn.Linear', (['dom_fc1', 'dom_fc2'], {}), '(dom_fc1, dom_fc2)\n', (13763, 13781), True, 'import torch.nn as nn\n'), ((13783, 13796), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (13790, 
13796), True, 'import torch.nn as nn\n'), ((13850, 13862), 'torch.nn.Dropout', 'nn.Dropout', ([], {}), '()\n', (13860, 13862), True, 'import torch.nn as nn\n'), ((13864, 13885), 'torch.nn.Linear', 'nn.Linear', (['dom_fc2', '(1)'], {}), '(dom_fc2, 1)\n', (13873, 13885), True, 'import torch.nn as nn\n'), ((14151, 14185), 'torch.nn.Linear', 'nn.Linear', (['input_channels', 'dom_fc1'], {}), '(input_channels, dom_fc1)\n', (14160, 14185), True, 'import torch.nn as nn\n'), ((14243, 14256), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (14250, 14256), True, 'import torch.nn as nn\n'), ((14258, 14270), 'torch.nn.Dropout', 'nn.Dropout', ([], {}), '()\n', (14268, 14270), True, 'import torch.nn as nn\n'), ((14328, 14355), 'torch.nn.Linear', 'nn.Linear', (['dom_fc1', 'dom_fc2'], {}), '(dom_fc1, dom_fc2)\n', (14337, 14355), True, 'import torch.nn as nn\n'), ((14357, 14370), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (14364, 14370), True, 'import torch.nn as nn\n'), ((14428, 14440), 'torch.nn.Dropout', 'nn.Dropout', ([], {}), '()\n', (14438, 14440), True, 'import torch.nn as nn\n'), ((14442, 14463), 'torch.nn.Linear', 'nn.Linear', (['dom_fc2', '(1)'], {}), '(dom_fc2, 1)\n', (14451, 14463), True, 'import torch.nn as nn\n'), ((14736, 14770), 'torch.nn.Linear', 'nn.Linear', (['input_channels', 'dom_fc1'], {}), '(input_channels, dom_fc1)\n', (14745, 14770), True, 'import torch.nn as nn\n'), ((14824, 14837), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (14831, 14837), True, 'import torch.nn as nn\n'), ((14839, 14851), 'torch.nn.Dropout', 'nn.Dropout', ([], {}), '()\n', (14849, 14851), True, 'import torch.nn as nn\n'), ((14905, 14932), 'torch.nn.Linear', 'nn.Linear', (['dom_fc1', 'dom_fc2'], {}), '(dom_fc1, dom_fc2)\n', (14914, 14932), True, 'import torch.nn as nn\n'), ((14934, 14947), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (14941, 14947), True, 'import torch.nn as nn\n'), ((15001, 15013), 'torch.nn.Dropout', 'nn.Dropout', ([], 
{}), '()\n', (15011, 15013), True, 'import torch.nn as nn\n'), ((15015, 15036), 'torch.nn.Linear', 'nn.Linear', (['dom_fc2', '(1)'], {}), '(dom_fc2, 1)\n', (15024, 15036), True, 'import torch.nn as nn\n'), ((15375, 15398), 'torch.nn.AdaptiveAvgPool2d', 'nn.AdaptiveAvgPool2d', (['(1)'], {}), '(1)\n', (15395, 15398), True, 'import torch.nn as nn\n'), ((16449, 16472), 'torch.nn.AdaptiveAvgPool2d', 'nn.AdaptiveAvgPool2d', (['(1)'], {}), '(1)\n', (16469, 16472), True, 'import torch.nn as nn\n'), ((15454, 15492), 'torch.nn.Linear', 'nn.Linear', (['input_channels_dom', 'dom_fc1'], {}), '(input_channels_dom, dom_fc1)\n', (15463, 15492), True, 'import torch.nn as nn\n'), ((15546, 15559), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (15553, 15559), True, 'import torch.nn as nn\n'), ((15561, 15573), 'torch.nn.Dropout', 'nn.Dropout', ([], {}), '()\n', (15571, 15573), True, 'import torch.nn as nn\n'), ((15627, 15654), 'torch.nn.Linear', 'nn.Linear', (['dom_fc1', 'dom_fc2'], {}), '(dom_fc1, dom_fc2)\n', (15636, 15654), True, 'import torch.nn as nn\n'), ((15656, 15669), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (15663, 15669), True, 'import torch.nn as nn\n'), ((15723, 15735), 'torch.nn.Dropout', 'nn.Dropout', ([], {}), '()\n', (15733, 15735), True, 'import torch.nn as nn\n'), ((15737, 15758), 'torch.nn.Linear', 'nn.Linear', (['dom_fc2', '(1)'], {}), '(dom_fc2, 1)\n', (15746, 15758), True, 'import torch.nn as nn\n'), ((16524, 16562), 'torch.nn.Linear', 'nn.Linear', (['input_channels_dom', 'dom_fc1'], {}), '(input_channels_dom, dom_fc1)\n', (16533, 16562), True, 'import torch.nn as nn\n'), ((16612, 16625), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (16619, 16625), True, 'import torch.nn as nn\n'), ((16627, 16639), 'torch.nn.Dropout', 'nn.Dropout', ([], {}), '()\n', (16637, 16639), True, 'import torch.nn as nn\n'), ((16689, 16716), 'torch.nn.Linear', 'nn.Linear', (['dom_fc1', 'dom_fc2'], {}), '(dom_fc1, dom_fc2)\n', (16698, 16716), True, 
'import torch.nn as nn\n'), ((16718, 16731), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (16725, 16731), True, 'import torch.nn as nn\n'), ((16781, 16793), 'torch.nn.Dropout', 'nn.Dropout', ([], {}), '()\n', (16791, 16793), True, 'import torch.nn as nn\n'), ((16795, 16816), 'torch.nn.Linear', 'nn.Linear', (['dom_fc2', '(1)'], {}), '(dom_fc2, 1)\n', (16804, 16816), True, 'import torch.nn as nn\n'), ((19352, 19392), 'torch.arange', 'torch.arange', (['(-half_range)', 'half_range', '(1)'], {}), '(-half_range, half_range, 1)\n', (19364, 19392), False, 'import torch\n'), ((19581, 19621), 'torch.arange', 'torch.arange', (['(-half_range)', 'half_range', '(1)'], {}), '(-half_range, half_range, 1)\n', (19593, 19621), False, 'import torch\n'), ((21478, 21518), 'torch.arange', 'torch.arange', (['(-half_range)', 'half_range', '(1)'], {}), '(-half_range, half_range, 1)\n', (21490, 21518), False, 'import torch\n'), ((21671, 21711), 'torch.arange', 'torch.arange', (['(-half_range)', 'half_range', '(1)'], {}), '(-half_range, half_range, 1)\n', (21683, 21711), False, 'import torch\n')]
|
"""
------------------------
Flask application configuration module
------------------------
"""
import os
class Config(object):
    """Base configuration shared by every environment."""

    # Secret key pulled from the environment; None when unset.
    API_KEY = os.getenv("API_KEY")

    # Sensible defaults for local use; production overrides these.
    DEBUG = TESTING = PROPAGATE_EXCEPTIONS = True

    # Keep JSON responses in insertion order and human-readable.
    JSON_SORT_KEYS = False
    JSONIFY_PRETTYPRINT_REGULAR = True
class DevelopmentConfig(Config):
    """
    Configuration for the development environment.

    Inherits everything from :class:`Config` unchanged; the docstring
    alone is a valid class body, so the redundant ``pass`` was removed.
    """
class ProductionConfig(Config):
    """
    Configuration for the production environment.
    """
    # Never run production with debug/testing features enabled.
    DEBUG = TESTING = False
|
[
"os.getenv"
] |
[((186, 206), 'os.getenv', 'os.getenv', (['"""API_KEY"""'], {}), "('API_KEY')\n", (195, 206), False, 'import os\n')]
|
# Tabuu 3.0
# by Phxenix for SSBU Training Grounds
# Version: 9.3.0
# Last Changes: 24 March 2022
# Contact me on Discord: Phxenix#1104
import discord
from discord.ext import commands
import os
import utils.logger
import utils.sqlite
class Tabuu3(commands.Bot):
"""
The bot.
"""
def __init__(self):
super().__init__(
command_prefix="%",
intents=discord.Intents.all(),
status=discord.Status.online,
)
# to be used in %stats
self.version_number = "9.3.0"
self.commands_ran = 0
self.events_listened_to = 0
# check to make sure persistent buttons do not get added twice.
self.modmail_button_added = None
async def start(self, *args, **kwargs):
# getting the bot token
with open(r"./files/token.txt", encoding="utf-8") as f:
token = f.readline()
await super().start(token=token, *args, **kwargs)
async def setup_hook(self):
# we need to set up some stuff at startup
utils.logger.create_logger()
await utils.sqlite.setup_db()
# loads all of our cogs
for filename in os.listdir(r"./cogs"):
if filename.endswith(".py"):
await self.load_extension(f"cogs.{filename[:-3]}")
def get_logger(self, name: str):
# we just attach it to the bot so we dont have to import it everywhere.
return utils.logger.get_logger(name)
async def close(self):
# closing the connection on bot shutdown.
# this is more of a placeholder for now.
await super().close()
async def on_ready(self):
print(
f"Lookin' good, connected as: {str(bot.user)}, at: {discord.utils.utcnow().strftime('%d-%m-%Y %H:%M:%S')} UTC"
)
bot = Tabuu3()
bot.run()
|
[
"discord.utils.utcnow",
"os.listdir",
"discord.Intents.all"
] |
[((1220, 1240), 'os.listdir', 'os.listdir', (['"""./cogs"""'], {}), "('./cogs')\n", (1230, 1240), False, 'import os\n'), ((420, 441), 'discord.Intents.all', 'discord.Intents.all', ([], {}), '()\n', (439, 441), False, 'import discord\n'), ((1796, 1818), 'discord.utils.utcnow', 'discord.utils.utcnow', ([], {}), '()\n', (1816, 1818), False, 'import discord\n')]
|
"""
Script for extracting the ground plane from the KITTI dataset.
We need to determine the ground plane position and orientation in order to be able to reconstruct
points on it, which we are trying to detect.
We will collect all the points on the ground plane from the dataset and then fit a plane to them
with RANSAC.
----------------------------------------------------------------------------------------------------
python kitti_extract_ground_plane.py path_labels
----------------------------------------------------------------------------------------------------
"""
__date__ = '04/13/2017'
__author__ = '<NAME>'
__email__ = '<EMAIL>'
import argparse
import os
import numpy as np
import random
# import matplotlib
# matplotlib.use('Agg') # Prevents from using X interface for plotting
from matplotlib import pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from shared.geometry import R3x3_y, t3x1, Rt4x4
####################################################################################################
# DEFINITIONS #
####################################################################################################
# Parameter for RANSAC
# Distance from the plane (in meters), which is considered as an inlier region
INLIER_TRHESHOLD = 1.0
# Number of estimation iterations carried out by RANSAC
RANSAC_ITERS = 10000
####################################################################################################
# FUNCTIONS #
####################################################################################################
def plane_3p(p1, p2, p3):
"""
Computes the equation of a plane passing through the 3 given points.
Input:
p1, p2, p3: 3x1 np.matrix coordinates of points in the plane
Returns:
[a, b, c, d] coefficients as a 1x4 np.matrix
"""
l1 = p2 - p1
l2 = p3 - p1
normal = np.cross(l1, l2, axis=0)
d = - (normal[0,0]*p1[0,0] + normal[1,0]*p1[1,0] + normal[2,0]*p1[2,0])
return np.asmatrix([normal[0,0], normal[1,0], normal[2,0], d])
def show_X_and_gp(gp_X_4xn, gp_1x4):
"""
Show a 3D plot of the estimated ground plane.
"""
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.set_aspect('equal')
ax.scatter(np.array(gp_X_4xn[2,0:1000]), np.array(gp_X_4xn[0,0:1000]), np.array(-gp_X_4xn[1,0:1000]), color='red')
X = np.arange(-20, 20, 1)
Y = np.arange(-1, 10, 1)
X, Y = np.meshgrid(X, Y)
Z = - (gp_1x4[0,0]*X + gp_1x4[0,1]*Y + gp_1x4[0,3]) / gp_1x4[0,2]
ax.plot_surface(Z, X, -Y, linewidth=0, alpha=0.5, antialiased=True)
# Bounding box of the car
ax.plot([3,3,3,3,3], [1.5, 1.5, -1.5, -1.5, 1.5], [0,-1.9,-1.9,0,0], color='green')
ax.plot([-3,-3,-3,-3,-3], [1.5, 1.5, -1.5, -1.5, 1.5], [0,-1.9,-1.9,0,0], color='red')
ax.plot([3, -3], [1.5, 1.5], [0,0], color='blue')
ax.plot([3, -3], [1.5, 1.5], [-1.9,-1.9], color='blue')
ax.plot([3, -3], [-1.5, -1.5], [0,0], color='blue')
ax.plot([3, -3], [-1.5, -1.5], [-1.9,-1.9], color='blue')
ax.set_xlim(-100, 100)
ax.set_ylim(-100, 100)
ax.set_zlim(-100, 100)
ax.set_xlabel('Z')
ax.set_ylabel('X')
ax.set_zlabel('Y')
plt.show()
####################################################################################################
# CLASSES #
####################################################################################################
class GroundPlaneEstimator(object):
"""
Takes care of the estimation of the ground plane position in the KITTI dataset.
"""
def __init__(self, path_labels):
"""
Input:
path_labels: Path to the "label_2" folder of the KITTI dataset
"""
super(GroundPlaneEstimator, self).__init__()
self.path_labels = path_labels
self.gp_points = []
def run_estimation(self):
"""
Runs the whole process of estimating the ground plane.
"""
print('-- ESTIMATING GROUND PLANE POSITION')
# Read label files and get all ground plane points
print('-- Reading label files')
self._read_label_files()
print('-- Label files contain ' + str(len(self.gp_points)) + ' points')
# Create a matrix from all the points for easier computation
self.gp_X_4xn = np.asmatrix(np.ones((4, len(self.gp_points))))
for i in xrange(len(self.gp_points)):
self.gp_X_4xn[0:3,i] = self.gp_points[i]
# plt.scatter(self.gp_X_4xn[2,:], self.gp_X_4xn[1,:])
# plt.show()
# Run RANSAC on those points
print('-- Running RANSAC plane estimation')
self._ransac_plane()
def _read_label_files(self):
"""
Reads all label files and extract the points on the ground plane.
"""
filenames = [f for f in os.listdir(self.path_labels)
if os.path.isfile(os.path.join(self.path_labels, f))]
if len(filenames) != 7481:
print('Wrong number (%d) of files in the KITTI dataset! Should be 7481.'%(len(filenames)))
exit(1)
# Read each label file
# i = 0
for f in filenames:
path_label_file = os.path.join(self.path_labels, f)
self._process_label_file(path_label_file)
# i += 1
# if i == 1000: break
def _process_label_file(self, path_label_file):
"""
Processes one label file.
Input:
path_label_file: Path to the TXT label file in KITTI format to be processed.
"""
with open(path_label_file, 'r') as infile_label:
# Read the objects
for line in infile_label:
line = line.rstrip('\n')
data = line.split(' ')
# First element of the data is the label. We don't want to process 'Misc' and
# 'DontCare' labels
if data[0] == 'Misc' or data[0] == 'DontCare': continue
# Extract the points of this object on the ground plane
self._extract_ground_plane_pts(data)
def _extract_ground_plane_pts(self, data):
"""
Extract 3D points from the object bounding box, which lie on the ground plane.
Input:
data: One split line of the label file (line.split(' '))
"""
# Object dimensions
h = float(data[8])
w = float(data[9])
l = float(data[10])
# Position of the center point on the ground plane (xz plane)
cx = float(data[11])
cy = float(data[12])
cz = float(data[13])
# Rotation of the object around y
ry = float(data[14])
# 3D box corners on the ground plane. Careful, the coordinate system of the car is that
# x points forward, not z! (It is rotated by 90deg with respect to the camera one)
# fbr, rbr, fbl, rbl
X = np.asmatrix([[l/2, -l/2, l/2, -l/2],
[0, 0, 0, 0 ],
[-w/2, -w/2, w/2, w/2 ],
[1, 1, 1, 1 ]])
# Rotate the 3D box around y axis and translate it to the correct position in the cam. frame
X = Rt4x4(R3x3_y(ry), t3x1(cx, cy, cz)) * X
self.gp_points.append(X[0:3,0])
self.gp_points.append(X[0:3,1])
self.gp_points.append(X[0:3,2])
self.gp_points.append(X[0:3,3])
def _ransac_plane(self):
"""
Finds "optimal" ground plane position given the points.
Returns:
[a, b, c, d] plane equation ax+by+cz+d=0 coefficients as a 1x4 np.matrix
"""
num_points = len(self.gp_points)
# Variables for storing minimum distance sum from the estimated plane
dist2_sum_min = 99999999999999999
gp_1x4_max = np.asmatrix(np.zeros((1,4)))
for i in range(RANSAC_ITERS):
rp = random.sample(range(0, num_points), 3)
# Compute the equation of the ground plane
gp_1x4 = plane_3p(self.gp_points[rp[0]], self.gp_points[rp[1]], self.gp_points[rp[2]])
# Check that the plane gives small errors on the original points - when we have some
# close to singular situation we have to be careful
if gp_1x4 * self.gp_X_4xn[:,rp[0]] > 0.000000001 or \
gp_1x4 * self.gp_X_4xn[:,rp[1]] > 0.000000001 or \
gp_1x4 * self.gp_X_4xn[:,rp[2]] > 0.000000001:
print('WARNING: Solution not precise, skipping...')
continue
# Compute the sum of distances from this plane
distances2 = np.power(gp_1x4 * self.gp_X_4xn, 2)
dist2_sum = np.sum(distances2, axis=1)
if dist2_sum[0,0] < dist2_sum_min:
print('New min distance sum: ' + str(dist2_sum[0,0]))
dist2_sum_min = dist2_sum[0,0]
gp_1x4_max = gp_1x4
print('-- RANSAC FINISHED')
print('Estimated ground plane: ' + str(gp_1x4_max))
print('Sum of distances: ' + str(dist2_sum_min) + ', ' + str(dist2_sum_min/num_points) + ' per point')
# Show a plot of the plane
show_X_and_gp(self.gp_X_4xn, gp_1x4_max)
return gp_1x4_max
####################################################################################################
# MAIN #
####################################################################################################
def parse_arguments():
"""
Parse input options of the script.
"""
parser = argparse.ArgumentParser(description='Convert KITTI label files into BBTXT.')
parser.add_argument('path_labels', metavar='path_labels', type=str,
help='Path to the "label_2" folder of the KITTI dataset')
args = parser.parse_args()
if not os.path.exists(args.path_labels):
print('Input path "%s" does not exist!'%(args.path_labels))
parser.print_help()
exit(1)
return args
def main():
args = parse_arguments()
gpe = GroundPlaneEstimator(args.path_labels)
gpe.run_estimation()
if __name__ == '__main__':
main()
|
[
"numpy.meshgrid",
"matplotlib.pyplot.show",
"argparse.ArgumentParser",
"numpy.sum",
"shared.geometry.t3x1",
"numpy.power",
"numpy.cross",
"os.path.exists",
"numpy.zeros",
"matplotlib.pyplot.figure",
"numpy.asmatrix",
"numpy.arange",
"numpy.array",
"shared.geometry.R3x3_y",
"os.path.join",
"os.listdir"
] |
[((2022, 2046), 'numpy.cross', 'np.cross', (['l1', 'l2'], {'axis': '(0)'}), '(l1, l2, axis=0)\n', (2030, 2046), True, 'import numpy as np\n'), ((2129, 2187), 'numpy.asmatrix', 'np.asmatrix', (['[normal[0, 0], normal[1, 0], normal[2, 0], d]'], {}), '([normal[0, 0], normal[1, 0], normal[2, 0], d])\n', (2140, 2187), True, 'import numpy as np\n'), ((2288, 2300), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2298, 2300), True, 'from matplotlib import pyplot as plt\n'), ((2492, 2513), 'numpy.arange', 'np.arange', (['(-20)', '(20)', '(1)'], {}), '(-20, 20, 1)\n', (2501, 2513), True, 'import numpy as np\n'), ((2519, 2539), 'numpy.arange', 'np.arange', (['(-1)', '(10)', '(1)'], {}), '(-1, 10, 1)\n', (2528, 2539), True, 'import numpy as np\n'), ((2548, 2565), 'numpy.meshgrid', 'np.meshgrid', (['X', 'Y'], {}), '(X, Y)\n', (2559, 2565), True, 'import numpy as np\n'), ((3260, 3270), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3268, 3270), True, 'from matplotlib import pyplot as plt\n'), ((8929, 9005), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Convert KITTI label files into BBTXT."""'}), "(description='Convert KITTI label files into BBTXT.')\n", (8952, 9005), False, 'import argparse\n'), ((2382, 2411), 'numpy.array', 'np.array', (['gp_X_4xn[2, 0:1000]'], {}), '(gp_X_4xn[2, 0:1000])\n', (2390, 2411), True, 'import numpy as np\n'), ((2412, 2441), 'numpy.array', 'np.array', (['gp_X_4xn[0, 0:1000]'], {}), '(gp_X_4xn[0, 0:1000])\n', (2420, 2441), True, 'import numpy as np\n'), ((2442, 2472), 'numpy.array', 'np.array', (['(-gp_X_4xn[1, 0:1000])'], {}), '(-gp_X_4xn[1, 0:1000])\n', (2450, 2472), True, 'import numpy as np\n'), ((6539, 6648), 'numpy.asmatrix', 'np.asmatrix', (['[[l / 2, -l / 2, l / 2, -l / 2], [0, 0, 0, 0], [-w / 2, -w / 2, w / 2, w / \n 2], [1, 1, 1, 1]]'], {}), '([[l / 2, -l / 2, l / 2, -l / 2], [0, 0, 0, 0], [-w / 2, -w / 2,\n w / 2, w / 2], [1, 1, 1, 1]])\n', (6550, 6648), True, 'import numpy as np\n'), 
((9178, 9210), 'os.path.exists', 'os.path.exists', (['args.path_labels'], {}), '(args.path_labels)\n', (9192, 9210), False, 'import os\n'), ((5096, 5129), 'os.path.join', 'os.path.join', (['self.path_labels', 'f'], {}), '(self.path_labels, f)\n', (5108, 5129), False, 'import os\n'), ((7341, 7357), 'numpy.zeros', 'np.zeros', (['(1, 4)'], {}), '((1, 4))\n', (7349, 7357), True, 'import numpy as np\n'), ((8020, 8055), 'numpy.power', 'np.power', (['(gp_1x4 * self.gp_X_4xn)', '(2)'], {}), '(gp_1x4 * self.gp_X_4xn, 2)\n', (8028, 8055), True, 'import numpy as np\n'), ((8071, 8097), 'numpy.sum', 'np.sum', (['distances2'], {'axis': '(1)'}), '(distances2, axis=1)\n', (8077, 8097), True, 'import numpy as np\n'), ((4792, 4820), 'os.listdir', 'os.listdir', (['self.path_labels'], {}), '(self.path_labels)\n', (4802, 4820), False, 'import os\n'), ((6808, 6818), 'shared.geometry.R3x3_y', 'R3x3_y', (['ry'], {}), '(ry)\n', (6814, 6818), False, 'from shared.geometry import R3x3_y, t3x1, Rt4x4\n'), ((6820, 6836), 'shared.geometry.t3x1', 't3x1', (['cx', 'cy', 'cz'], {}), '(cx, cy, cz)\n', (6824, 6836), False, 'from shared.geometry import R3x3_y, t3x1, Rt4x4\n'), ((4846, 4879), 'os.path.join', 'os.path.join', (['self.path_labels', 'f'], {}), '(self.path_labels, f)\n', (4858, 4879), False, 'import os\n')]
|
from setuptools import setup, find_packages
install_requires = ['cffi>=1.5.2']
setup(
name='pyVulkan',
version='0.9',
description='vulkan API bindings for Python',
author='bglgwyng',
author_email='<EMAIL>',
packages=find_packages(),
package_data={'': ['*.h']},
install_requires=install_requires,
url='https://github.com/bglgwyng/pyVulkan',
license='BSD',
keywords='Graphics,3D,Vulkan,cffi',
classifiers=[
"""License :: OSI Approved :: BSD License""",
"""Programming Language :: Python""",
"""Topic :: Multimedia :: Graphics :: 3D Rendering""",
"""Topic :: Software Development :: Libraries :: Python Modules""",
"""Intended Audience :: Developers""",
],
)
|
[
"setuptools.find_packages"
] |
[((242, 257), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (255, 257), False, 'from setuptools import setup, find_packages\n')]
|
# showing the list of database
# Hiding user name and password
from getpass import getpass
import config
from mysql.connector import connect,Error
#Making connection with SQL
try:
with connect(
host="localhost",
user=config.username,
password =config.password,
) as connection:
show_db_query = "SHOW DATABASES"
with connection.cursor() as cursor:
cursor.execute(show_db_query)
for db in cursor:
print(db)
##
except Error as e:
print("Current Error",e)
|
[
"mysql.connector.connect"
] |
[((190, 263), 'mysql.connector.connect', 'connect', ([], {'host': '"""localhost"""', 'user': 'config.username', 'password': 'config.password'}), "(host='localhost', user=config.username, password=config.password)\n", (197, 263), False, 'from mysql.connector import connect, Error\n')]
|
import pytest
from examples.reqres import ReqRes, User, UserDetail
@pytest.fixture
def client():
return ReqRes()
def test_reg_res_client_should_be_able_to_list_users(client: ReqRes):
users = client.users()
assert users.page == 1
assert len(users.data)
def test_reg_res_client_should_be_able_to_get_second_page(client: ReqRes):
users = client.users.params(page=2)()
assert users.page == 2
def test_reg_res_client_per_page(client: ReqRes):
users = client.users.params(per_page=2)()
assert len(users.data) == 2
def test_rag_res_one_user(client: ReqRes):
user = client.user(id=3)
assert isinstance(user, UserDetail)
|
[
"examples.reqres.ReqRes"
] |
[((111, 119), 'examples.reqres.ReqRes', 'ReqRes', ([], {}), '()\n', (117, 119), False, 'from examples.reqres import ReqRes, User, UserDetail\n')]
|
import sys
sys.path.append('C:/Users/dmccloskey-sbrg/Google Drive/SBaaS_base')
from SBaaS_base.postgresql_settings import postgresql_settings
from SBaaS_base.postgresql_orm import postgresql_orm
# read in the settings file
filename = 'C:/Users/dmccloskey-sbrg/Google Drive/SBaaS_base/settings.ini';
pg_settings = postgresql_settings(filename);
# connect to the database from the settings file
pg_orm = postgresql_orm();
pg_orm.set_sessionFromSettings(pg_settings.database_settings);
session = pg_orm.get_session();
engine = pg_orm.get_engine();
# your app...
path2Lims = 'C:/Users/dmccloskey-sbrg/Google Drive/SBaaS_LIMS';
sys.path.append(path2Lims)
sys.path.append('C:/Users/dmccloskey-sbrg/Google Drive/SBaaS_quantification')
sys.path.append('C:/Users/dmccloskey-sbrg/Documents/GitHub/io_utilities')
sys.path.append('C:/Users/dmccloskey-sbrg/Documents/GitHub/calculate_utilities')
sys.path.append('C:/Users/dmccloskey-sbrg/Documents/GitHub/quantification_analysis')
## initialize the biologicalMaterial_geneReferences
#from SBaaS_LIMS.lims_biologicalMaterial_io import lims_biologicalMaterial_io
#limsbiomat = lims_biologicalMaterial_io(session,engine,pg_settings.datadir_settings);
#limsbiomat.drop_lims_biologicalMaterial();
#limsbiomat.initialize_lims_biologicalMaterial();
#limsbiomat.reset_lims_biologicalMaterial();
#limsbiomat.import_biologicalMaterialMassVolumeConversion_add(path2Lims+'/'+'data/tests/analysis_quantification/140826_biologicalMaterial_massVolumeConversion_MG1655.csv');
## initialize the sample information
#from SBaaS_LIMS.lims_sample_execute import lims_sample_execute
#limssample = lims_sample_execute(session,engine,pg_settings.datadir_settings);
#limssample.drop_lims_sample();
#limssample.initialize_lims_sample();
#limssample.reset_lims_sample();
## initialize the experiment
#from SBaaS_LIMS.lims_experiment_execute import lims_experiment_execute
#limsexperiment = lims_experiment_execute(session,engine,pg_settings.datadir_settings);
#limsexperiment.drop_lims_experimentTypes();
#limsexperiment.initialize_lims_experimentTypes();
#limsexperiment.reset_lims_experimentTypes();
#limsexperiment.drop_lims_experiment();
#limsexperiment.initialize_lims_experiment();
#limsexperiment.reset_lims_experiment('chemoCLim01');
#limsexperiment.execute_deleteExperiments(['chemoCLim01']);
#limsexperiment.execute_makeExperimentFromSampleFile('data/tests/analysis_quantification/150727_Quantification_chemoCLim01_sampleFile01.csv',1,[10.0]);
#limsexperiment.execute_makeExperimentFromCalibrationFile('data/tests/analysis_quantification/150805_Quantification_chemoCLim01_calibrationFile01.csv');
## export the analyst acquisition batch files
#limsexperiment.execute_makeBatchFile('chemoCLim01', '150805','data/tests/analysis_quantification/150727_Quantification_chemoCLim01.txt',experiment_type_I=4);
#make theresults table
from SBaaS_quantification.stage01_quantification_MQResultsTable_execute import stage01_quantification_MQResultsTable_execute
exmqrt01 = stage01_quantification_MQResultsTable_execute(session,engine,pg_settings.datadir_settings);
exmqrt01.drop_dataStage01_quantification_MQResultsTable();
exmqrt01.initialize_dataStage01_quantification_MQResultsTable();
exmqrt01.execute_deleteExperimentFromMQResultsTable('chemoCLim01',sample_types_I = ['Quality Control','Unknown','Standard','Blank'])
exmqrt01.import_dataStage01MQResultsTable_add('data/tests/analysis_quantification/150805_140521_Quantification_chemoCLim01_calibrators01.csv');
exmqrt01.import_dataStage01MQResultsTable_add('data/tests/analysis_quantification/150805_Quantification_chemoCLim01_samples02.csv');
exmqrt01.export_dataStage01MQResultsTable_metricPlot_js('chemoCLim01',component_names_I = ['fdp.fdp_1.Light'],measurement_I='calculated_concentration');
|
[
"sys.path.append",
"SBaaS_base.postgresql_orm.postgresql_orm",
"SBaaS_base.postgresql_settings.postgresql_settings",
"SBaaS_quantification.stage01_quantification_MQResultsTable_execute.stage01_quantification_MQResultsTable_execute"
] |
[((11, 78), 'sys.path.append', 'sys.path.append', (['"""C:/Users/dmccloskey-sbrg/Google Drive/SBaaS_base"""'], {}), "('C:/Users/dmccloskey-sbrg/Google Drive/SBaaS_base')\n", (26, 78), False, 'import sys\n'), ((314, 343), 'SBaaS_base.postgresql_settings.postgresql_settings', 'postgresql_settings', (['filename'], {}), '(filename)\n', (333, 343), False, 'from SBaaS_base.postgresql_settings import postgresql_settings\n'), ((404, 420), 'SBaaS_base.postgresql_orm.postgresql_orm', 'postgresql_orm', ([], {}), '()\n', (418, 420), False, 'from SBaaS_base.postgresql_orm import postgresql_orm\n'), ((626, 652), 'sys.path.append', 'sys.path.append', (['path2Lims'], {}), '(path2Lims)\n', (641, 652), False, 'import sys\n'), ((653, 730), 'sys.path.append', 'sys.path.append', (['"""C:/Users/dmccloskey-sbrg/Google Drive/SBaaS_quantification"""'], {}), "('C:/Users/dmccloskey-sbrg/Google Drive/SBaaS_quantification')\n", (668, 730), False, 'import sys\n'), ((731, 804), 'sys.path.append', 'sys.path.append', (['"""C:/Users/dmccloskey-sbrg/Documents/GitHub/io_utilities"""'], {}), "('C:/Users/dmccloskey-sbrg/Documents/GitHub/io_utilities')\n", (746, 804), False, 'import sys\n'), ((805, 890), 'sys.path.append', 'sys.path.append', (['"""C:/Users/dmccloskey-sbrg/Documents/GitHub/calculate_utilities"""'], {}), "('C:/Users/dmccloskey-sbrg/Documents/GitHub/calculate_utilities'\n )\n", (820, 890), False, 'import sys\n'), ((886, 975), 'sys.path.append', 'sys.path.append', (['"""C:/Users/dmccloskey-sbrg/Documents/GitHub/quantification_analysis"""'], {}), "(\n 'C:/Users/dmccloskey-sbrg/Documents/GitHub/quantification_analysis')\n", (901, 975), False, 'import sys\n'), ((2988, 3085), 'SBaaS_quantification.stage01_quantification_MQResultsTable_execute.stage01_quantification_MQResultsTable_execute', 'stage01_quantification_MQResultsTable_execute', (['session', 'engine', 'pg_settings.datadir_settings'], {}), '(session, engine, pg_settings.\n datadir_settings)\n', (3033, 3085), False, 'from 
SBaaS_quantification.stage01_quantification_MQResultsTable_execute import stage01_quantification_MQResultsTable_execute\n')]
|
from collections import namedtuple
import torch
import torch.nn as nn
import torch.nn.functional as F
class PatchOverlapEmbeddings(nn.Module):
def __init__(self, input_channels, image_sizes, stride, patch_size, embed_size):
super().__init__()
assert isinstance(
image_sizes, tuple
), f"Image size is not a tuple. Got {type(image_sizes)}"
self.patch_height_resolution = image_sizes[0] // patch_size
self.patch_width_resoultion = image_sizes[1] // patch_size
self.number_of_patches = (
self.patch_height_resolution * self.patch_width_resoultion
)
self.image_sizes = image_sizes
self.embed_size = embed_size
# assert self.number_of_patches * patch_size ** 2 == image_sizes[0] * image_sizes[1]
self.conv = nn.Conv2d(
in_channels=input_channels,
out_channels=embed_size,
kernel_size=patch_size,
stride=stride,
padding=(patch_size // 2, patch_size // 2),
)
self.norm = nn.LayerNorm(embed_size)
def forward(self, x):
# extract and embed patches
x = self.conv(x)
b, c, h_new, w_new = x.shape
x = x.reshape(b, self.embed_size, -1).permute(0, 2, 1)
# normalize
x = self.norm(x)
return x, h_new, w_new
class ReducedSelfAttention(nn.Module):
def __init__(self, num_heads, embed_size, reduction, dropout):
super().__init__()
self.num_heads = num_heads
self.embed_size = embed_size
self.reduction = reduction
assert (
self.embed_size % self.num_heads == 0
), "Embed size is not divisible by num heads"
self.head_embed_size = self.embed_size // self.num_heads
self.query = nn.Linear(self.embed_size, self.embed_size)
self.key = nn.Linear(self.embed_size, self.embed_size)
self.value = nn.Linear(self.embed_size, self.embed_size)
self.dropout = nn.Dropout(dropout)
self.reduction = reduction
if reduction > 1:
self.reductor = nn.Conv2d(
in_channels=embed_size,
out_channels=embed_size,
kernel_size=reduction,
stride=reduction,
)
self.ln = nn.LayerNorm(embed_size)
self.proj = nn.Linear(in_features=embed_size, out_features=embed_size)
def transpose_for_scores(self, x):
new_x_shape = x.size()[:-1] + (self.num_heads, self.head_embed_size)
x = x.view(*new_x_shape)
return x.permute(0, 2, 1, 3)
def forward(self, x, h, w):
batch_size, L, C = x.shape
query = self.transpose_for_scores(self.query(x)) # nhqd
kv = x
if self.reduction > 1:
kv = x.permute(0, 2, 1).reshape(batch_size, C, h, w)
kv = self.reductor(kv)
kv = kv.reshape(batch_size, C, -1).permute(0, 2, 1)
kv = self.ln(kv)
key = self.transpose_for_scores(self.key(kv)) # nhkd
value = self.transpose_for_scores(self.value(kv)) # nhvd
raw_attention = query @ key.transpose(2, 3) # nhqk
raw_attention = raw_attention / self.head_embed_size ** 0.5
attention = nn.functional.softmax(raw_attention, dim=-1)
attention = self.dropout(attention)
attention = attention @ value
attention = attention.permute(0, 2, 1, 3).contiguous()
attention = attention.view(batch_size, L, C)
attention = self.proj(attention)
attention = self.dropout(attention)
return attention
class PositionDWConv(nn.Module):
def __init__(self, hidden_dim):
super().__init__()
self.conv = nn.Conv2d(
hidden_dim, hidden_dim, kernel_size=3, padding=1, groups=hidden_dim
)
def forward(self, x, h, w):
batch, L, C = x.shape
x = x.permute(0, 2, 1).view(batch, C, h, w)
x = self.conv(x)
return x.view(batch, C, L).permute(0, 2, 1)
class MixFFN(nn.Module):
def __init__(self, embed_size, mlp_expansion, dropout):
super().__init__()
self.fc1 = nn.Linear(embed_size, embed_size * mlp_expansion)
self.fc2 = nn.Linear(embed_size * mlp_expansion, embed_size)
self.position_conv = PositionDWConv(embed_size * mlp_expansion)
self.activation = nn.GELU()
self.dropout = nn.Dropout(dropout)
def forward(self, x, h, w):
x = self.fc1(x)
x = self.activation(self.position_conv(x, h, w))
x = self.fc2(self.dropout(x))
x = self.dropout(x)
return x
class SegformerBlock(nn.Module):
def __init__(self, num_heads, embed_size, reduction, mlp_expansion, dropout):
super().__init__()
self.attention = ReducedSelfAttention(
num_heads=num_heads,
embed_size=embed_size,
reduction=reduction,
dropout=dropout,
)
self.mix_ffn = MixFFN(embed_size, mlp_expansion, dropout)
self.ln1 = nn.LayerNorm(embed_size)
self.ln2 = nn.LayerNorm(embed_size)
def forward(self, x, h, w):
x = x + self.attention(self.ln1(x), h, w)
x = x + self.mix_ffn(self.ln2(x), h, w)
return x
class SegformerLayer(nn.Module):
def __init__(
self,
num_layers,
num_heads,
embed_size,
reduction,
mlp_expansion,
dropout,
):
super().__init__()
self.layers = nn.ModuleList()
for i in range(num_layers):
layer = SegformerBlock(
num_heads=num_heads,
embed_size=embed_size,
reduction=reduction,
mlp_expansion=mlp_expansion,
dropout=dropout,
)
self.layers.add_module(f"layer_{i}", layer)
def forward(self, x, h, w):
for layer in self.layers:
x = layer(x, h, w)
return x
PatchConfig = namedtuple(
"Patch", ["input_channels", "embed_size", "patch_size", "stride", "padding"]
)
class SegformerLayersConfig:
def __init__(
self,
patch_merge_config: PatchConfig,
num_layers,
num_heads,
reduction,
expansion,
dropout,
) -> None:
self.patch_merge_config = patch_merge_config
self.num_layers = num_layers
self.num_heads = num_heads
self.reduction = reduction
self.mlp_expansion = expansion
self.dropout = dropout
class SegformerEncoder(nn.Module):
def __init__(
self,
image_sizes,
layer_configurations,
):
super().__init__()
assert isinstance(
image_sizes, tuple
), f"Image size is not a tuple. Got {type(image_sizes)}"
self.layer_configuration = layer_configurations
self.image_sizes = image_sizes
self.embed_sizes = []
self.segformer_layers = nn.ModuleList()
self.patch_merges = nn.ModuleList()
self.stage_norms = nn.ModuleList()
for indx, layer_configuration in enumerate(layer_configurations, 1):
patch_config = layer_configuration.patch_merge_config
H_reducted, W_reducted = (
self.image_sizes[0] // 2 ** indx,
self.image_sizes[1] // 2 ** indx,
)
embed_size = patch_config.embed_size
patch_merger = PatchOverlapEmbeddings(
input_channels=patch_config.input_channels,
image_sizes=(H_reducted, W_reducted),
stride=patch_config.stride,
patch_size=patch_config.patch_size,
embed_size=embed_size,
)
self.patch_merges.append(patch_merger)
segformer_layer = SegformerLayer(
num_layers=layer_configuration.num_layers,
num_heads=layer_configuration.num_heads,
embed_size=embed_size,
reduction=layer_configuration.reduction,
mlp_expansion=layer_configuration.mlp_expansion,
dropout=layer_configuration.dropout,
)
self.segformer_layers.append(segformer_layer)
self.stage_norms.append(nn.LayerNorm(embed_size))
self.embed_sizes.append(embed_size)
def forward(self, x):
hidden_states = []
b, c, h, w = x.shape
for layer_num in range(len(self.segformer_layers)):
patch_merger = self.patch_merges[layer_num]
x, cur_h, cur_w = patch_merger(x)
x = self.segformer_layers[layer_num](x, cur_h, cur_w) # N, L, C
x = self.stage_norms[layer_num](x)
x = x.permute(0, 2, 1).reshape(b, -1, cur_h, cur_w)
hidden_states.append(x)
return hidden_states
class MLPDecoder(nn.Module):
def __init__(self, in_features, out_features):
super().__init__()
self.ff = nn.Linear(in_features, out_features)
def forward(self, x):
batch, c, h, w = x.shape
x = x.flatten(2).permute(0, 2, 1)
x = self.ff(x)
x = (
x.permute(0, 2, 1).contiguous().view(batch, -1, h, w)
) # batch, out_features, h, w
return x
class SegformerDecoder(nn.Module):
def __init__(self, embed_sizes, decoder_hidden_state, num_labels, dropout):
super().__init__()
self.mlps = nn.ModuleList()
for layer_embed_dim in embed_sizes:
self.mlps.append(MLPDecoder(layer_embed_dim, decoder_hidden_state))
self.linear_fuse = nn.Conv2d(
in_channels=decoder_hidden_state * len(embed_sizes),
out_channels=decoder_hidden_state,
kernel_size=1,
bias=True,
)
self.batch_norm = nn.BatchNorm2d(decoder_hidden_state)
self.activation = nn.ReLU()
self.dropout = nn.Dropout(dropout)
self.classifier = nn.Conv2d(decoder_hidden_state, num_labels, kernel_size=1)
def forward(self, hidden_states):
batch_size, c, h_4, w_4 = hidden_states[0].shape
unified_features = []
for indx, hidden_state in enumerate(hidden_states):
unified_state = self.mlps[indx](hidden_state)
unified_state = F.interpolate(
unified_state, size=(h_4, w_4), mode="bilinear", align_corners=False
)
unified_features.append(unified_state)
fused_features = torch.cat(unified_features, dim=1)
fused_features = self.batch_norm(self.linear_fuse(fused_features))
fused_features = self.dropout(self.activation(fused_features))
fused_features = self.classifier(fused_features)
return fused_features
class Segformer(nn.Module):
def __init__(self, encoder, decoder):
super().__init__()
self.encoder = encoder
self.decoder = decoder
def forward(self, x):
hidden_states = self.encoder(x)
output = self.decoder(hidden_states)
return output
def segformer_b0(image_sizes, num_labels):
encoder = mitb0(image_sizes)
decoder = SegformerDecoder(encoder.embed_sizes, 256, num_labels, 0.0)
return Segformer(encoder, decoder)
def segformer_b1(image_sizes, num_labels):
encoder = mitb1(image_sizes)
decoder = SegformerDecoder(encoder.embed_sizes, 256, num_labels, 0.0)
return Segformer(encoder, decoder)
def segformer_b2(image_sizes, num_labels):
encoder = mitb2(image_sizes)
decoder = SegformerDecoder(encoder.embed_sizes, 768, num_labels, 0.0)
return Segformer(encoder, decoder)
def mitb0(image_sizes):
layer_configurations = [
SegformerLayersConfig(PatchConfig(3, 32, 7, 4, 3), 2, 1, 8, 8, 0.0),
SegformerLayersConfig(PatchConfig(32, 64, 3, 2, 1), 2, 2, 4, 8, 0.0),
SegformerLayersConfig(PatchConfig(64, 160, 3, 2, 1), 2, 5, 2, 4, 0.0),
SegformerLayersConfig(PatchConfig(160, 256, 3, 2, 1), 2, 8, 1, 4, 0.0),
]
return SegformerEncoder(image_sizes, layer_configurations)
def mitb1(image_sizes):
    """MiT-B1 encoder: same stage depths as B0 but wider embeddings."""
    # (patch spec, num_layers, num_heads, reduction, expansion, dropout) per stage
    stage_specs = [
        (PatchConfig(3, 64, 7, 4, 3), 2, 1, 8, 8, 0.0),
        (PatchConfig(64, 128, 3, 2, 1), 2, 2, 4, 8, 0.0),
        (PatchConfig(128, 320, 3, 2, 1), 2, 5, 2, 4, 0.0),
        (PatchConfig(320, 512, 3, 2, 1), 2, 8, 1, 4, 0.0),
    ]
    layer_configurations = [
        SegformerLayersConfig(
            patch,
            num_layers=depth,
            num_heads=heads,
            reduction=sr,
            expansion=exp,
            dropout=drop,
        )
        for patch, depth, heads, sr, exp, drop in stage_specs
    ]
    return SegformerEncoder(image_sizes, layer_configurations)
def mitb2(image_sizes):
    """MiT-B2 encoder: B1 widths with deeper stages (3/3/6/3 layers)."""
    # (patch spec, num_layers, num_heads, reduction, expansion, dropout) per stage
    stage_specs = [
        (PatchConfig(3, 64, 7, 4, 3), 3, 1, 8, 8, 0.0),
        (PatchConfig(64, 128, 3, 2, 1), 3, 2, 4, 8, 0.0),
        (PatchConfig(128, 320, 3, 2, 1), 6, 5, 2, 4, 0.0),
        (PatchConfig(320, 512, 3, 2, 1), 3, 8, 1, 4, 0.0),
    ]
    layer_configurations = [
        SegformerLayersConfig(
            patch,
            num_layers=depth,
            num_heads=heads,
            reduction=sr,
            expansion=exp,
            dropout=drop,
        )
        for patch, depth, heads, sr, exp, drop in stage_specs
    ]
    return SegformerEncoder(image_sizes, layer_configurations)
|
[
"torch.nn.Dropout",
"torch.nn.ReLU",
"torch.nn.ModuleList",
"torch.nn.Conv2d",
"torch.cat",
"torch.nn.functional.softmax",
"torch.nn.LayerNorm",
"torch.nn.GELU",
"torch.nn.BatchNorm2d",
"collections.namedtuple",
"torch.nn.Linear",
"torch.nn.functional.interpolate"
] |
[((5968, 6060), 'collections.namedtuple', 'namedtuple', (['"""Patch"""', "['input_channels', 'embed_size', 'patch_size', 'stride', 'padding']"], {}), "('Patch', ['input_channels', 'embed_size', 'patch_size', 'stride',\n 'padding'])\n", (5978, 6060), False, 'from collections import namedtuple\n'), ((822, 972), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': 'input_channels', 'out_channels': 'embed_size', 'kernel_size': 'patch_size', 'stride': 'stride', 'padding': '(patch_size // 2, patch_size // 2)'}), '(in_channels=input_channels, out_channels=embed_size, kernel_size=\n patch_size, stride=stride, padding=(patch_size // 2, patch_size // 2))\n', (831, 972), True, 'import torch.nn as nn\n'), ((1059, 1083), 'torch.nn.LayerNorm', 'nn.LayerNorm', (['embed_size'], {}), '(embed_size)\n', (1071, 1083), True, 'import torch.nn as nn\n'), ((1798, 1841), 'torch.nn.Linear', 'nn.Linear', (['self.embed_size', 'self.embed_size'], {}), '(self.embed_size, self.embed_size)\n', (1807, 1841), True, 'import torch.nn as nn\n'), ((1861, 1904), 'torch.nn.Linear', 'nn.Linear', (['self.embed_size', 'self.embed_size'], {}), '(self.embed_size, self.embed_size)\n', (1870, 1904), True, 'import torch.nn as nn\n'), ((1926, 1969), 'torch.nn.Linear', 'nn.Linear', (['self.embed_size', 'self.embed_size'], {}), '(self.embed_size, self.embed_size)\n', (1935, 1969), True, 'import torch.nn as nn\n'), ((1994, 2013), 'torch.nn.Dropout', 'nn.Dropout', (['dropout'], {}), '(dropout)\n', (2004, 2013), True, 'import torch.nn as nn\n'), ((2351, 2409), 'torch.nn.Linear', 'nn.Linear', ([], {'in_features': 'embed_size', 'out_features': 'embed_size'}), '(in_features=embed_size, out_features=embed_size)\n', (2360, 2409), True, 'import torch.nn as nn\n'), ((3247, 3291), 'torch.nn.functional.softmax', 'nn.functional.softmax', (['raw_attention'], {'dim': '(-1)'}), '(raw_attention, dim=-1)\n', (3268, 3291), True, 'import torch.nn as nn\n'), ((3721, 3799), 'torch.nn.Conv2d', 'nn.Conv2d', (['hidden_dim', 'hidden_dim'], 
{'kernel_size': '(3)', 'padding': '(1)', 'groups': 'hidden_dim'}), '(hidden_dim, hidden_dim, kernel_size=3, padding=1, groups=hidden_dim)\n', (3730, 3799), True, 'import torch.nn as nn\n'), ((4147, 4196), 'torch.nn.Linear', 'nn.Linear', (['embed_size', '(embed_size * mlp_expansion)'], {}), '(embed_size, embed_size * mlp_expansion)\n', (4156, 4196), True, 'import torch.nn as nn\n'), ((4216, 4265), 'torch.nn.Linear', 'nn.Linear', (['(embed_size * mlp_expansion)', 'embed_size'], {}), '(embed_size * mlp_expansion, embed_size)\n', (4225, 4265), True, 'import torch.nn as nn\n'), ((4364, 4373), 'torch.nn.GELU', 'nn.GELU', ([], {}), '()\n', (4371, 4373), True, 'import torch.nn as nn\n'), ((4397, 4416), 'torch.nn.Dropout', 'nn.Dropout', (['dropout'], {}), '(dropout)\n', (4407, 4416), True, 'import torch.nn as nn\n'), ((5030, 5054), 'torch.nn.LayerNorm', 'nn.LayerNorm', (['embed_size'], {}), '(embed_size)\n', (5042, 5054), True, 'import torch.nn as nn\n'), ((5074, 5098), 'torch.nn.LayerNorm', 'nn.LayerNorm', (['embed_size'], {}), '(embed_size)\n', (5086, 5098), True, 'import torch.nn as nn\n'), ((5488, 5503), 'torch.nn.ModuleList', 'nn.ModuleList', ([], {}), '()\n', (5501, 5503), True, 'import torch.nn as nn\n'), ((6942, 6957), 'torch.nn.ModuleList', 'nn.ModuleList', ([], {}), '()\n', (6955, 6957), True, 'import torch.nn as nn\n'), ((6986, 7001), 'torch.nn.ModuleList', 'nn.ModuleList', ([], {}), '()\n', (6999, 7001), True, 'import torch.nn as nn\n'), ((7029, 7044), 'torch.nn.ModuleList', 'nn.ModuleList', ([], {}), '()\n', (7042, 7044), True, 'import torch.nn as nn\n'), ((8942, 8978), 'torch.nn.Linear', 'nn.Linear', (['in_features', 'out_features'], {}), '(in_features, out_features)\n', (8951, 8978), True, 'import torch.nn as nn\n'), ((9404, 9419), 'torch.nn.ModuleList', 'nn.ModuleList', ([], {}), '()\n', (9417, 9419), True, 'import torch.nn as nn\n'), ((9781, 9817), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['decoder_hidden_state'], {}), '(decoder_hidden_state)\n', (9795, 
9817), True, 'import torch.nn as nn\n'), ((9844, 9853), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (9851, 9853), True, 'import torch.nn as nn\n'), ((9877, 9896), 'torch.nn.Dropout', 'nn.Dropout', (['dropout'], {}), '(dropout)\n', (9887, 9896), True, 'import torch.nn as nn\n'), ((9923, 9981), 'torch.nn.Conv2d', 'nn.Conv2d', (['decoder_hidden_state', 'num_labels'], {'kernel_size': '(1)'}), '(decoder_hidden_state, num_labels, kernel_size=1)\n', (9932, 9981), True, 'import torch.nn as nn\n'), ((10446, 10480), 'torch.cat', 'torch.cat', (['unified_features'], {'dim': '(1)'}), '(unified_features, dim=1)\n', (10455, 10480), False, 'import torch\n'), ((2104, 2208), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': 'embed_size', 'out_channels': 'embed_size', 'kernel_size': 'reduction', 'stride': 'reduction'}), '(in_channels=embed_size, out_channels=embed_size, kernel_size=\n reduction, stride=reduction)\n', (2113, 2208), True, 'import torch.nn as nn\n'), ((2305, 2329), 'torch.nn.LayerNorm', 'nn.LayerNorm', (['embed_size'], {}), '(embed_size)\n', (2317, 2329), True, 'import torch.nn as nn\n'), ((10255, 10342), 'torch.nn.functional.interpolate', 'F.interpolate', (['unified_state'], {'size': '(h_4, w_4)', 'mode': '"""bilinear"""', 'align_corners': '(False)'}), "(unified_state, size=(h_4, w_4), mode='bilinear',\n align_corners=False)\n", (10268, 10342), True, 'import torch.nn.functional as F\n'), ((8241, 8265), 'torch.nn.LayerNorm', 'nn.LayerNorm', (['embed_size'], {}), '(embed_size)\n', (8253, 8265), True, 'import torch.nn as nn\n')]
|
import torch
import torch.nn as nn
from deepymod_torch.network import Fitting, Library
class DeepMod(nn.Module):
    '''Class based interface for deepmod: network -> library -> sparse fit.'''

    def __init__(self, n_in, hidden_dims, n_out, library_function, library_args):
        super().__init__()
        self.network = self.build_network(n_in, hidden_dims, n_out)
        self.library = Library(library_function, library_args)
        self.fit = self.build_fit_layer(n_in, n_out, library_function, library_args)

    def forward(self, input):
        """Run the approximation network, the library and the fitting layer."""
        prediction = self.network(input)
        time_deriv, theta = self.library((prediction, input))
        sparse_theta, coeff_vector = self.fit(theta)
        return prediction, time_deriv, sparse_theta, coeff_vector

    def build_network(self, n_in, hidden_dims, n_out):
        """Fully connected tanh network mapping n_in features to n_out."""
        widths = [n_in, *hidden_dims, n_out]
        layers = []
        for fan_in, fan_out in zip(widths[:-1], widths[1:]):
            layers.extend((nn.Linear(fan_in, fan_out), nn.Tanh()))
        # Drop the trailing activation so the output layer is linear.
        return nn.Sequential(*layers[:-1])

    def build_fit_layer(self, n_in, n_out, library_function, library_args):
        """Size the Fitting layer by probing the library with a dummy pass."""
        # NOTE(review): library_function/library_args are unused here; kept
        # for interface stability.
        probe = torch.ones((1, n_in), dtype=torch.float32, requires_grad=True)
        n_terms = self.library((self.network(probe), probe))[1].shape[1]
        return Fitting(n_terms, n_out)

    # Convenience accessors.
    def network_parameters(self):
        return self.network.parameters()

    def coeff_vector(self):
        return self.fit.coeff_vector.parameters()
|
[
"torch.ones",
"torch.nn.Sequential",
"torch.nn.Tanh",
"deepymod_torch.network.Fitting",
"deepymod_torch.network.Library",
"torch.nn.Linear"
] |
[((361, 400), 'deepymod_torch.network.Library', 'Library', (['library_function', 'library_args'], {}), '(library_function, library_args)\n', (368, 400), False, 'from deepymod_torch.network import Fitting, Library\n'), ((1092, 1115), 'torch.nn.Sequential', 'nn.Sequential', (['*network'], {}), '(*network)\n', (1105, 1115), True, 'import torch.nn as nn\n'), ((1241, 1303), 'torch.ones', 'torch.ones', (['(1, n_in)'], {'dtype': 'torch.float32', 'requires_grad': '(True)'}), '((1, n_in), dtype=torch.float32, requires_grad=True)\n', (1251, 1303), False, 'import torch\n'), ((1444, 1467), 'deepymod_torch.network.Fitting', 'Fitting', (['n_terms', 'n_out'], {}), '(n_terms, n_out)\n', (1451, 1467), False, 'from deepymod_torch.network import Fitting, Library\n'), ((956, 973), 'torch.nn.Linear', 'nn.Linear', (['h0', 'h1'], {}), '(h0, h1)\n', (965, 973), True, 'import torch.nn as nn\n'), ((1002, 1011), 'torch.nn.Tanh', 'nn.Tanh', ([], {}), '()\n', (1009, 1011), True, 'import torch.nn as nn\n')]
|
from django.urls import path
from . import views
# Route table for the tasks app; names are used with Django's reverse().
urlpatterns = [
    # Landing page.
    path('', views.index, name='index'),
    # Task collection: list and create.
    path('tasks/', views.get_tasks, name='get_tasks'),
    path('tasks/add/', views.add_task, name='add_task'),
    # Static about page.
    path('about/', views.about, name='about'),
    # Per-task actions keyed by integer id.
    # NOTE(review): these two lack the trailing slash used by every other
    # route — confirm that is intentional (APPEND_SLASH interplay).
    path('tasks/<int:id>/edit', views.edit_task, name='edit_task'),
    path('tasks/<int:id>/delete', views.delete_task, name='delete_task')
]
|
[
"django.urls.path"
] |
[((71, 106), 'django.urls.path', 'path', (['""""""', 'views.index'], {'name': '"""index"""'}), "('', views.index, name='index')\n", (75, 106), False, 'from django.urls import path\n'), ((112, 161), 'django.urls.path', 'path', (['"""tasks/"""', 'views.get_tasks'], {'name': '"""get_tasks"""'}), "('tasks/', views.get_tasks, name='get_tasks')\n", (116, 161), False, 'from django.urls import path\n'), ((167, 218), 'django.urls.path', 'path', (['"""tasks/add/"""', 'views.add_task'], {'name': '"""add_task"""'}), "('tasks/add/', views.add_task, name='add_task')\n", (171, 218), False, 'from django.urls import path\n'), ((224, 265), 'django.urls.path', 'path', (['"""about/"""', 'views.about'], {'name': '"""about"""'}), "('about/', views.about, name='about')\n", (228, 265), False, 'from django.urls import path\n'), ((271, 333), 'django.urls.path', 'path', (['"""tasks/<int:id>/edit"""', 'views.edit_task'], {'name': '"""edit_task"""'}), "('tasks/<int:id>/edit', views.edit_task, name='edit_task')\n", (275, 333), False, 'from django.urls import path\n'), ((339, 407), 'django.urls.path', 'path', (['"""tasks/<int:id>/delete"""', 'views.delete_task'], {'name': '"""delete_task"""'}), "('tasks/<int:id>/delete', views.delete_task, name='delete_task')\n", (343, 407), False, 'from django.urls import path\n')]
|
from win32 import win32api, win32gui
def MoveCam(End_coords, window_title="File Explorer", offset=(50, 50)):
    """Bring a window to the foreground and park the cursor near End_coords.

    Args:
        End_coords: (x, y) base screen position.
        window_title: exact title of the window to focus. Defaults to the
            previously hard-coded "File Explorer" for backward compatibility.
            NOTE(review): the original comment said "focus on roblox" while
            the hard-coded title was "File Explorer" — confirm which window
            is actually intended.
        offset: (dx, dy) added to End_coords before positioning the cursor
            (was a fixed +50/+50).
    """
    # Focus the target window first so the cursor move lands in it.
    win32gui.SetForegroundWindow(win32gui.FindWindow(None, window_title))
    # Move camera
    end_x = End_coords[0] + offset[0]
    end_y = End_coords[1] + offset[1]
    win32api.SetCursorPos((end_x, end_y))
    print(f"Reached new location ({end_x, end_y})")
|
[
"win32.win32gui.FindWindow",
"win32.win32api.SetCursorPos"
] |
[((278, 315), 'win32.win32api.SetCursorPos', 'win32api.SetCursorPos', (['(end_x, end_y)'], {}), '((end_x, end_y))\n', (299, 315), False, 'from win32 import win32api, win32gui\n'), ((152, 191), 'win32.win32gui.FindWindow', 'win32gui.FindWindow', (['None', 'Window_title'], {}), '(None, Window_title)\n', (171, 191), False, 'from win32 import win32api, win32gui\n')]
|
from typing import Tuple, List
import keyboard
import pyautogui
# Mouse locations and the shortcut that jumps to each one:
# ((x, y), shortcut)
ls: List[Tuple[Tuple[int, int], str]] = [
    ((-500, 500), 'ctrl+left windows+alt+page up'),
    ((960, 540), 'ctrl+left windows+alt+page down'),
    ((1500, 500), 'ctrl+Shift+left windows+alt+page up')
]


def set_mouse_pos(pos: Tuple) -> None:
    """Drag the mouse to the given (x, y) position."""
    # NOTE(review): currently unused — the hotkeys below call
    # pyautogui.dragTo directly.
    pyautogui.dragTo(pos[0], pos[1])


# Register one hotkey per configured location.
for (target_x, target_y), hotkey in ls:
    keyboard.add_hotkey(hotkey, pyautogui.dragTo, args=(target_x, target_y))

# Block forever, dispatching the registered hotkeys.
keyboard.wait()
|
[
"pyautogui.dragTo",
"keyboard.wait",
"keyboard.add_hotkey"
] |
[((533, 548), 'keyboard.wait', 'keyboard.wait', ([], {}), '()\n', (546, 548), False, 'import keyboard\n'), ((408, 440), 'pyautogui.dragTo', 'pyautogui.dragTo', (['pos[0]', 'pos[1]'], {}), '(pos[0], pos[1])\n', (424, 440), False, 'import pyautogui\n'), ((473, 531), 'keyboard.add_hotkey', 'keyboard.add_hotkey', (['h_name', 'pyautogui.dragTo'], {'args': '(x, y)'}), '(h_name, pyautogui.dragTo, args=(x, y))\n', (492, 531), False, 'import keyboard\n')]
|
'''
Utilities used in front-end rendering
'''
import os
import urllib
import time
import logging
from operator import itemgetter
from google.appengine.api import memcache
from google.appengine.ext.webapp import template
# My modules
from macro.render.defs import *
from macro.exceptions import NoInputError
from macro.data.appengine.defs import MEMCACHED_VIEWS, MACRO_PROC_KEY, MACRO_VIEW_KEY, MEMCACHED_THROTTLE_SECONDS,MEMCACHED_MACRO_PROC
from macro.render.interpretation import TOKEN_CS_ON, TOKEN_CS_OFF
from macro.data.appengine.savedmacro import SavedMacroOps
from macro.render.interpretation import generate_cmd_html, generate_interpret_html
# Minimal HTML entity table used by escape()/unescape().
# NOTE: the previous version had been corrupted by entity decoding, leaving
# identity mappings ("&": "&"); the intended entities are restored here.
html_escape_table = {
    "&": "&amp;",
    '"': "&quot;",
    "'": "&apos;",
    ">": "&gt;",
    "<": "&lt;",
}


def escape(text):
    """Produce entities within text."""
    return "".join(html_escape_table.get(c, c) for c in text)
def unescape(s):
    """Reverse escape() for angle brackets and ampersands.

    NOTE: the previous version had been corrupted by entity decoding,
    leaving identity replacements; the intended entities are restored.
    """
    s = s.replace("&lt;", "<")
    s = s.replace("&gt;", ">")
    # this has to be last: otherwise "&amp;lt;" would double-unescape
    s = s.replace("&amp;", "&")
    return s
# Generate form error text
def get_form_error(field):
    """Return the save-form validation message for *field*.

    FORM_SAVE_REQ_ERROR comes from macro.render.defs (star import);
    presumably a dict keyed by field name — an unknown field would raise.
    """
    return FORM_SAVE_REQ_ERROR[field]
# Internal helper function translating field maps into template arrays.
# The parameter is the set of previously selected classes.
def translate_classmap(sel=None, in_list=CLASS_LIST):
    '''
    Helper to translate classes into a map for a template.

    sel: previously selected class names (default: none selected).
         The old `sel=[]` mutable default is replaced with None to avoid
         the shared-default pitfall.
    in_list: the full list of class names to render.
    '''
    if sel is None:
        sel = ()
    result = []
    for name in in_list:
        selected = name in sel
        result.append({
            'i': name.replace(" ", "_"),
            'v': '1' if selected else '0',
            'src': '_checked' if selected else '',
        })
    return result
# Use memcached to throttle actions.  Check to see if this user has
# done the requested action recently.  If we get no IP, then they
# get tossed in the same bucket as everyone else with no IP.
def throttle_action(action, ip, limit=MEMCACHED_THROTTLE_SECONDS):
    ''' Throttles specified action.  Returns time left till
    user can do that action, 0 = its ok.  Uses IP as id.

    Bug fixes: the `limit` parameter was previously accepted but ignored
    (the module constant was used everywhere); the remaining time could
    also go negative, contradicting the "0 = its ok" contract.
    '''
    key = "%s%s" % (action, str(ip))
    curr_secs = int(time.time())
    user_secs = memcache.get(key)
    if user_secs is not None:
        # Clamp so callers can treat any truthy value as "throttled".
        return max(0, limit - (curr_secs - user_secs))
    # Record the action; the entry expires after `limit` seconds.
    memcache.add(key, curr_secs, limit)
    return 0
# Helper function to simplify template rendering.
def render_template(template_obj, template_vars=None, path=''):
    '''
    Helper function to render a template in the template directory
    with its variable substitutions.  Returns template HTML.

    template_obj may be a pre-loaded template object or a template file
    name (str) resolved relative to `path`.

    NOTE: a caller-supplied template_vars dict is still filled in place
    with the site-wide variables, as before.  The former `template_vars={}`
    mutable default (which leaked those mutations across calls) is
    replaced with None.
    '''
    if template_vars is None:
        template_vars = {}
    # Populate the search variables in the base template
    template_vars['q_in'] = GET_SEARCH_QUERY
    template_vars['s_in'] = GET_SEARCH_SORT
    template_vars['search'] = URL_SEARCH
    template_vars['max_tag_length'] = SINGLE_TAG_MAX_LENGTH
    template_vars['search_text'] = FORM_SEARCH_INPUT_HELP
    # Populate the token highlighting CSS classes, needed in most pages.
    template_vars['tok_on'] = TOKEN_CS_ON
    template_vars['tok_off'] = TOKEN_CS_OFF
    # Static resources
    template_vars['static'] = STATIC_URL
    # Render from a file when given a template name instead of an object.
    if type(template_obj) is str and path:
        return template.render(os.path.join(path, template_obj),
                               template_vars)
    # Pre-loaded template
    return template_obj.render(template_vars)
# Get a processed macro from the macro id.
def get_macro_obj_from_id(macro_id, macro=None):
    '''Return the processed macro for macro_id, raising on bad input.

    Checks memcached first; on a miss, loads the raw macro from the data
    store (unless one was passed in), interprets it, and caches the result.
    '''
    # Fast path: previously processed macro object in memcache.
    cached = memcache.get(MACRO_PROC_KEY % macro_id)
    if cached:
        return cached
    if not macro:
        entity = SavedMacroOps.get_macro_entity(macro_id)
        # A missing entity means the id is unknown in the datastore.
        if entity is None:
            raise NoInputError("Macro id '%s' not found." % macro_id)
        macro = entity.macro
    # Process the macro, lazily importing.
    from macro.interpret.interpreter import MacroInterpreter
    macro_obj = MacroInterpreter().interpret_macro(macro)
    # Save macro in memcached.
    memcache.add(MACRO_PROC_KEY % macro_id,
                 macro_obj,
                 MEMCACHED_MACRO_PROC)
    return macro_obj
# Render a processed macro's interpretation into a template.
def render_macro(macro_obj, path, errors=True, tt=False, template='processed_macro.template'):
    ''' Helper to render macro interpretation into a template. '''
    # One rendered row per macro command: the command line plus its
    # interpretation.
    rendered_lines = [
        {'line': generate_cmd_html(cmd.cmd_list, tt=tt, show_err=errors),
         'interpret': generate_interpret_html(cmd.interpret, tt=tt,
                                               show_err=errors,
                                               cmd_error=cmd.error)}
        for cmd in macro_obj
    ]
    return render_template(template,
                           {'macro': rendered_lines},
                           path)
# Fetch a saved macro's populated view template hash
def get_view_dict_from_macro_id(macro_id, saved_macro=None):
    """Return the template-variable dict for a macro's view page.

    View pages are extremely heavy and mostly static, so the dict is
    cached in memcached keyed by macro id.
    """
    key = MACRO_VIEW_KEY % macro_id
    cached = memcache.get(key)
    if cached:
        return cached
    # Cache miss: load from the data store.  SavedMacroOps will throw
    # on failure.
    if not saved_macro:
        saved_macro = SavedMacroOps(macro_id)
    entity = saved_macro.entity
    view_vars = {
        'macro_input': entity.macro,
        'title': entity.title,
        'author_name': entity.name,
        'author_server': entity.server,
        'notes': entity.notes,
        'version': entity.version,
        'curr_version': "%s.%s.%s" % (MAJOR_VERSION,
                                      MINOR_VERSION,
                                      PATCH_VERSION),
        'class_list': translate_classmap(in_list=entity.classes),
        'classes': ", ".join(entity.classes),
        'tags': ", ".join(entity.tags),
        'macro_id': macro_id,
    }
    # Save in memcached
    memcache.add(key, view_vars, MEMCACHED_VIEWS)
    return view_vars
|
[
"macro.interpret.interpreter.MacroInterpreter",
"macro.render.interpretation.generate_interpret_html",
"macro.data.appengine.savedmacro.SavedMacroOps",
"macro.exceptions.NoInputError",
"google.appengine.api.memcache.add",
"time.time",
"macro.render.interpretation.generate_cmd_html",
"macro.data.appengine.savedmacro.SavedMacroOps.get_macro_entity",
"os.path.join",
"google.appengine.api.memcache.get"
] |
[((2196, 2213), 'google.appengine.api.memcache.get', 'memcache.get', (['key'], {}), '(key)\n', (2208, 2213), False, 'from google.appengine.api import memcache\n'), ((3915, 3954), 'google.appengine.api.memcache.get', 'memcache.get', (['(MACRO_PROC_KEY % macro_id)'], {}), '(MACRO_PROC_KEY % macro_id)\n', (3927, 3954), False, 'from google.appengine.api import memcache\n'), ((5797, 5814), 'google.appengine.api.memcache.get', 'memcache.get', (['key'], {}), '(key)\n', (5809, 5814), False, 'from google.appengine.api import memcache\n'), ((2167, 2178), 'time.time', 'time.time', ([], {}), '()\n', (2176, 2178), False, 'import time\n'), ((4602, 4674), 'google.appengine.api.memcache.add', 'memcache.add', (['(MACRO_PROC_KEY % macro_id)', 'macro_obj', 'MEMCACHED_MACRO_PROC'], {}), '(MACRO_PROC_KEY % macro_id, macro_obj, MEMCACHED_MACRO_PROC)\n', (4614, 4674), False, 'from google.appengine.api import memcache\n'), ((7004, 7066), 'google.appengine.api.memcache.add', 'memcache.add', (['key', 'macro_form_template_values', 'MEMCACHED_VIEWS'], {}), '(key, macro_form_template_values, MEMCACHED_VIEWS)\n', (7016, 7066), False, 'from google.appengine.api import memcache\n'), ((3395, 3427), 'os.path.join', 'os.path.join', (['path', 'template_obj'], {}), '(path, template_obj)\n', (3407, 3427), False, 'import os\n'), ((4087, 4127), 'macro.data.appengine.savedmacro.SavedMacroOps.get_macro_entity', 'SavedMacroOps.get_macro_entity', (['macro_id'], {}), '(macro_id)\n', (4117, 4127), False, 'from macro.data.appengine.savedmacro import SavedMacroOps\n'), ((6049, 6072), 'macro.data.appengine.savedmacro.SavedMacroOps', 'SavedMacroOps', (['macro_id'], {}), '(macro_id)\n', (6062, 6072), False, 'from macro.data.appengine.savedmacro import SavedMacroOps\n'), ((2352, 2363), 'time.time', 'time.time', ([], {}), '()\n', (2361, 2363), False, 'import time\n'), ((4286, 4337), 'macro.exceptions.NoInputError', 'NoInputError', (['("Macro id \'%s\' not found." % macro_id)'], {}), '("Macro id \'%s\' not found." 
% macro_id)\n', (4298, 4337), False, 'from macro.exceptions import NoInputError\n'), ((4516, 4534), 'macro.interpret.interpreter.MacroInterpreter', 'MacroInterpreter', ([], {}), '()\n', (4532, 4534), False, 'from macro.interpret.interpreter import MacroInterpreter\n'), ((5093, 5148), 'macro.render.interpretation.generate_cmd_html', 'generate_cmd_html', (['cmd.cmd_list'], {'tt': 'tt', 'show_err': 'errors'}), '(cmd.cmd_list, tt=tt, show_err=errors)\n', (5110, 5148), False, 'from macro.render.interpretation import generate_cmd_html, generate_interpret_html\n'), ((5197, 5285), 'macro.render.interpretation.generate_interpret_html', 'generate_interpret_html', (['cmd.interpret'], {'tt': 'tt', 'show_err': 'errors', 'cmd_error': 'cmd.error'}), '(cmd.interpret, tt=tt, show_err=errors, cmd_error=\n cmd.error)\n', (5220, 5285), False, 'from macro.render.interpretation import generate_cmd_html, generate_interpret_html\n')]
|
from django.test import TestCase
from machina.apps.forum_permission.shortcuts import assign_perm
from machina.core.db.models import get_model
from ashley.factories import ForumFactory, LTIContextFactory, UserFactory
# Resolve machina's Forum model once at import time.
Forum = get_model("forum", "Forum")  # pylint: disable=C0103
class ForumRenameTestCase(TestCase):
    """Test the rename admin feature of a forum.

    Cleanups: removed the redundant ``setUp`` that only called ``super()``,
    and factored the duplicated user/forum fixture into a helper.
    """

    def _make_forum_and_login(self):
        """Create a user and a forum in the user's LTI context, log in."""
        user = UserFactory()
        lti_context = LTIContextFactory(lti_consumer=user.lti_consumer)
        forum = ForumFactory(name="Initial forum name")
        forum.lti_contexts.add(lti_context)
        self.client.force_login(user, "ashley.auth.backend.LTIBackend")
        return user, forum

    def test_basic_user(self):
        """
        A user without the `can_rename_forum` permission
        should not be able to rename it.
        """
        user, forum = self._make_forum_and_login()
        response = self.client.get(f"/forum/admin/rename/{forum.pk}/")
        self.assertEqual(403, response.status_code)
        update_response = self.client.post(
            f"/forum/admin/rename/{forum.pk}/", data={"name": "Modified forum name"}
        )
        self.assertEqual(403, update_response.status_code)
        # The rename must not have been applied.
        self.assertEqual("Initial forum name", Forum.objects.get(pk=forum.pk).name)

    def test_with_can_rename_forum_permission(self):
        """
        A user with the `can_rename_forum` permission should be able
        to rename it.
        """
        user, forum = self._make_forum_and_login()
        assign_perm("can_rename_forum", user, forum, True)
        response = self.client.get(f"/forum/admin/rename/{forum.pk}/")
        self.assertEqual(200, response.status_code)
        self.assertContains(response, "Rename the forum")
        update_response = self.client.post(
            f"/forum/admin/rename/{forum.pk}/", data={"name": "Modified forum name"}
        )
        self.assertEqual(302, update_response.status_code)
        self.assertEqual("Modified forum name", Forum.objects.get(pk=forum.pk).name)
|
[
"machina.core.db.models.get_model",
"machina.apps.forum_permission.shortcuts.assign_perm",
"ashley.factories.LTIContextFactory",
"ashley.factories.UserFactory",
"ashley.factories.ForumFactory"
] |
[((226, 253), 'machina.core.db.models.get_model', 'get_model', (['"""forum"""', '"""Forum"""'], {}), "('forum', 'Forum')\n", (235, 253), False, 'from machina.core.db.models import get_model\n'), ((584, 597), 'ashley.factories.UserFactory', 'UserFactory', ([], {}), '()\n', (595, 597), False, 'from ashley.factories import ForumFactory, LTIContextFactory, UserFactory\n'), ((620, 669), 'ashley.factories.LTIContextFactory', 'LTIContextFactory', ([], {'lti_consumer': 'user.lti_consumer'}), '(lti_consumer=user.lti_consumer)\n', (637, 669), False, 'from ashley.factories import ForumFactory, LTIContextFactory, UserFactory\n'), ((686, 725), 'ashley.factories.ForumFactory', 'ForumFactory', ([], {'name': '"""Initial forum name"""'}), "(name='Initial forum name')\n", (698, 725), False, 'from ashley.factories import ForumFactory, LTIContextFactory, UserFactory\n'), ((1435, 1448), 'ashley.factories.UserFactory', 'UserFactory', ([], {}), '()\n', (1446, 1448), False, 'from ashley.factories import ForumFactory, LTIContextFactory, UserFactory\n'), ((1471, 1520), 'ashley.factories.LTIContextFactory', 'LTIContextFactory', ([], {'lti_consumer': 'user.lti_consumer'}), '(lti_consumer=user.lti_consumer)\n', (1488, 1520), False, 'from ashley.factories import ForumFactory, LTIContextFactory, UserFactory\n'), ((1537, 1576), 'ashley.factories.ForumFactory', 'ForumFactory', ([], {'name': '"""Initial forum name"""'}), "(name='Initial forum name')\n", (1549, 1576), False, 'from ashley.factories import ForumFactory, LTIContextFactory, UserFactory\n'), ((1630, 1680), 'machina.apps.forum_permission.shortcuts.assign_perm', 'assign_perm', (['"""can_rename_forum"""', 'user', 'forum', '(True)'], {}), "('can_rename_forum', user, forum, True)\n", (1641, 1680), False, 'from machina.apps.forum_permission.shortcuts import assign_perm\n')]
|
from sympy import Eq, Matrix, cancel, expand, fraction, gcd_list, lcm_list, poly, solve, symbols
def sphere(P1, P2, P3, P4):
    """Return F(x, y, z) such that F(x, y, z) = 0 is the equation of the
    sphere through the four given points."""
    g, h, j, k, x, y, z = symbols('g, h, j, k, x, y, z')
    # Generic sphere with unknown coefficients g, h, j, k.
    generic = Eq(x**2 + y**2 + z**2 + g*x + h*y + j*z + k, 0)
    # One constraint per point the sphere must pass through.
    constraints = [
        generic.subs(x, P[0]).subs(y, P[1]).subs(z, P[2])
        for P in (P1, P2, P3, P4)
    ]
    s = solve(constraints, (g, h, j, k))
    return fraction(cancel(x**2 + y**2 + z**2 + s[g]*x + s[h]*y + s[j]*z + s[k]))[0]
def coplanar(P1, P2, P3, P4):
    """Equation asserting the four points are coplanar (4x4 determinant = 0)."""
    rows = [[P[0], P[1], P[2], 1] for P in (P1, P2, P3, P4)]
    det = Matrix(rows).det()
    # Keep only the numerator so denominators cannot hide the zero set.
    return Eq(fraction(cancel(det))[0], 0)
def plane(P1, P2, P3):
    """Equation of the plane through the three given points."""
    axes = symbols('x, y, z')
    # A generic point (x, y, z) is on the plane iff it is coplanar with
    # the three fixed points.
    return coplanar(axes, P1, P2, P3)
def intersect(P1, P2, P3, slope, sphere_l, ai0, ai1, ai2):
    """Intersect a line with the sphere and return the resulting point.

    ai0/ai1/ai2 select which of (x, y, z) play the roles of the free,
    dependent and eliminated coordinates for this configuration.
    """
    axes = symbols('x, y, z')
    axis0, axis1, axis2 = axes[ai0], axes[ai1], axes[ai2]
    # return the intersect point P of:
    # `axis2 = plane_p(axis0, axis1)` passing through P1, P2 and P3
    # `axis1 = plane_q(axis0)` with `slope = axis1/axis0` passing through P1
    # `sphere_l(x, y, z) = 0`
    plane_p = solve(plane(P1, P2, P3), axis2)[0]
    plane_q = slope*(axis0 - P1[ai0]) + P1[ai1]
    # Substituting both planes into the sphere leaves a polynomial in axis0
    # alone; for a sphere this is a quadratic.
    sphere_coeffs = poly(sphere_l.subs(axis2, plane_p).subs(axis1, plane_q), axis0).all_coeffs()
    P = {}
    # -coeffs[1]/coeffs[0] is the sum of the quadratic's two roots (Vieta);
    # subtracting P1's coordinate presumably isolates the second
    # intersection, P1 being the known first root — TODO confirm.
    P[axis0] = -sphere_coeffs[1]/sphere_coeffs[0] - P1[ai0]
    P[axis1] = plane_q.subs(axis0, P[axis0])
    P[axis2] = plane_p.subs(axis0, P[axis0]).subs(axis1, P[axis1])
    return P[axes[0]], P[axes[1]], P[axes[2]]
def multiplied(x, y, z, w):
    """Scale the four coordinates by the LCM of their denominators,
    clearing all fractions while preserving the ratios."""
    parts = [fraction(cancel(v)) for v in (x, y, z, w)]
    lcd = lcm_list([den for _, den in parts])
    return tuple(num * cancel(lcd / den) for num, den in parts)
def to_homogeneous(P):
    """Lift an affine 3-D point to fraction-free homogeneous coordinates."""
    px, py, pz = P[0], P[1], P[2]
    return multiplied(px, py, pz, 1)
def reduced(x, y, z, w):
    """Divide homogeneous coordinates by their GCD; the all-zero vector
    maps to the canonical point (0, 0, 0, 1)."""
    divisor = gcd_list([x, y, z, w])
    if divisor == 0:
        return 0, 0, 0, 1
    return tuple(cancel(v / divisor) for v in (x, y, z, w))
def cross(P1, P2, P3):
    """Generalized cross product of three homogeneous 4-vectors, reduced
    by the common GCD.  Each component is (up to sign) the 3x3 cofactor
    determinant from the 3x4 matrix [P1; P2; P3].
    """
    x10, x11, x12, x13 = P1[0], P1[1], P1[2], P1[3]
    x20, x21, x22, x23 = P2[0], P2[1], P2[2], P2[3]
    x30, x31, x32, x33 = P3[0], P3[1], P3[2], P3[3]
    # generated by cross-3d.py
    # Left as the expanded expressions rather than Matrix().det() calls,
    # presumably for speed on large symbolic inputs — do not hand-edit.
    x = -x11*x22*x33 + x11*x23*x32 + x12*x21*x33 - x12*x23*x31 - x13*x21*x32 + x13*x22*x31
    y = x10*x22*x33 - x10*x23*x32 - x12*x20*x33 + x12*x23*x30 + x13*x20*x32 - x13*x22*x30
    z = -x10*x21*x33 + x10*x23*x31 + x11*x20*x33 - x11*x23*x30 - x13*x20*x31 + x13*x21*x30
    w = x10*x21*x32 - x10*x22*x31 - x11*x20*x32 + x11*x22*x30 + x12*x20*x31 - x12*x21*x30
    return reduced(x, y, z, w)
def on_sphere(sphere_h, P):
    """True iff the homogeneous point P satisfies the homogenized sphere
    equation sphere_h = 0."""
    expr = sphere_h
    for sym, value in zip(symbols('x, y, z, w'), P):
        expr = expr.subs(sym, value)
    return expand(expr) == 0
def main():
    """Symbolically verify the concyclic-points claim from the problem below
    for a general tetrahedron OABC."""
    # https://imomath.com/index.cgi?page=inversion (Problem 11)
    a, b, c, d, e, f, g, h, j, x, y, z, w = symbols('a, b, c, d, e, f, g, h, j, x, y, z, w')
    # quick test a special case
    # b, d, e = 0, 0, 0
    # General tetrahedron with O at the origin; a..f are free parameters,
    # g, h, j are the free slopes of the three cutting planes.
    O, A, B, C = (0, 0, 0), (a, 0, 0), (b, c, 0), (d, e, f)
    sphere_l = sphere(O, A, B, C)
    print('Sphere Equation:', sphere_l, '= 0')
    # If we choose plane $y=kx$ through point O, we'll get G with too many terms:
    # G[0] has 12319 terms
    # G[1] has 9959 terms
    # G[2] has 6337 terms
    # G[3] has 8760 terms
    # D = intersect(O, B, C, g, sphere_l, 1, 2, 0)
    # E = intersect(O, C, A, h, sphere_l, 2, 0, 1)
    # F = intersect(O, A, B, j, sphere_l, 0, 1, 2)
    # Second sphere intersections computed from the other vertex orderings
    # (chosen to keep the expression sizes manageable — see note above).
    D = intersect(C, B, O, g, sphere_l, 1, 2, 0)
    E = intersect(A, C, O, h, sphere_l, 2, 0, 1)
    F = intersect(B, A, O, j, sphere_l, 0, 1, 2)
    # Homogenize with w so all further work is projective and fraction-free.
    sphere_h = poly(sphere_l, (x, y, z)).homogenize(w).expr
    print('Sphere Equation in Homogeneous:', sphere_h, '= 0')
    A, B, C = to_homogeneous(A), to_homogeneous(B), to_homogeneous(C)
    D, E, F = to_homogeneous(D), to_homogeneous(E), to_homogeneous(F)
    print('D:', D)
    print('E:', E)
    print('F:', F)
    # Sanity checks: every constructed point should lie on the sphere.
    print('Is A on Sphere?', on_sphere(sphere_h, A))
    print('Is B on Sphere?', on_sphere(sphere_h, B))
    print('Is C on Sphere?', on_sphere(sphere_h, C))
    print('Is D on Sphere?', on_sphere(sphere_h, D))
    print('Is E on Sphere?', on_sphere(sphere_h, E))
    print('Is F on Sphere?', on_sphere(sphere_h, F))
    # Cross of three point-triples, then cross of those results gives G.
    CDE, BFD, AEF = cross(C, D, E), cross(B, F, D), cross(A, E, F)
    print('CDE:', CDE)
    print('BFD:', BFD)
    print('AEF:', AEF)
    G = cross(CDE, BFD, AEF)
    print('G:', G)
    print('G[0] has', len(G[0].args), 'terms')
    print('G[1] has', len(G[1].args), 'terms')
    print('G[2] has', len(G[2].args), 'terms')
    print('G[3] has', len(G[3].args), 'terms')
    # The claim being verified: G lies on the original sphere.
    print('Is G on Sphere?', on_sphere(sphere_h, G))
# Run the verification only when executed as a script.
if __name__ == '__main__':
    main()
|
[
"sympy.symbols",
"sympy.solve",
"sympy.Eq",
"sympy.gcd_list",
"sympy.lcm_list",
"sympy.cancel",
"sympy.Matrix",
"sympy.poly"
] |
[((226, 256), 'sympy.symbols', 'symbols', (['"""g, h, j, k, x, y, z"""'], {}), "('g, h, j, k, x, y, z')\n", (233, 256), False, 'from sympy import Eq, Matrix, cancel, expand, fraction, gcd_list, lcm_list, poly, solve, symbols\n'), ((273, 332), 'sympy.Eq', 'Eq', (['(x ** 2 + y ** 2 + z ** 2 + g * x + h * y + j * z + k)', '(0)'], {}), '(x ** 2 + y ** 2 + z ** 2 + g * x + h * y + j * z + k, 0)\n', (275, 332), False, 'from sympy import Eq, Matrix, cancel, expand, fraction, gcd_list, lcm_list, poly, solve, symbols\n'), ((661, 692), 'sympy.solve', 'solve', (['sphere_eqs', '(g, h, j, k)'], {}), '(sphere_eqs, (g, h, j, k))\n', (666, 692), False, 'from sympy import Eq, Matrix, cancel, expand, fraction, gcd_list, lcm_list, poly, solve, symbols\n'), ((1081, 1099), 'sympy.symbols', 'symbols', (['"""x, y, z"""'], {}), "('x, y, z')\n", (1088, 1099), False, 'from sympy import Eq, Matrix, cancel, expand, fraction, gcd_list, lcm_list, poly, solve, symbols\n'), ((1214, 1232), 'sympy.symbols', 'symbols', (['"""x, y, z"""'], {}), "('x, y, z')\n", (1221, 1232), False, 'from sympy import Eq, Matrix, cancel, expand, fraction, gcd_list, lcm_list, poly, solve, symbols\n'), ((2072, 2110), 'sympy.lcm_list', 'lcm_list', (['[x1[1], y1[1], z1[1], w1[1]]'], {}), '([x1[1], y1[1], z1[1], w1[1]])\n', (2080, 2110), False, 'from sympy import Eq, Matrix, cancel, expand, fraction, gcd_list, lcm_list, poly, solve, symbols\n'), ((2324, 2346), 'sympy.gcd_list', 'gcd_list', (['[x, y, z, w]'], {}), '([x, y, z, w])\n', (2332, 2346), False, 'from sympy import Eq, Matrix, cancel, expand, fraction, gcd_list, lcm_list, poly, solve, symbols\n'), ((3110, 3131), 'sympy.symbols', 'symbols', (['"""x, y, z, w"""'], {}), "('x, y, z, w')\n", (3117, 3131), False, 'from sympy import Eq, Matrix, cancel, expand, fraction, gcd_list, lcm_list, poly, solve, symbols\n'), ((3342, 3390), 'sympy.symbols', 'symbols', (['"""a, b, c, d, e, f, g, h, j, x, y, z, w"""'], {}), "('a, b, c, d, e, f, g, h, j, x, y, z, w')\n", (3349, 3390), 
False, 'from sympy import Eq, Matrix, cancel, expand, fraction, gcd_list, lcm_list, poly, solve, symbols\n'), ((2401, 2416), 'sympy.cancel', 'cancel', (['(x / gcd)'], {}), '(x / gcd)\n', (2407, 2416), False, 'from sympy import Eq, Matrix, cancel, expand, fraction, gcd_list, lcm_list, poly, solve, symbols\n'), ((2416, 2431), 'sympy.cancel', 'cancel', (['(y / gcd)'], {}), '(y / gcd)\n', (2422, 2431), False, 'from sympy import Eq, Matrix, cancel, expand, fraction, gcd_list, lcm_list, poly, solve, symbols\n'), ((2431, 2446), 'sympy.cancel', 'cancel', (['(z / gcd)'], {}), '(z / gcd)\n', (2437, 2446), False, 'from sympy import Eq, Matrix, cancel, expand, fraction, gcd_list, lcm_list, poly, solve, symbols\n'), ((2446, 2461), 'sympy.cancel', 'cancel', (['(w / gcd)'], {}), '(w / gcd)\n', (2452, 2461), False, 'from sympy import Eq, Matrix, cancel, expand, fraction, gcd_list, lcm_list, poly, solve, symbols\n'), ((713, 785), 'sympy.cancel', 'cancel', (['(x ** 2 + y ** 2 + z ** 2 + s[g] * x + s[h] * y + s[j] * z + s[k])'], {}), '(x ** 2 + y ** 2 + z ** 2 + s[g] * x + s[h] * y + s[j] * z + s[k])\n', (719, 785), False, 'from sympy import Eq, Matrix, cancel, expand, fraction, gcd_list, lcm_list, poly, solve, symbols\n'), ((1988, 1997), 'sympy.cancel', 'cancel', (['x'], {}), '(x)\n', (1994, 1997), False, 'from sympy import Eq, Matrix, cancel, expand, fraction, gcd_list, lcm_list, poly, solve, symbols\n'), ((2009, 2018), 'sympy.cancel', 'cancel', (['y'], {}), '(y)\n', (2015, 2018), False, 'from sympy import Eq, Matrix, cancel, expand, fraction, gcd_list, lcm_list, poly, solve, symbols\n'), ((2030, 2039), 'sympy.cancel', 'cancel', (['z'], {}), '(z)\n', (2036, 2039), False, 'from sympy import Eq, Matrix, cancel, expand, fraction, gcd_list, lcm_list, poly, solve, symbols\n'), ((2051, 2060), 'sympy.cancel', 'cancel', (['w'], {}), '(w)\n', (2057, 2060), False, 'from sympy import Eq, Matrix, cancel, expand, fraction, gcd_list, lcm_list, poly, solve, symbols\n'), ((2128, 2147), 
'sympy.cancel', 'cancel', (['(lcd / x1[1])'], {}), '(lcd / x1[1])\n', (2134, 2147), False, 'from sympy import Eq, Matrix, cancel, expand, fraction, gcd_list, lcm_list, poly, solve, symbols\n'), ((2153, 2172), 'sympy.cancel', 'cancel', (['(lcd / y1[1])'], {}), '(lcd / y1[1])\n', (2159, 2172), False, 'from sympy import Eq, Matrix, cancel, expand, fraction, gcd_list, lcm_list, poly, solve, symbols\n'), ((2178, 2197), 'sympy.cancel', 'cancel', (['(lcd / z1[1])'], {}), '(lcd / z1[1])\n', (2184, 2197), False, 'from sympy import Eq, Matrix, cancel, expand, fraction, gcd_list, lcm_list, poly, solve, symbols\n'), ((2203, 2222), 'sympy.cancel', 'cancel', (['(lcd / w1[1])'], {}), '(lcd / w1[1])\n', (2209, 2222), False, 'from sympy import Eq, Matrix, cancel, expand, fraction, gcd_list, lcm_list, poly, solve, symbols\n'), ((4090, 4115), 'sympy.poly', 'poly', (['sphere_l', '(x, y, z)'], {}), '(sphere_l, (x, y, z))\n', (4094, 4115), False, 'from sympy import Eq, Matrix, cancel, expand, fraction, gcd_list, lcm_list, poly, solve, symbols\n'), ((1016, 1027), 'sympy.Matrix', 'Matrix', (['mat'], {}), '(mat)\n', (1022, 1027), False, 'from sympy import Eq, Matrix, cancel, expand, fraction, gcd_list, lcm_list, poly, solve, symbols\n')]
|
from datetime import datetime as dt, timedelta
def unixtimetotime(time):
    """Convert a Unix timestamp (seconds) to a 'YYYY-MM-DD HH:MM:SS' UTC string.

    Fixed: removed the dead ``time + timedelta(hours=0)`` no-op that added
    nothing to the result.
    """
    return dt.utcfromtimestamp(time).strftime('%Y-%m-%d %H:%M:%S')
def fahrtocels(fahr):
    """Convert a temperature from degrees Fahrenheit to degrees Celsius."""
    offset_from_freezing = fahr - 32
    return offset_from_freezing / 1.8
def checkjsonkey(data, loc, key, i=0):
    """Look up *key* under data[loc]; fall back to the i-th hourly record.

    Returns 0 when the key is absent (or when i is 0/falsy for the fallback).
    """
    section = data[loc]
    if key in section:
        return section[key]
    if i and key in section['data'][i]:
        return section['data'][i][key]
    return 0
def get_values(data, i=0):
    """Flatten one weather record (current conditions or the i-th hourly
    entry) from a forecast response dict into a fixed-order list.

    Temperatures are converted from Fahrenheit to Celsius and the Unix
    timestamp is formatted as a UTC string.
    """
    loc = 'hourly' if i else 'currently'
    latitude = data['latitude']
    longitude = data['longitude']
    timezone = data['timezone']
    stamp = unixtimetotime(checkjsonkey(data, loc, 'time', i))
    summary = checkjsonkey(data, loc, 'summary', i)
    icon = checkjsonkey(data, loc, 'icon', i)
    preciptype = checkjsonkey(data, loc, 'precipType', i)
    temperature = fahrtocels(checkjsonkey(data, loc, 'temperature', i))
    feels_like = fahrtocels(checkjsonkey(data, loc, 'apparentTemperature', i))
    tail_keys = ('humidity', 'dewPoint', 'windSpeed',
                 'cloudCover', 'visibility', 'uvIndex')
    humidity, dew, wind, clouds, vis, uv = (
        checkjsonkey(data, loc, k, i) for k in tail_keys)
    return [latitude, longitude, timezone, summary, icon, preciptype,
            temperature, stamp, feels_like, humidity, dew, wind,
            clouds, vis, uv]
|
[
"datetime.datetime.utcfromtimestamp",
"datetime.timedelta"
] |
[((85, 110), 'datetime.datetime.utcfromtimestamp', 'dt.utcfromtimestamp', (['time'], {}), '(time)\n', (104, 110), True, 'from datetime import datetime as dt, timedelta\n'), ((129, 147), 'datetime.timedelta', 'timedelta', ([], {'hours': '(0)'}), '(hours=0)\n', (138, 147), False, 'from datetime import datetime as dt, timedelta\n')]
|
import pytest
import numpy as np
from FastDSP.structures import GPUArray
class TestGPUArray:
    """Host/device round-trip, flat indexing, and elementwise arithmetic
    tests for GPUArray across all supported dtypes.

    Fixed two defects in ``test_right_item_is_returned``: the flat index
    was ``i*cols + rows`` (a constant, wrong column) instead of
    ``i*cols + j``, and the complex-double host array was compared against
    the complex-float device array.
    """

    @classmethod
    def setup_class(cls):
        """Create one host (numpy) array and one device copy per dtype."""
        cls.rows = 4
        cls.cols = 6
        shape = (cls.rows, cls.cols)
        cls.array_uint8 = np.ones(shape, dtype=np.uint8)
        cls.array_int = np.ones(shape, dtype=np.int32)
        cls.array_float = np.ones(shape, dtype=np.float32)
        cls.array_double = np.ones(shape, dtype=np.float64)
        cls.array_complex_float = np.ones(shape, dtype=np.complex64)
        cls.array_complex_double = np.ones(shape, dtype=np.complex128)

        cls.array_uint8_gpu = GPUArray(cls.array_uint8)
        cls.array_int_gpu = GPUArray(cls.array_int)
        cls.array_float_gpu = GPUArray(cls.array_float)
        cls.array_double_gpu = GPUArray(cls.array_double)
        cls.array_float_complex_gpu = GPUArray(cls.array_complex_float)
        cls.array_complex_double_gpu = GPUArray(cls.array_complex_double)

    def _host_device_pairs(self):
        """Yield (host, device) array pairs, one per supported dtype."""
        yield self.array_uint8, self.array_uint8_gpu
        yield self.array_int, self.array_int_gpu
        yield self.array_float, self.array_float_gpu
        yield self.array_double, self.array_double_gpu
        yield self.array_complex_float, self.array_float_complex_gpu
        yield self.array_complex_double, self.array_complex_double_gpu

    def _check_binary_op(self, host, device, op):
        """Assert *op* on the device pair equals *op* on the host pair."""
        assert np.all(op(device, device).get() == op(host, host))

    # --- host -> device -> host transfer ---

    def test_uint8_array_transfer_to_device_and_back(self):
        assert np.all(self.array_uint8 == self.array_uint8_gpu.get())

    def test_int_array_transfer_to_device_and_back(self):
        assert np.all(self.array_int == self.array_int_gpu.get())

    def test_float_array_transfer_to_device_and_back(self):
        assert np.all(self.array_float == self.array_float_gpu.get())

    def test_double_array_transfer_to_device_and_back(self):
        assert np.all(self.array_double == self.array_double_gpu.get())

    def test_complex_float_array_transfer_to_device(self):
        assert np.all(self.array_complex_float == self.array_float_complex_gpu.get())

    def test_complex_double_array_transfer_to_device(self):
        assert np.all(self.array_complex_double == self.array_complex_double_gpu.get())

    # --- flat indexing ---

    def test_right_item_is_returned(self):
        """Flat device index ``i*cols + j`` must address host element [i, j]."""
        for i in range(self.rows):
            for j in range(self.cols):
                flat = i * self.cols + j
                for host, device in self._host_device_pairs():
                    assert host[i, j] == device[flat]

    # --- addition ---

    def test_uint8_addition_returns_right_value(self):
        self._check_binary_op(self.array_uint8, self.array_uint8_gpu, lambda a, b: a + b)

    def test_int_addition_returns_right_value(self):
        self._check_binary_op(self.array_int, self.array_int_gpu, lambda a, b: a + b)

    def test_float_addition_returns_right_value(self):
        self._check_binary_op(self.array_float, self.array_float_gpu, lambda a, b: a + b)

    def test_double_addition_returns_right_value(self):
        self._check_binary_op(self.array_double, self.array_double_gpu, lambda a, b: a + b)

    def test_complex_float_addition_returns_the_right_value(self):
        self._check_binary_op(self.array_complex_float, self.array_float_complex_gpu, lambda a, b: a + b)

    def test_complex_double_addition_return_the_right_value(self):
        self._check_binary_op(self.array_complex_double, self.array_complex_double_gpu, lambda a, b: a + b)

    # --- subtraction ---

    def test_uint8_subtraction_returns_right_value(self):
        self._check_binary_op(self.array_uint8, self.array_uint8_gpu, lambda a, b: a - b)

    def test_int_subtraction_returns_right_value(self):
        self._check_binary_op(self.array_int, self.array_int_gpu, lambda a, b: a - b)

    def test_float_subtraction_returns_right_value(self):
        self._check_binary_op(self.array_float, self.array_float_gpu, lambda a, b: a - b)

    def test_double_subtraction_returns_right_value(self):
        self._check_binary_op(self.array_double, self.array_double_gpu, lambda a, b: a - b)

    def test_complex_float_subtraction_returns_the_right_value(self):
        self._check_binary_op(self.array_complex_float, self.array_float_complex_gpu, lambda a, b: a - b)

    def test_complex_double_subtraction_return_the_right_value(self):
        self._check_binary_op(self.array_complex_double, self.array_complex_double_gpu, lambda a, b: a - b)

    # --- multiplication ---

    def test_uint8_multiplication_returns_right_value(self):
        self._check_binary_op(self.array_uint8, self.array_uint8_gpu, lambda a, b: a * b)

    def test_int_multiplication_returns_right_value(self):
        self._check_binary_op(self.array_int, self.array_int_gpu, lambda a, b: a * b)

    def test_float_multiplication_returns_right_value(self):
        self._check_binary_op(self.array_float, self.array_float_gpu, lambda a, b: a * b)

    def test_double_multiplication_returns_right_value(self):
        self._check_binary_op(self.array_double, self.array_double_gpu, lambda a, b: a * b)

    def test_complex_float_multiplication_returns_the_right_value(self):
        self._check_binary_op(self.array_complex_float, self.array_float_complex_gpu, lambda a, b: a * b)

    def test_complex_double_multiplication_return_the_right_value(self):
        self._check_binary_op(self.array_complex_double, self.array_complex_double_gpu, lambda a, b: a * b)

    # --- division ---

    def test_uint8_division_returns_right_value(self):
        self._check_binary_op(self.array_uint8, self.array_uint8_gpu, lambda a, b: a / b)

    def test_int_division_returns_right_value(self):
        self._check_binary_op(self.array_int, self.array_int_gpu, lambda a, b: a / b)

    def test_float_division_returns_right_value(self):
        self._check_binary_op(self.array_float, self.array_float_gpu, lambda a, b: a / b)

    def test_double_division_returns_right_value(self):
        self._check_binary_op(self.array_double, self.array_double_gpu, lambda a, b: a / b)

    def test_complex_float_division_returns_the_right_value(self):
        self._check_binary_op(self.array_complex_float, self.array_float_complex_gpu, lambda a, b: a / b)

    def test_complex_double_division_return_the_right_value(self):
        self._check_binary_op(self.array_complex_double, self.array_complex_double_gpu, lambda a, b: a / b)
|
[
"FastDSP.structures.GPUArray",
"numpy.ones"
] |
[((209, 254), 'numpy.ones', 'np.ones', (['(cls.rows, cls.cols)'], {'dtype': 'np.uint8'}), '((cls.rows, cls.cols), dtype=np.uint8)\n', (216, 254), True, 'import numpy as np\n'), ((279, 324), 'numpy.ones', 'np.ones', (['(cls.rows, cls.cols)'], {'dtype': 'np.int32'}), '((cls.rows, cls.cols), dtype=np.int32)\n', (286, 324), True, 'import numpy as np\n'), ((351, 398), 'numpy.ones', 'np.ones', (['(cls.rows, cls.cols)'], {'dtype': 'np.float32'}), '((cls.rows, cls.cols), dtype=np.float32)\n', (358, 398), True, 'import numpy as np\n'), ((426, 473), 'numpy.ones', 'np.ones', (['(cls.rows, cls.cols)'], {'dtype': 'np.float64'}), '((cls.rows, cls.cols), dtype=np.float64)\n', (433, 473), True, 'import numpy as np\n'), ((508, 557), 'numpy.ones', 'np.ones', (['(cls.rows, cls.cols)'], {'dtype': 'np.complex64'}), '((cls.rows, cls.cols), dtype=np.complex64)\n', (515, 557), True, 'import numpy as np\n'), ((593, 643), 'numpy.ones', 'np.ones', (['(cls.rows, cls.cols)'], {'dtype': 'np.complex128'}), '((cls.rows, cls.cols), dtype=np.complex128)\n', (600, 643), True, 'import numpy as np\n'), ((675, 700), 'FastDSP.structures.GPUArray', 'GPUArray', (['cls.array_uint8'], {}), '(cls.array_uint8)\n', (683, 700), False, 'from FastDSP.structures import GPUArray\n'), ((729, 752), 'FastDSP.structures.GPUArray', 'GPUArray', (['cls.array_int'], {}), '(cls.array_int)\n', (737, 752), False, 'from FastDSP.structures import GPUArray\n'), ((783, 808), 'FastDSP.structures.GPUArray', 'GPUArray', (['cls.array_float'], {}), '(cls.array_float)\n', (791, 808), False, 'from FastDSP.structures import GPUArray\n'), ((840, 866), 'FastDSP.structures.GPUArray', 'GPUArray', (['cls.array_double'], {}), '(cls.array_double)\n', (848, 866), False, 'from FastDSP.structures import GPUArray\n'), ((905, 938), 'FastDSP.structures.GPUArray', 'GPUArray', (['cls.array_complex_float'], {}), '(cls.array_complex_float)\n', (913, 938), False, 'from FastDSP.structures import GPUArray\n'), ((978, 1012), 'FastDSP.structures.GPUArray', 
'GPUArray', (['cls.array_complex_double'], {}), '(cls.array_complex_double)\n', (986, 1012), False, 'from FastDSP.structures import GPUArray\n')]
|
"""Add completed uploads
Revision ID: <KEY>
Revises: 065886328b03
Create Date: 2021-02-22 16:42:00.690943
"""
import geoalchemy2
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "<KEY>"
down_revision = "065886328b03"
branch_labels = None
depends_on = None
def upgrade():
    """Create the ``uploads`` table and migrate avatar data into it.

    Avatars previously lived as a filename column on ``users``; each existing
    filename becomes an ``uploads`` row and ``users.avatar_key`` points at it.
    ``page_versions`` gains a ``photo_key`` reference as well.
    """
    # New uploads table: keyed by the upload key, owned by a user.
    op.create_table(
        "uploads",
        sa.Column("key", sa.String(), nullable=False),
        sa.Column("filename", sa.String(), nullable=False),
        sa.Column("created", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
        sa.Column("creator_user_id", sa.BigInteger(), nullable=False),
        sa.Column("credit", sa.String(), nullable=True),
        sa.ForeignKeyConstraint(["creator_user_id"], ["users.id"], name=op.f("fk_uploads_creator_user_id_users")),
        sa.PrimaryKeyConstraint("key", name=op.f("pk_uploads")),
    )
    op.create_index(op.f("ix_uploads_creator_user_id"), "uploads", ["creator_user_id"], unique=False)
    # Rename to clarify that the column records who initiated the upload.
    op.alter_column("initiated_uploads", "user_id", new_column_name="initiator_user_id")
    op.add_column("users", sa.Column("avatar_key", sa.String(), nullable=True))
    op.create_foreign_key(op.f("fk_users_avatar_key_uploads"), "users", "uploads", ["avatar_key"], ["key"])
    # Backfill: one uploads row per existing avatar; the key is the first
    # 64 chars of the filename (presumably a content hash — verify upstream).
    op.execute(
        """
        INSERT INTO uploads (creator_user_id, key, filename)
        SELECT
            id,
            substr(avatar_filename, 1, 64),
            avatar_filename
        FROM users
        WHERE avatar_filename IS NOT NULL"""
    )
    op.execute("UPDATE users SET avatar_key = substr(avatar_filename, 1, 64) WHERE avatar_filename IS NOT NULL")
    op.drop_column("users", "avatar_filename")
    # Recreate the index under the renamed column, then drop the old one.
    op.create_index(
        op.f("ix_initiated_uploads_initiator_user_id"), "initiated_uploads", ["initiator_user_id"], unique=False
    )
    op.drop_index("ix_initiated_uploads_user_id", table_name="initiated_uploads")
    op.add_column("page_versions", sa.Column("photo_key", sa.String(), nullable=True))
    op.create_foreign_key(
        op.f("fk_page_versions_photo_key_uploads"), "page_versions", "uploads", ["photo_key"], ["key"]
    )
)
def downgrade():
    """Revert the ``uploads`` table and related columns/indexes.

    NOTE(review): avatar filenames are NOT copied back from ``uploads``
    before the table is dropped, so downgrading loses existing avatar
    data — confirm this is acceptable.

    Fixed: the previous downgrade dropped the foreign key on
    ``users.avatar_key`` but never dropped the column itself, leaving the
    schema out of sync with the pre-upgrade state.
    """
    op.drop_constraint(op.f("fk_page_versions_photo_key_uploads"), "page_versions", type_="foreignkey")
    op.drop_column("page_versions", "photo_key")
    # Restore the original index/column naming on initiated_uploads.
    op.create_index("ix_initiated_uploads_user_id", "initiated_uploads", ["initiator_user_id"], unique=False)
    op.drop_index(op.f("ix_initiated_uploads_initiator_user_id"), table_name="initiated_uploads")
    op.drop_constraint(op.f("fk_users_avatar_key_uploads"), "users", type_="foreignkey")
    # Drop the column added by upgrade() (was missing before this fix).
    op.drop_column("users", "avatar_key")
    op.alter_column("initiated_uploads", "initiator_user_id", new_column_name="user_id")
    op.drop_table("uploads")
    op.add_column("users", sa.Column("avatar_filename", sa.VARCHAR(), autoincrement=False, nullable=True))
|
[
"alembic.op.drop_table",
"sqlalchemy.DateTime",
"alembic.op.alter_column",
"sqlalchemy.VARCHAR",
"alembic.op.drop_index",
"alembic.op.create_index",
"alembic.op.f",
"alembic.op.drop_column",
"sqlalchemy.text",
"alembic.op.execute",
"sqlalchemy.String",
"sqlalchemy.BigInteger"
] |
[((1008, 1097), 'alembic.op.alter_column', 'op.alter_column', (['"""initiated_uploads"""', '"""user_id"""'], {'new_column_name': '"""initiator_user_id"""'}), "('initiated_uploads', 'user_id', new_column_name=\n 'initiator_user_id')\n", (1023, 1097), False, 'from alembic import op\n'), ((1285, 1538), 'alembic.op.execute', 'op.execute', (['"""\n INSERT INTO uploads (creator_user_id, key, filename)\n SELECT\n id,\n substr(avatar_filename, 1, 64),\n avatar_filename\n FROM users\n WHERE avatar_filename IS NOT NULL"""'], {}), '(\n """\n INSERT INTO uploads (creator_user_id, key, filename)\n SELECT\n id,\n substr(avatar_filename, 1, 64),\n avatar_filename\n FROM users\n WHERE avatar_filename IS NOT NULL"""\n )\n', (1295, 1538), False, 'from alembic import op\n'), ((1547, 1665), 'alembic.op.execute', 'op.execute', (['"""UPDATE users SET avatar_key = substr(avatar_filename, 1, 64) WHERE avatar_filename IS NOT NULL"""'], {}), "(\n 'UPDATE users SET avatar_key = substr(avatar_filename, 1, 64) WHERE avatar_filename IS NOT NULL'\n )\n", (1557, 1665), False, 'from alembic import op\n'), ((1660, 1702), 'alembic.op.drop_column', 'op.drop_column', (['"""users"""', '"""avatar_filename"""'], {}), "('users', 'avatar_filename')\n", (1674, 1702), False, 'from alembic import op\n'), ((1847, 1924), 'alembic.op.drop_index', 'op.drop_index', (['"""ix_initiated_uploads_user_id"""'], {'table_name': '"""initiated_uploads"""'}), "('ix_initiated_uploads_user_id', table_name='initiated_uploads')\n", (1860, 1924), False, 'from alembic import op\n'), ((2275, 2319), 'alembic.op.drop_column', 'op.drop_column', (['"""page_versions"""', '"""photo_key"""'], {}), "('page_versions', 'photo_key')\n", (2289, 2319), False, 'from alembic import op\n'), ((2324, 2434), 'alembic.op.create_index', 'op.create_index', (['"""ix_initiated_uploads_user_id"""', '"""initiated_uploads"""', "['initiator_user_id']"], {'unique': '(False)'}), "('ix_initiated_uploads_user_id', 'initiated_uploads', [\n 'initiator_user_id'], 
unique=False)\n", (2339, 2434), False, 'from alembic import op\n'), ((2621, 2710), 'alembic.op.alter_column', 'op.alter_column', (['"""initiated_uploads"""', '"""initiator_user_id"""'], {'new_column_name': '"""user_id"""'}), "('initiated_uploads', 'initiator_user_id', new_column_name=\n 'user_id')\n", (2636, 2710), False, 'from alembic import op\n'), ((2710, 2734), 'alembic.op.drop_table', 'op.drop_table', (['"""uploads"""'], {}), "('uploads')\n", (2723, 2734), False, 'from alembic import op\n'), ((922, 956), 'alembic.op.f', 'op.f', (['"""ix_uploads_creator_user_id"""'], {}), "('ix_uploads_creator_user_id')\n", (926, 956), False, 'from alembic import op\n'), ((1199, 1234), 'alembic.op.f', 'op.f', (['"""fk_users_avatar_key_uploads"""'], {}), "('fk_users_avatar_key_uploads')\n", (1203, 1234), False, 'from alembic import op\n'), ((1732, 1778), 'alembic.op.f', 'op.f', (['"""ix_initiated_uploads_initiator_user_id"""'], {}), "('ix_initiated_uploads_initiator_user_id')\n", (1736, 1778), False, 'from alembic import op\n'), ((2047, 2089), 'alembic.op.f', 'op.f', (['"""fk_page_versions_photo_key_uploads"""'], {}), "('fk_page_versions_photo_key_uploads')\n", (2051, 2089), False, 'from alembic import op\n'), ((2190, 2232), 'alembic.op.f', 'op.f', (['"""fk_page_versions_photo_key_uploads"""'], {}), "('fk_page_versions_photo_key_uploads')\n", (2194, 2232), False, 'from alembic import op\n'), ((2448, 2494), 'alembic.op.f', 'op.f', (['"""ix_initiated_uploads_initiator_user_id"""'], {}), "('ix_initiated_uploads_initiator_user_id')\n", (2452, 2494), False, 'from alembic import op\n'), ((2551, 2586), 'alembic.op.f', 'op.f', (['"""fk_users_avatar_key_uploads"""'], {}), "('fk_users_avatar_key_uploads')\n", (2555, 2586), False, 'from alembic import op\n'), ((391, 402), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (400, 402), True, 'import sqlalchemy as sa\n'), ((451, 462), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (460, 462), True, 'import sqlalchemy as sa\n'), ((510, 
536), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {'timezone': '(True)'}), '(timezone=True)\n', (521, 536), True, 'import sqlalchemy as sa\n'), ((625, 640), 'sqlalchemy.BigInteger', 'sa.BigInteger', ([], {}), '()\n', (638, 640), True, 'import sqlalchemy as sa\n'), ((687, 698), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (696, 698), True, 'import sqlalchemy as sa\n'), ((1144, 1155), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (1153, 1155), True, 'import sqlalchemy as sa\n'), ((1983, 1994), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (1992, 1994), True, 'import sqlalchemy as sa\n'), ((2791, 2803), 'sqlalchemy.VARCHAR', 'sa.VARCHAR', ([], {}), '()\n', (2801, 2803), True, 'import sqlalchemy as sa\n'), ((553, 569), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (560, 569), True, 'import sqlalchemy as sa\n'), ((788, 828), 'alembic.op.f', 'op.f', (['"""fk_uploads_creator_user_id_users"""'], {}), "('fk_uploads_creator_user_id_users')\n", (792, 828), False, 'from alembic import op\n'), ((875, 893), 'alembic.op.f', 'op.f', (['"""pk_uploads"""'], {}), "('pk_uploads')\n", (879, 893), False, 'from alembic import op\n')]
|
import requests
import json
import os
import re
from haystack.utils import export_answers_to_csv
import logging
import subprocess
import time
import pprint
import pandas as pd
from typing import Dict, Any, List
from haystack.document_store.sql import DocumentORM
from collections import defaultdict
## Paths to raw questions and models
# NOTE(review): machine-specific absolute path — consider moving to config.
question_path = "/home/bulelani/Desktop/odin/odin/src_new/data/raw_questions"
url = 'http://127.0.0.1:8000/models/1/doc-qa' # use more accurate model in config.py
## initialize finders ands stuffies
from haystack import Finder
from haystack.document_store.elasticsearch import ElasticsearchDocumentStore
from haystack.reader.farm import FARMReader
from haystack.reader.transformers import TransformersReader
from haystack.utils import print_answers
from haystack.retriever.sparse import ElasticsearchRetriever
logger = logging.getLogger(__name__)
# Elasticsearch-backed document store (requires a local ES instance) with a
# sparse retriever and a FARM extractive reader; no_ans_boost presumably
# biases the reader toward "no answer" predictions — verify against the
# haystack docs for the pinned version.
document_store = ElasticsearchDocumentStore(host="localhost", username="", password="", index="document")
retriever = ElasticsearchRetriever(document_store=document_store)
reader = FARMReader(model_name_or_path="deepset/roberta-base-squad2", use_gpu=False, no_ans_boost=0.6)
finder = Finder(reader, retriever)
## Lists
filtered_questions = list()
## Getting questions
# Fixed: the loop previously opened a hard-coded path instead of the loop
# variable, rebound `questions` each iteration (keeping only the last
# file's questions), and called file.close() redundantly inside `with`.
questions = []
for filename in os.listdir(question_path):
    with open(f"{question_path}/{filename}") as file:
        data = json.load(file)
    questions.extend(data["question"])

# Keep only course-specific questions, de-duplicated (order not preserved).
filtered_questions = [q for q in questions if "this course" in q]
filtered_questions = list(set(filtered_questions))
## Answering questions
answers = list()
# equad accumulates answers in SQuAD-style format (one paragraphs list).
equad = {"data": [{"paragraphs": []}]}
# NOTE(review): the dict literal below is a pasted example of a finder
# response; it is a bare expression that is evaluated and discarded.
# Consider removing it or moving it into documentation.
{
    'question': 'What does this course help professionals who need to quickly upskill and enhance their SQL?',
    'no_ans_gap': 4.704477691650391,
    'answers': [
        {
            'answer': 'demonstrable and practical skills',
            'score': 10.06596565246582,
            'probability': 0.7787239681151941,
            'context': 'eed to rapidly upskill and enhance their SQL toolkit with demonstrable and practical skills. This course is technical in nature. It is therefore reco',
            'offset_start': 58,
            'offset_end': 92,
            'offset_start_in_doc': 3290,
            'offset_end_in_doc': 3324,
            'document_id': '23bc88fa-f41e-4bd3-97e9-268fd6f0ac92',
            'meta': {
                'name': 'SQL_Prospectus_2020.txt'
            }
        }
    ]
}
|
[
"json.load",
"haystack.document_store.elasticsearch.ElasticsearchDocumentStore",
"haystack.reader.farm.FARMReader",
"haystack.Finder",
"haystack.retriever.sparse.ElasticsearchRetriever",
"os.listdir",
"logging.getLogger"
] |
[((858, 885), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (875, 885), False, 'import logging\n'), ((904, 996), 'haystack.document_store.elasticsearch.ElasticsearchDocumentStore', 'ElasticsearchDocumentStore', ([], {'host': '"""localhost"""', 'username': '""""""', 'password': '""""""', 'index': '"""document"""'}), "(host='localhost', username='', password='',\n index='document')\n", (930, 996), False, 'from haystack.document_store.elasticsearch import ElasticsearchDocumentStore\n'), ((1005, 1058), 'haystack.retriever.sparse.ElasticsearchRetriever', 'ElasticsearchRetriever', ([], {'document_store': 'document_store'}), '(document_store=document_store)\n', (1027, 1058), False, 'from haystack.retriever.sparse import ElasticsearchRetriever\n'), ((1068, 1165), 'haystack.reader.farm.FARMReader', 'FARMReader', ([], {'model_name_or_path': '"""deepset/roberta-base-squad2"""', 'use_gpu': '(False)', 'no_ans_boost': '(0.6)'}), "(model_name_or_path='deepset/roberta-base-squad2', use_gpu=False,\n no_ans_boost=0.6)\n", (1078, 1165), False, 'from haystack.reader.farm import FARMReader\n'), ((1171, 1196), 'haystack.Finder', 'Finder', (['reader', 'retriever'], {}), '(reader, retriever)\n', (1177, 1196), False, 'from haystack import Finder\n'), ((1272, 1297), 'os.listdir', 'os.listdir', (['question_path'], {}), '(question_path)\n', (1282, 1297), False, 'import os\n'), ((1368, 1383), 'json.load', 'json.load', (['file'], {}), '(file)\n', (1377, 1383), False, 'import json\n')]
|
import torch
def torch_fit(f, xdata, ydata, p0=None, rounds=10000, learning_rate=1e-3):
    """Fit ``f`` to data by Adam gradient descent (experimental).

    Parameters
    ----------
    f : callable
        Model ``f(x, *params)`` built from torch operations so gradients
        can flow through it.
    xdata, ydata : array_like
        Independent and dependent data.
    p0 : sequence of float, optional
        Initial parameter guess. When omitted, the parameter count is
        inferred from ``f``'s signature and parameters start random.
    rounds : int
        Number of optimization steps.
    learning_rate : float
        Adam step size.

    Returns
    -------
    popt : numpy.ndarray
        Optimized parameters.
    pcov : None
        Covariance is not estimated; kept so the return shape mirrors
        ``scipy.optimize.curve_fit``.

    Raises
    ------
    ValueError
        If ``p0`` is None and ``f`` takes fewer than two positional
        parameters (no fit parameters to infer).
    """
    if p0 is None:
        # Infer the number of fit parameters from f's signature (the first
        # positional argument is x). Fixed: previously used the private
        # scipy._lib._util.getargspec_no_self API.
        import inspect
        positional = [
            prm for prm in inspect.signature(f).parameters.values()
            if prm.kind in (prm.POSITIONAL_ONLY, prm.POSITIONAL_OR_KEYWORD)
        ]
        if len(positional) < 2:
            raise ValueError("Unable to determine number of fit parameters.")
        n = len(positional) - 1

    dtype = torch.float
    x = torch.tensor(xdata, dtype=dtype)
    y = torch.tensor(ydata, dtype=dtype)
    if p0 is None:
        p = torch.randn(n, dtype=dtype, requires_grad=True)
    else:
        p = torch.tensor(p0, dtype=dtype, requires_grad=True)
    optimizer = torch.optim.Adam([p], lr=learning_rate)
    for _ in range(rounds):
        # Sum-of-squares residual loss, minimized in place on p.
        loss = (f(x, *p) - y).pow(2).sum()
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
    popt = p.detach().numpy()
    pcov = None
    return popt, pcov
|
[
"scipy._lib._util.getargspec_no_self",
"torch.optim.Adam",
"torch.randn",
"torch.tensor"
] |
[((562, 594), 'torch.tensor', 'torch.tensor', (['xdata'], {'dtype': 'dtype'}), '(xdata, dtype=dtype)\n', (574, 594), False, 'import torch\n'), ((603, 635), 'torch.tensor', 'torch.tensor', (['ydata'], {'dtype': 'dtype'}), '(ydata, dtype=dtype)\n', (615, 635), False, 'import torch\n'), ((805, 844), 'torch.optim.Adam', 'torch.optim.Adam', (['[p]'], {'lr': 'learning_rate'}), '([p], lr=learning_rate)\n', (821, 844), False, 'import torch\n'), ((384, 398), 'scipy._lib._util.getargspec_no_self', '_getargspec', (['f'], {}), '(f)\n', (395, 398), True, 'from scipy._lib._util import getargspec_no_self as _getargspec\n'), ((668, 715), 'torch.randn', 'torch.randn', (['n'], {'dtype': 'dtype', 'requires_grad': '(True)'}), '(n, dtype=dtype, requires_grad=True)\n', (679, 715), False, 'import torch\n'), ((738, 787), 'torch.tensor', 'torch.tensor', (['p0'], {'dtype': 'dtype', 'requires_grad': '(True)'}), '(p0, dtype=dtype, requires_grad=True)\n', (750, 787), False, 'import torch\n')]
|
# Generated by Django 2.2.13 on 2020-07-13 16:38
from django.db import migrations
class Migration(migrations.Migration):
    """Rename ``TermsIndexPage.project_code`` to ``project`` (schema-only)."""

    dependencies = [
        ('standard', '0012_termsindexpage_project_code'),
    ]

    operations = [
        # Pure column rename; no data transformation required.
        migrations.RenameField(
            model_name='termsindexpage',
            old_name='project_code',
            new_name='project',
        ),
    ]
|
[
"django.db.migrations.RenameField"
] |
[((238, 338), 'django.db.migrations.RenameField', 'migrations.RenameField', ([], {'model_name': '"""termsindexpage"""', 'old_name': '"""project_code"""', 'new_name': '"""project"""'}), "(model_name='termsindexpage', old_name='project_code',\n new_name='project')\n", (260, 338), False, 'from django.db import migrations\n')]
|
from functools import reduce
import re
import os
#clean : clean text by turkish words
def clean(text):
    """Normalise Turkish/accented text for n-gram analysis.

    Folds Turkish-specific and accented characters to their base forms,
    lower-cases, strips everything outside [a-z0-9 whitespace çışöğü],
    and trims surrounding whitespace.
    """
    d = { "Ş":"ş", "İ":"i", "Ü":"ü", "Ç":"ç", "Ö":"ö", "Ğ":"ğ", "I":"ı", "Î":"ı", "Û":"u", "Â":"a" , "â":"a" , "î":"ı" , "û":"u" , "ä":"a", "à":"a", "å":"a", "é":"e", "ê":"e", "ë":"e", "è":"e", "ï":"ı", "ì":"ı", "ò":"o", "ù":"u", "ÿ":"y", "ó":"o", "ú":"u", "ñ":"n", "Ñ":"a", "À":"a", "Á":"a", "Ã":"a", "Ä":"a", "Å":"a", "È":"e", "É":"e", "Ê":"e", "Ë":"e", "Ì":"ı", "Í":"ı", "Î":"ı", "Ï":"ı", "Ò":"o", "Ó":"o", "Ô":"o", "Õ":"o", "Ö":"o" }
    # Single-character substitutions in one C-level pass (previously a
    # reduce() chaining one str.replace per mapping).
    text = text.translate(str.maketrans(d))
    text = text.lower()
    text = re.sub(r'[^a-z0-9\sçışöğü]+', '', text)
    return text.strip()
#trinity: parse line into three characters
def trinity(row):
    """Yield every window of three consecutive items in *row*, joined
    into a single string."""
    last_start = len(row) - 2
    for start in range(last_start):
        yield ''.join(row[start:start + 3])
# --- build a trigram transition-probability matrix from data.txt ---
# totalch counts every trigram seen; twchar holds the two-char row labels.
totalch = 0
twchar = []
# char: the alphabet used for matrix columns (Turkish letters plus q, w, x).
char = '''abcçdefgğhıijklmnoöprsştuüvyzqwx'''
# Every ordered pair of alphabet characters names one matrix row.
for i in char:
    for j in char:
        twchar.append( i+j )
#print(len(char))#54
#print(len(twchar))#2916
# Map row/column labels to integer matrix indices.
mrowname = dict( [ (k,v) for v,k in enumerate(char)] )
mcolname = dict( [ (k,v) for v,k in enumerate(twchar)] )
matris = [ [ 0 for mrow in range(len(char)) ] for mcol in range(len(twchar)) ]
# Report the corpus size in bytes before processing.
print(os.stat("data.txt").st_size)
#sayac = 0
# Count each trigram: the row is the first two characters, the column the third.
for exm in open('data.txt'):
    exm = clean(exm)
    #print(str(sayac)+'gonderiliyor')
    for a, b ,c in trinity(exm):
        matris[mcolname[a+b]][mrowname[c]] += 1
        totalch +=1
    #sayac +=1
# Convert raw counts to probabilities by dividing by the trigram total.
for i in range(len(twchar)):
    for j in range(len(char)):
        matris[i][j] = matris[i][j] / totalch
# Persist the probability matrix as a Python-literal text file.
with open("probobility_matrix.txt", "w") as file:
    file.write(str(matris))
|
[
"re.sub",
"os.stat"
] |
[((627, 666), 're.sub', 're.sub', (['"""[^a-z0-9\\\\sçışöğü]+"""', '""""""', 'text'], {}), "('[^a-z0-9\\\\sçışöğü]+', '', text)\n", (633, 666), False, 'import re\n'), ((1325, 1344), 'os.stat', 'os.stat', (['"""data.txt"""'], {}), "('data.txt')\n", (1332, 1344), False, 'import os\n')]
|
import numpy as np
class MotionExplorer:
    """Explore motions represented as sampled observations of an n-dimensional vector.

    The stream of input vectors describes a vector space in which the
    Mahalanobis distance is used to assess the distance of new samples to
    previously seen samples.  Whenever the K nearest neighbours of a new
    sample are, on average, further away than N standard deviations, the new
    sample is deemed original and appended to ``observations``.
    """

    def __init__(self, inputdim=2, stepsize=10, order=4, window=30,
                 start_buffer=10, periodic_recompute=5, number_of_neighbour=5,
                 number_of_stdev=4.5):
        """
        Parameters
        ----------
        inputdim : int
            Number of dimensions of the input vector.
        stepsize : int
            Size of the interpolation step in milliseconds.
        order : int
            Dimension of the output vector per axis: 1 is position only,
            2 adds velocity, 3 adds acceleration, and so on.
        window : int
            Size of the averaging window in samples.
        start_buffer : int
            Number of samples consumed before any observation can be saved;
            leaves time for the Savitsky-Golay interpolation to warm up.
        periodic_recompute : int
            Number of samples after which mean and covariance of the saved
            observations are recomputed.
        number_of_neighbour : int
            Number of closest neighbours considered when assessing whether a
            new sample is original.
        number_of_stdev : float
            Mahalanobis-distance threshold in standard deviations: when the
            mean distance to the K nearest neighbours exceeds it, the sample
            is saved to ``observations``.
        """
        self.inputdim = inputdim
        self.order = order
        ## filtering: one AxisFilter per input dimension
        self.axis = [AxisFilter(stepsize, order, window) for _ in range(inputdim)]
        ## observation space
        self.observations = np.zeros((1, self.inputdim * self.order))
        self.mean = np.zeros(self.inputdim * self.order)
        self.icov = np.eye(self.inputdim * self.order)
        ## bookkeeping
        self.counter = 0
        self.start_buffer = start_buffer
        self.periodic_recompute = periodic_recompute
        # BUG FIX: these two were hard-coded to 5 and 4.5, silently ignoring
        # the constructor arguments (defaults are unchanged, so existing
        # callers see identical behaviour).
        self.number_of_neighbour = number_of_neighbour
        self.number_of_stdev = number_of_stdev
        self.last_sample = np.zeros(self.inputdim * self.order)

    def new_sample(self, ms, ndata):
        """Process one observed sample.

        The sample is filtered per axis, and the Mahalanobis distance of the
        filtered vector to all previously saved original samples is computed.
        If the average distance of the K nearest neighbours exceeds the
        configured number of standard deviations, the vector is appended to
        ``self.observations``.

        Parameters
        ----------
        ms : int
            Timestamp in milliseconds, e.g. ``int(round(time.time() * 1000))``.
        ndata : iterable
            Iterable of length ``inputdim`` holding the current sample.

        Returns
        -------
        float, bool
            Average Mahalanobis distance to the K nearest neighbours, and a
            flag saying whether the sample was added to the observations.
        """
        self.counter += 1
        for i, data in enumerate(ndata):
            self.axis[i].new_sample(ms, data)
        ## recompute mean and icov every periodic_recompute samples
        if self.counter % self.periodic_recompute == 0:
            self.compute_observations_mean_icov()
        ## take the last filtered sample from each axis and flatten to 1-D
        sample = np.array([self.axis[i].samples[-1] for i in range(self.inputdim)]).reshape(-1)
        ## distance of this sample to all stored observations (sorted ascending)
        distances = self.distance_to_observations(sample)
        distance_meank = np.mean(distances[:self.number_of_neighbour])
        if (self.counter > self.start_buffer) and self.axis[0].full:
            ## keep the sample if further than number_of_stdev from previous observations
            if distance_meank > self.number_of_stdev:
                self.observations = np.vstack((self.observations, sample))
                added = True
            else:
                added = False
        else:
            added = False
        self.last_sample = sample
        return distance_meank, added

    def distance_to_observations(self, vector):
        """Return the sorted Mahalanobis distances of ``vector`` to all observations.

        https://en.wikipedia.org/wiki/Mahalanobis_distance
        """
        diff = self.observations - vector
        distances = np.sqrt(np.diag(np.dot(np.dot(diff, self.icov), diff.T)))
        return np.sort(distances)

    def compute_observations_mean_icov(self):
        """Recompute the mean and inverse covariance of the saved observations."""
        self.mean = np.mean(self.observations, axis=0)
        # pinv copes with the (near-)singular covariance of few observations
        if self.observations.shape[0] > 1:
            self.icov = np.linalg.pinv(np.cov((self.observations - self.mean).transpose()))
class AxisFilter:
    """Filter for one unevenly sampled measurement dimension.

    Interpolates the raw stream at constant time steps of `stepsize` ms and
    runs a Savitsky-Golay fit of dimension `order` over a moving window of
    `window` samples.
    """

    def __init__(self, stepsize, order, window):
        """
        Parameters
        ----------
        stepsize : int
            Size of the interpolation step in milliseconds.
        order : int
            Dimension of the output vector: 1 is position only, 2 adds
            velocity, 3 adds acceleration, and so on.
        window : int
            Size of the averaging window in samples.
        """
        self.stepsize = stepsize
        self.order = order
        self.interpolator = TimeInterpolator(stepsize)
        self.sgfitter = SavitskyGolayFitter(order, window)
        self.full = False

    def new_sample(self, time, value):
        """Feed one raw (time, value) pair; fitted rows end up in self.samples.

        NOTE(review): if the interpolator yields no steps (e.g. a repeated
        timestamp), self.samples is empty and callers indexing samples[-1]
        will fail — confirm upstream guarantees increasing timestamps.
        """
        fitted = np.empty((0, self.order))
        self.interpolator.new_sample(time, value)
        for interpolated in self.interpolator.value_steps:
            row = self.sgfitter.new_sample(interpolated)
            fitted = np.vstack((fitted, row))
        self.samples = fitted
        self.full = self.sgfitter.full
class TimeInterpolator:
    """Linearly resamples a stream of (time, value) pairs at a constant step size (ms)."""

    def __init__(self, stepsize):
        self.stepsize = stepsize
        self.firstpoint = True

    def new_sample(self, time, value):
        """Ingest one measurement; the interpolated grid appears in time_steps/value_steps."""
        if self.firstpoint:
            # Very first measurement: the grid is just that single point.
            self.firstpoint = False
            self.time_steps = np.array([time])
            self.value_steps = np.array([value])
        else:
            # NOTE(review): the grid restarts at last_time on every call, so
            # the previous sample time is emitted again and the step grid is
            # not aligned across calls — confirm this duplication is intended.
            self.time_steps = np.arange(self.last_time, time, self.stepsize)
            self.value_steps = np.interp(
                self.time_steps, [self.last_time, time], [self.last_value, value]
            )
        self.last_time = time
        self.last_value = value
class SavitskyGolayFitter:
    """Streaming Savitsky-Golay smoother / differentiator.

    Maintains `order` FIR filters whose taps are the Savitsky-Golay
    least-squares coefficients over a moving window; every new sample yields
    the fitted value and its higher-order terms as a vector of length `order`.
    """

    def __init__(self, order=4, window=30):
        """
        Parameters
        ----------
        order : int
            Number of output terms (fit value plus derivatives).
        window : int
            Moving-window length in samples; forced to odd.
        """
        self.order = order
        if window % 2 == 0:
            window = window + 1  # the SG window must have odd length
        self.window = window
        # compute the savitzky-golay differentiators
        sgolay = self.savitzky_golay(order, window)
        self.sgolay_diff = []
        self.buffers = []
        self.samples = 0
        self.full = False  # becomes True once the window has been filled
        # create one FIR filter per output term
        for i in range(order):
            self.sgolay_diff.append(np.ravel(sgolay[i, :]))
            self.buffers.append(IIRFilter(self.sgolay_diff[i], [1]))

    def new_sample(self, x):
        """Feed one sample; return the array of fitted terms."""
        self.samples = self.samples + 1
        if self.samples > self.window:
            self.full = True
        fits = np.zeros((self.order,))
        # FIX: use enumerate instead of a manual counter (as the old TODO asked)
        for i, buf in enumerate(self.buffers):
            fits[i] = buf.filter(x)
        return fits

    def savitzky_golay(self, order=2, window=30):
        """Return the (order+1, window) matrix of SG least-squares coefficients.

        Raises
        ------
        TypeError
            If window is even, < 1, or too small for the polynomial order.
        """
        if window is None:
            window = order + 2
        if window % 2 != 1 or window < 1:
            raise TypeError("window size must be a positive odd number")
        if window < order + 2:
            raise TypeError("window size is too small for the polynomial")
        # A second order polynomial has 3 coefficients
        order_range = range(order + 1)
        half_window = (window - 1) // 2
        # Vandermonde design matrix.  FIX: np.mat / numpy.matrix is
        # deprecated; a plain ndarray behaves identically here.
        B = np.array(
            [[k ** i for i in order_range] for k in range(-half_window, half_window + 1)]
        )
        M = np.linalg.pinv(B)
        return M
class IIRFilter:
    """Direct-form IIR filter over ring buffers of past inputs and outputs."""

    def __init__(self, B, A):
        """Create an IIR filter, given the B (feed-forward) and A (feedback)
        coefficient vectors.
        """
        self.B = B
        self.A = A
        if len(A) > 2:
            self.prev_outputs = Ringbuffer(len(A) - 1)
        else:
            # keep a small output history even for FIR-like filters
            self.prev_outputs = Ringbuffer(3)
        self.prev_inputs = Ringbuffer(len(B))

    def filter(self, x):
        """Take one sample and filter it.  Return the output."""
        self.prev_inputs.new_sample(x)
        y = 0
        # feed-forward part: sum_k B[k] * x[n-k]
        # FIX: enumerate replaces the manual `k = k + 1` counters
        for k, b in enumerate(self.B):
            y = y + b * self.prev_inputs.reverse_index(k)
        # feedback part: subtract sum_k A[k+1] * y[n-1-k]
        for k, a in enumerate(self.A[1:]):
            y = y - a * self.prev_outputs.reverse_index(k)
        y = y / self.A[0]
        self.prev_outputs.new_sample(y)
        return y

    def new_sample(self, x):
        """Alias for filter(x)."""
        return self.filter(x)
class Ringbuffer:
    """Fixed-size circular buffer with a constant-time running sum.

    Supports forward/reverse indexing relative to the read/write heads and
    O(1) sum/mean of the current window.
    """

    def __init__(self, size, init=0):
        """
        Parameters
        ----------
        size : int
            Number of samples held; must be >= 1.
        init : float
            Initial value placed in every slot.

        Raises
        ------
        ValueError
            If size < 1.
        """
        if size < 1:
            # BUG FIX: was `throw(Exception(...))` — `throw` is not Python and
            # raised NameError instead of signalling the real problem.
            raise ValueError("Invalid size for a ringbuffer: must be >=1")
        self.n_samples = size
        self.samples = np.ones((size,)) * init
        self.read_head = 1
        self.write_head = 0
        # BUG FIX: the running sum must start as the sum of the initial slots
        # (the old code started at 0 even for init != 0).
        self.sum = init * size

    def get_length(self):
        return self.n_samples

    def get_samples(self):
        """Return the samples rotated so the window starts at the read head."""
        return np.hstack((self.samples[self.read_head-1:], self.samples[0:self.read_head-1]))

    def get_sum(self):
        return self.sum

    def get_output(self):
        return self.samples[self.read_head-1]

    def get_mean(self):
        return self.sum / float(self.n_samples)

    def forward_index(self, i):
        """Sample i steps forward from the read head (wrapping)."""
        new_index = self.read_head + i - 1
        new_index = new_index % self.n_samples
        return self.samples[new_index]

    def reverse_index(self, i):
        """Sample i steps back from the most recently written one."""
        new_index = self.write_head - i - 1
        while new_index < 0:
            new_index += self.n_samples
        return self.samples[new_index]

    def new_sample(self, x):
        """Overwrite the oldest sample with x; return the displaced value."""
        s = self.samples[self.write_head]
        self.samples[self.write_head] = x
        # BUG FIX: subtract the displaced sample `s`, not samples[read_head]
        # (which is still inside the window).  The old code left the running
        # sum short by exactly the current oldest element.
        self.sum += x - s
        self.read_head += 1
        self.write_head += 1
        self.read_head %= self.n_samples
        self.write_head %= self.n_samples
        return s
|
[
"numpy.dot",
"numpy.ravel",
"numpy.empty",
"numpy.zeros",
"numpy.ones",
"numpy.hstack",
"numpy.sort",
"numpy.mean",
"numpy.array",
"numpy.arange",
"numpy.interp",
"numpy.eye",
"numpy.linalg.pinv",
"numpy.vstack"
] |
[((2181, 2222), 'numpy.zeros', 'np.zeros', (['(1, self.inputdim * self.order)'], {}), '((1, self.inputdim * self.order))\n', (2189, 2222), True, 'import numpy as np\n'), ((2240, 2276), 'numpy.zeros', 'np.zeros', (['(self.inputdim * self.order)'], {}), '(self.inputdim * self.order)\n', (2248, 2276), True, 'import numpy as np\n'), ((2295, 2329), 'numpy.eye', 'np.eye', (['(self.inputdim * self.order)'], {}), '(self.inputdim * self.order)\n', (2301, 2329), True, 'import numpy as np\n'), ((2575, 2611), 'numpy.zeros', 'np.zeros', (['(self.inputdim * self.order)'], {}), '(self.inputdim * self.order)\n', (2583, 2611), True, 'import numpy as np\n'), ((4240, 4285), 'numpy.mean', 'np.mean', (['distances[:self.number_of_neighbour]'], {}), '(distances[:self.number_of_neighbour])\n', (4247, 4285), True, 'import numpy as np\n'), ((5134, 5152), 'numpy.sort', 'np.sort', (['distances'], {}), '(distances)\n', (5141, 5152), True, 'import numpy as np\n'), ((5220, 5254), 'numpy.mean', 'np.mean', (['self.observations'], {'axis': '(0)'}), '(self.observations, axis=0)\n', (5227, 5254), True, 'import numpy as np\n'), ((6373, 6398), 'numpy.empty', 'np.empty', (['(0, self.order)'], {}), '((0, self.order))\n', (6381, 6398), True, 'import numpy as np\n'), ((8073, 8096), 'numpy.zeros', 'np.zeros', (['(self.order,)'], {}), '((self.order,))\n', (8081, 8096), True, 'import numpy as np\n'), ((8879, 8896), 'numpy.linalg.pinv', 'np.linalg.pinv', (['B'], {}), '(B)\n', (8893, 8896), True, 'import numpy as np\n'), ((10209, 10295), 'numpy.hstack', 'np.hstack', (['(self.samples[self.read_head - 1:], self.samples[0:self.read_head - 1])'], {}), '((self.samples[self.read_head - 1:], self.samples[0:self.read_head -\n 1]))\n', (10218, 10295), True, 'import numpy as np\n'), ((6581, 6613), 'numpy.vstack', 'np.vstack', (['(self.samples, point)'], {}), '((self.samples, point))\n', (6590, 6613), True, 'import numpy as np\n'), ((7002, 7018), 'numpy.array', 'np.array', (['[time]'], {}), '([time])\n', (7010, 7018), 
True, 'import numpy as np\n'), ((7050, 7067), 'numpy.array', 'np.array', (['[value]'], {}), '([value])\n', (7058, 7067), True, 'import numpy as np\n'), ((7113, 7159), 'numpy.arange', 'np.arange', (['self.last_time', 'time', 'self.stepsize'], {}), '(self.last_time, time, self.stepsize)\n', (7122, 7159), True, 'import numpy as np\n'), ((7191, 7267), 'numpy.interp', 'np.interp', (['self.time_steps', '[self.last_time, time]', '[self.last_value, value]'], {}), '(self.time_steps, [self.last_time, time], [self.last_value, value])\n', (7200, 7267), True, 'import numpy as np\n'), ((10011, 10027), 'numpy.ones', 'np.ones', (['(size,)'], {}), '((size,))\n', (10018, 10027), True, 'import numpy as np\n'), ((4535, 4573), 'numpy.vstack', 'np.vstack', (['(self.observations, sample)'], {}), '((self.observations, sample))\n', (4544, 4573), True, 'import numpy as np\n'), ((7829, 7851), 'numpy.ravel', 'np.ravel', (['sgolay[i, :]'], {}), '(sgolay[i, :])\n', (7837, 7851), True, 'import numpy as np\n'), ((5084, 5107), 'numpy.dot', 'np.dot', (['diff', 'self.icov'], {}), '(diff, self.icov)\n', (5090, 5107), True, 'import numpy as np\n')]
|