code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
from concurrent.futures.thread import ThreadPoolExecutor
from io import BytesIO
import os
import cv2
import numpy as np
import requests
def get_img(url):
try:
headers = {"User-Agent": "Mozilla/5.0 (Macintosh; "
"Intel Mac OS X 10_11_6) "
"AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/61.0.3163.100 Safari/537.36"}
response = requests.get(url, headers=headers, timeout=2)
if response.status_code == requests.codes.ok:
return response
else:
return None
except Exception:
return None
def download_images_from_file(label):
with open("links_" + label + ".txt") as file:
URLs = file.readlines()
URLs = [url.rstrip() for url in URLs]
next_img_num = 0
num_threads = min(len(URLs), 30)
for i in range(0, len(URLs), 50):
part = URLs[i:i + 50]
with ThreadPoolExecutor(num_threads) as executor:
results = [result for result
in executor.map(get_img, part)
if result is not None]
for result in results:
filename = str(next_img_num) + ".png"
filepath = os.path.join("dataset_2", label, filename)
img_bytes = BytesIO(result.content)
img = cv2.imdecode(np.frombuffer(img_bytes.read(), np.uint8),
flags=1)
if img is None:
continue
height, width = img.shape[:2]
if height < width:
new_height = 224
new_width = int((new_height / height) * width)
else:
new_width = 224
new_height = int((new_width / width) * height)
img = cv2.resize(img,
dsize=(new_width, new_height),
interpolation=cv2.INTER_AREA)
cv2.imwrite(filepath, img)
next_img_num += 1
def create_csv(labels):
labels.sort()
text = "file label\n"
for label_num, label in enumerate(labels):
label_dir = os.path.join("dataset_2", label)
filenames = os.listdir(label_dir)
filenames = [os.path.join(label_dir, filename)
for filename in filenames]
for filename in filenames:
# save numerical values for labels
text += filename + " " + str(label_num) + "\n"
with open("dataset_2.csv", "w") as file:
file.write(text)
if __name__ == '__main__':
#for file in ["cats", "cats", "cats"]:
# download_images_from_file(file)
#labels = ["cat", "dog", "owl"]
#create_csv(labels)
#for file in ["deku", "naruto", "saitama"]:
# download_images_from_file(file)
labels = ["deku", "naruto", "saitama"]
create_csv(labels)
| [
"cv2.imwrite",
"os.listdir",
"os.path.join",
"io.BytesIO",
"requests.get",
"cv2.resize",
"concurrent.futures.thread.ThreadPoolExecutor"
] | [((452, 497), 'requests.get', 'requests.get', (['url'], {'headers': 'headers', 'timeout': '(2)'}), '(url, headers=headers, timeout=2)\n', (464, 497), False, 'import requests\n'), ((2162, 2194), 'os.path.join', 'os.path.join', (['"""dataset_2"""', 'label'], {}), "('dataset_2', label)\n", (2174, 2194), False, 'import os\n'), ((2215, 2236), 'os.listdir', 'os.listdir', (['label_dir'], {}), '(label_dir)\n', (2225, 2236), False, 'import os\n'), ((966, 997), 'concurrent.futures.thread.ThreadPoolExecutor', 'ThreadPoolExecutor', (['num_threads'], {}), '(num_threads)\n', (984, 997), False, 'from concurrent.futures.thread import ThreadPoolExecutor\n'), ((1257, 1299), 'os.path.join', 'os.path.join', (['"""dataset_2"""', 'label', 'filename'], {}), "('dataset_2', label, filename)\n", (1269, 1299), False, 'import os\n'), ((1324, 1347), 'io.BytesIO', 'BytesIO', (['result.content'], {}), '(result.content)\n', (1331, 1347), False, 'from io import BytesIO\n'), ((1818, 1894), 'cv2.resize', 'cv2.resize', (['img'], {'dsize': '(new_width, new_height)', 'interpolation': 'cv2.INTER_AREA'}), '(img, dsize=(new_width, new_height), interpolation=cv2.INTER_AREA)\n', (1828, 1894), False, 'import cv2\n'), ((1966, 1992), 'cv2.imwrite', 'cv2.imwrite', (['filepath', 'img'], {}), '(filepath, img)\n', (1977, 1992), False, 'import cv2\n'), ((2258, 2291), 'os.path.join', 'os.path.join', (['label_dir', 'filename'], {}), '(label_dir, filename)\n', (2270, 2291), False, 'import os\n')] |
import random
import pandas
import numpy as np
from sklearn import metrics, cross_validation
import tensorflow as tf
from tensorflow.contrib import layers
from tensorflow.contrib import learn
random.seed(42)
"""
data = pandas.read_csv('titanic_train.csv')
X = data[["Embarked"]]
y = data["Survived"]
X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.2, random_state=42)
embarked_classes = X_train["Embarked"].unique()
n_classes = len(embarked_classes) + 1
print('Embarked has next classes: ', embarked_classes)
"""
X_train = ["s", "a", "s", "d"]
cat_processor = learn.preprocessing.CategoricalProcessor()
X_train = np.array(list(cat_processor.fit_transform(X_train)))
t = X_train[0][0]
result = cat_processor.vocabularies_[0].reverse(t)
| [
"tensorflow.contrib.learn.preprocessing.CategoricalProcessor",
"random.seed"
] | [((194, 209), 'random.seed', 'random.seed', (['(42)'], {}), '(42)\n', (205, 209), False, 'import random\n'), ((603, 645), 'tensorflow.contrib.learn.preprocessing.CategoricalProcessor', 'learn.preprocessing.CategoricalProcessor', ([], {}), '()\n', (643, 645), False, 'from tensorflow.contrib import learn\n')] |
from web3 import Web3
from hexbytes import HexBytes
from .contract import Contract
from . import mk_contract_address, encode_hex
def buildContexts(ops, api, contracts, txhash):
contract_stack = []
tx = api.getTransaction(txhash)
to = tx['to']
blnum = int(tx['blockNumber'])
if to == '0x0':
to = None
create = to is None
# contract deployment
# need to generate the contract address in order to fetch the bytecode
if to is None:
baddr = mk_contract_address(tx['from'], tx['nonce'])
to = '0x%s' % encode_hex(baddr).decode()
cache = {}
for addr in getAddresses(ops, to):
if addr in cache:
c = cache[addr]
if create and addr == to:
if addr + '_created' in c:
c = cache[addr + '_created']
else:
# if it's cached, then the contract is already created so we need
# to return the Contract instance w/ create = False
newc = object.__new__(Contract)
newc.__dict__ = c.__dict__.copy()
newc.create = False
c = newc
cache[addr + '_created'] = c
else:
acc = api.getAccountInfo(addr, blnum)
c = findContractForBytecode(contracts, acc['code'])
cache[addr] = c
if not c:
print("Couldn't find contract for address {}".format(addr))
if c and create and addr == to:
c.create = True
contract_stack.append(Context(addr, c))
# contractTexts are not loaded by default, the following will
# load the contractTexts and populate the sourceCache for the contract
# corresponding to each op in this tx, greatly improving the response
# time when quickly moving through the opviewer ui
i = 0
while i < len(ops):
pc = ops[i]['pc']
contract_stack[i].getSourceCode(pc)
i += 1
return contract_stack
def getAddresses(ops, original_contract):
""" determine the address of the sourceCode for each operation
Returns an array of addresses, 1 for each op in ops
"""
addresses = []
cur_depth = None
prev_depth = None
prev_op = None
cur_address = original_contract
place_holders = [] # stores the index of where CREATE op addr should go in the addr_stack
depth_to_addr = {} # mapping to track depth to an addr so we can easily push the current addr on the stack when we return from a call
step = 0
for o in ops:
if not 'depth' in o.keys():
# We're done here
break
# Address tracking
cur_depth = o['depth']
# depth may not always start at 1. ganache-cli starts at 0
# this will set the prev_depth from the first op
if not prev_op:
prev_depth = cur_depth
depth_to_addr[cur_depth] = cur_address
if cur_depth > prev_depth:
# Made it into a call-variant
# All call-lookalikes are 'gas,address,value' on stack,
# so address is second item of prev line
#
# There's one exception, though; CREATE
# With a CREATE, we don't know the address until after the RETURN
# so we push a placeholder and update on the Return
if prev_op['op'] == 0xf0:
cur_address = None
place_holders.append([len(addresses)])
else:
cur_address = prev_op['stack'][-2]
depth_to_addr[cur_depth] = cur_address
if cur_depth < prev_depth:
# RETURN op. we now know the prev_depth address, so add to context
if cur_address is None and prev_op['op'] == 0xf3:
prev_address = o['stack'][-1]
for i in place_holders.pop():
addresses[i] = prev_address
# Returned from a call
cur_address = depth_to_addr[cur_depth]
if not cur_address:
place_holders[-1].append(len(addresses))
addresses.append(cur_address)
prev_op = o
prev_depth = cur_depth
# handle debug_traceTransaction output
def fixAddr(a):
if a and len(a) > 40:
if (a.startswith('0x') and len(a) == 42):
return a
else:
return "0x%s" % a[24:]
addresses = [fixAddr(a) for a in addresses]
return addresses
def findContractForBytecode(contracts, bytecode):
if type(bytecode) is HexBytes:
bytecode = Web3.toHex(bytecode)
if bytecode.startswith('0x'):
bytecode = bytecode[2:]
for c in contracts:
# ignore last 34 bytes which is just metadata
if c.bin and c.bin[:-68] == bytecode[:-68] or c.binRuntime and c.binRuntime[:-68] == bytecode[:-68]:
return c
return None
class Context(object):
def __init__(self, address, contract):
self.address = address
self.contract = contract
def getSourceCode(self, pc):
if self.contract:
return self.contract.getSourceCode(pc)
return "Missing Contract", (0, 0)
@property
def name(self):
return self.contract.name if self.contract else "?"
| [
"web3.Web3.toHex"
] | [((4606, 4626), 'web3.Web3.toHex', 'Web3.toHex', (['bytecode'], {}), '(bytecode)\n', (4616, 4626), False, 'from web3 import Web3\n')] |
from django.contrib import admin
from .models import Contact
class ContactAdmin(admin.ModelAdmin):
list_display = (
'created_on',
'user',
'name',
'email_address',
'subject',
'message',
)
ordering = ('created_on',)
actions = ['mark_as_answered']
def mark_as_answered(self, request, queryset):
queryset.update(answered=True)
admin.site.register(Contact, ContactAdmin) | [
"django.contrib.admin.site.register"
] | [((406, 448), 'django.contrib.admin.site.register', 'admin.site.register', (['Contact', 'ContactAdmin'], {}), '(Contact, ContactAdmin)\n', (425, 448), False, 'from django.contrib import admin\n')] |
import os
import math
import xls_cli.ansi as ansi
from xls_cli.grid import Grid
from getkey import getkey, keys
class Frame:
width, height = 0, 0
printable_window = "\x1B[2J"
title = "unititled"
grid = None
def __init__(self, title):
rows, columns = os.popen('stty size', 'r').read().split()
self.title = title
self.height = int(rows)
self.width = int(columns)
def render(self):
self.printable_window += self.draw_title_bar()
self.printable_window += self.draw_grid()
print(self.printable_window)
def loop(self):
while 1:
key = getkey()
if key == keys.UP:
self.grid.move_up()
self.refresh()
if key == keys.DOWN:
self.grid.move_down()
self.refresh()
if key == keys.RIGHT:
self.grid.move_right()
self.refresh()
if key == keys.LEFT:
self.grid.move_left()
self.refresh()
elif key == 'q':
quit()
def refresh(self):
self.printable_window = "\x1B[2J"
self.render()
def draw_title_bar(self):
title = "%s - %s" %("xls-cli", self.title)
return ansi.bg(title.center(self.width, " "), 28)
def draw_grid(self):
grid_to_string = "\n" + "-" * self.width + "\n"
for j in range(0, (len(self.grid.subgrid))):
row = []
for i in range(0, (len(self.grid.subgrid[0]) )):
text = "{:<20}".format(" " + str(self.grid.subgrid[j][i]))
if (j == self.grid.pos["y"] and i == self.grid.pos["x"]):
text = ansi.bg(text, 8)
row.append(text)
line_separator = "-" * self.width
grid_to_string += "%s\n%s\n" %("|".join(row), line_separator)
#grid_to_string += max(0, (self.grid.sheet.nrows - self.grid.max_rows)) * "\n"
return grid_to_string
| [
"getkey.getkey",
"os.popen",
"xls_cli.ansi.bg"
] | [((652, 660), 'getkey.getkey', 'getkey', ([], {}), '()\n', (658, 660), False, 'from getkey import getkey, keys\n'), ((1760, 1776), 'xls_cli.ansi.bg', 'ansi.bg', (['text', '(8)'], {}), '(text, 8)\n', (1767, 1776), True, 'import xls_cli.ansi as ansi\n'), ((283, 309), 'os.popen', 'os.popen', (['"""stty size"""', '"""r"""'], {}), "('stty size', 'r')\n", (291, 309), False, 'import os\n')] |
import unittest
import json
from datetime import datetime,timedelta
from app import db, app, LOGGER
from sqlalchemy import event
from sqlalchemy.engine import Engine
from sqlite3 import Connection as SQLite3Connection
from app.organisation.models import Organisation
from app.users.models import AppUser, UserCategory, Country
from app.events.models import Event
from app.events.models import EventType
from app.applicationModel.models import ApplicationForm
@event.listens_for(Engine, "connect")
def set_sqlite_pragma(dbapi_connection, connection_record):
cursor = dbapi_connection.cursor()
cursor.execute("PRAGMA foreign_keys=ON")
cursor.close()
class ApiTestCase(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(ApiTestCase, self).__init__(*args, **kwargs)
self.test_users = []
def add_user(self,
email='<EMAIL>',
firstname='User',
lastname='Lastname',
user_title='Mrs',
password='<PASSWORD>',
organisation_id=1,
is_admin=False,
post_create_fn=lambda x: None):
user = AppUser(email,
firstname,
lastname,
user_title,
password,
organisation_id,
is_admin)
user.verify()
post_create_fn(user)
db.session.add(user)
db.session.commit()
self.test_users.append(user)
return user
def setUp(self):
app.config['TESTING'] = True
app.config['DEBUG'] = True
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite://'
self.app = app.test_client()
db.reflect()
db.drop_all()
db.create_all()
LOGGER.setLevel('ERROR')
# Add dummy metadata
self.user_category = UserCategory('Postdoc')
db.session.add(self.user_category)
self.country = Country('South Africa')
db.session.add(self.country)
# Add a dummy organisation
self.add_organisation(domain='org')
db.session.flush()
def add_organisation(self, name='My Org', system_name='Baobab', small_logo='org.png',
large_logo='org_big.png', domain='com', url='www.org.com',
email_from='<EMAIL>', system_url='baobab.deeplearningindaba.com',
privacy_policy='PrivacyPolicy.pdf'):
org = Organisation(name, system_name, small_logo, large_logo, domain, url, email_from, system_url, privacy_policy)
db.session.add(org)
db.session.commit()
return org
def add_event(self,
name ='Test Event',
description = 'Event Description',
start_date = datetime.now() + timedelta(days=30),
end_date = datetime.now() + timedelta(days=60),
key = 'INDABA2025',
organisation_id = 1,
email_from = '<EMAIL>',
url = 'indaba.deeplearning',
application_open = datetime.now(),
application_close = datetime.now() + timedelta(days=10),
review_open = datetime.now() ,
review_close = datetime.now() + timedelta(days=15),
selection_open = datetime.now(),
selection_close = datetime.now() + timedelta(days=15),
offer_open = datetime.now(),
offer_close = datetime.now(),
registration_open = datetime.now(),
registration_close = datetime.now() + timedelta(days=15),
event_type = EventType.EVENT):
event = Event(name, description, start_date, end_date, key, organisation_id, email_from, url,
application_open, application_close, review_open, review_close, selection_open,
selection_close, offer_open, offer_close, registration_open, registration_close, event_type)
db.session.add(event)
db.session.commit()
return event
def get_auth_header_for(self, email, password='<PASSWORD>'):
body = {
'email': email,
'password': password
}
response = self.app.post('api/v1/authenticate', data=body)
data = json.loads(response.data)
header = {'Authorization': data['token']}
return header
def add_to_db(self, obj):
db.session.add(obj)
db.session.commit()
def tearDown(self):
db.session.remove()
db.reflect()
db.drop_all()
def create_application_form(self,
event_id = 1,
is_open = True,
nominations = False):
application_form = ApplicationForm(event_id, is_open, nominations)
db.session.add(application_form)
db.session.commit()
return application_form | [
"app.db.session.commit",
"app.users.models.AppUser",
"app.users.models.UserCategory",
"app.db.session.flush",
"app.db.session.add",
"app.db.reflect",
"app.db.create_all",
"datetime.timedelta",
"app.app.test_client",
"json.loads",
"app.events.models.Event",
"app.users.models.Country",
"app.db... | [((463, 499), 'sqlalchemy.event.listens_for', 'event.listens_for', (['Engine', '"""connect"""'], {}), "(Engine, 'connect')\n", (480, 499), False, 'from sqlalchemy import event\n'), ((1180, 1268), 'app.users.models.AppUser', 'AppUser', (['email', 'firstname', 'lastname', 'user_title', 'password', 'organisation_id', 'is_admin'], {}), '(email, firstname, lastname, user_title, password, organisation_id,\n is_admin)\n', (1187, 1268), False, 'from app.users.models import AppUser, UserCategory, Country\n'), ((1428, 1448), 'app.db.session.add', 'db.session.add', (['user'], {}), '(user)\n', (1442, 1448), False, 'from app import db, app, LOGGER\n'), ((1457, 1476), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (1474, 1476), False, 'from app import db, app, LOGGER\n'), ((1707, 1724), 'app.app.test_client', 'app.test_client', ([], {}), '()\n', (1722, 1724), False, 'from app import db, app, LOGGER\n'), ((1733, 1745), 'app.db.reflect', 'db.reflect', ([], {}), '()\n', (1743, 1745), False, 'from app import db, app, LOGGER\n'), ((1754, 1767), 'app.db.drop_all', 'db.drop_all', ([], {}), '()\n', (1765, 1767), False, 'from app import db, app, LOGGER\n'), ((1776, 1791), 'app.db.create_all', 'db.create_all', ([], {}), '()\n', (1789, 1791), False, 'from app import db, app, LOGGER\n'), ((1800, 1824), 'app.LOGGER.setLevel', 'LOGGER.setLevel', (['"""ERROR"""'], {}), "('ERROR')\n", (1815, 1824), False, 'from app import db, app, LOGGER\n'), ((1884, 1907), 'app.users.models.UserCategory', 'UserCategory', (['"""Postdoc"""'], {}), "('Postdoc')\n", (1896, 1907), False, 'from app.users.models import AppUser, UserCategory, Country\n'), ((1916, 1950), 'app.db.session.add', 'db.session.add', (['self.user_category'], {}), '(self.user_category)\n', (1930, 1950), False, 'from app import db, app, LOGGER\n'), ((1974, 1997), 'app.users.models.Country', 'Country', (['"""South Africa"""'], {}), "('South Africa')\n", (1981, 1997), False, 'from app.users.models import AppUser, 
UserCategory, Country\n'), ((2006, 2034), 'app.db.session.add', 'db.session.add', (['self.country'], {}), '(self.country)\n', (2020, 2034), False, 'from app import db, app, LOGGER\n'), ((2123, 2141), 'app.db.session.flush', 'db.session.flush', ([], {}), '()\n', (2139, 2141), False, 'from app import db, app, LOGGER\n'), ((2518, 2630), 'app.organisation.models.Organisation', 'Organisation', (['name', 'system_name', 'small_logo', 'large_logo', 'domain', 'url', 'email_from', 'system_url', 'privacy_policy'], {}), '(name, system_name, small_logo, large_logo, domain, url,\n email_from, system_url, privacy_policy)\n', (2530, 2630), False, 'from app.organisation.models import Organisation\n'), ((2635, 2654), 'app.db.session.add', 'db.session.add', (['org'], {}), '(org)\n', (2649, 2654), False, 'from app import db, app, LOGGER\n'), ((2663, 2682), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (2680, 2682), False, 'from app import db, app, LOGGER\n'), ((3152, 3166), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3164, 3166), False, 'from datetime import datetime, timedelta\n'), ((3273, 3287), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3285, 3287), False, 'from datetime import datetime, timedelta\n'), ((3393, 3407), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3405, 3407), False, 'from datetime import datetime, timedelta\n'), ((3511, 3525), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3523, 3525), False, 'from datetime import datetime, timedelta\n'), ((3558, 3572), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3570, 3572), False, 'from datetime import datetime, timedelta\n'), ((3611, 3625), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3623, 3625), False, 'from datetime import datetime, timedelta\n'), ((3767, 4037), 'app.events.models.Event', 'Event', (['name', 'description', 'start_date', 'end_date', 'key', 'organisation_id', 'email_from', 'url', 'application_open', 
'application_close', 'review_open', 'review_close', 'selection_open', 'selection_close', 'offer_open', 'offer_close', 'registration_open', 'registration_close', 'event_type'], {}), '(name, description, start_date, end_date, key, organisation_id,\n email_from, url, application_open, application_close, review_open,\n review_close, selection_open, selection_close, offer_open, offer_close,\n registration_open, registration_close, event_type)\n', (3772, 4037), False, 'from app.events.models import Event\n'), ((4085, 4106), 'app.db.session.add', 'db.session.add', (['event'], {}), '(event)\n', (4099, 4106), False, 'from app import db, app, LOGGER\n'), ((4115, 4134), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (4132, 4134), False, 'from app import db, app, LOGGER\n'), ((4392, 4417), 'json.loads', 'json.loads', (['response.data'], {}), '(response.data)\n', (4402, 4417), False, 'import json\n'), ((4529, 4548), 'app.db.session.add', 'db.session.add', (['obj'], {}), '(obj)\n', (4543, 4548), False, 'from app import db, app, LOGGER\n'), ((4557, 4576), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (4574, 4576), False, 'from app import db, app, LOGGER\n'), ((4610, 4629), 'app.db.session.remove', 'db.session.remove', ([], {}), '()\n', (4627, 4629), False, 'from app import db, app, LOGGER\n'), ((4638, 4650), 'app.db.reflect', 'db.reflect', ([], {}), '()\n', (4648, 4650), False, 'from app import db, app, LOGGER\n'), ((4659, 4672), 'app.db.drop_all', 'db.drop_all', ([], {}), '()\n', (4670, 4672), False, 'from app import db, app, LOGGER\n'), ((4904, 4951), 'app.applicationModel.models.ApplicationForm', 'ApplicationForm', (['event_id', 'is_open', 'nominations'], {}), '(event_id, is_open, nominations)\n', (4919, 4951), False, 'from app.applicationModel.models import ApplicationForm\n'), ((4960, 4992), 'app.db.session.add', 'db.session.add', (['application_form'], {}), '(application_form)\n', (4974, 4992), False, 'from app import db, app, LOGGER\n'), 
((5001, 5020), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (5018, 5020), False, 'from app import db, app, LOGGER\n'), ((2849, 2863), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2861, 2863), False, 'from datetime import datetime, timedelta\n'), ((2866, 2884), 'datetime.timedelta', 'timedelta', ([], {'days': '(30)'}), '(days=30)\n', (2875, 2884), False, 'from datetime import datetime, timedelta\n'), ((2915, 2929), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2927, 2929), False, 'from datetime import datetime, timedelta\n'), ((2932, 2950), 'datetime.timedelta', 'timedelta', ([], {'days': '(60)'}), '(days=60)\n', (2941, 2950), False, 'from datetime import datetime, timedelta\n'), ((3205, 3219), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3217, 3219), False, 'from datetime import datetime, timedelta\n'), ((3222, 3240), 'datetime.timedelta', 'timedelta', ([], {'days': '(10)'}), '(days=10)\n', (3231, 3240), False, 'from datetime import datetime, timedelta\n'), ((3322, 3336), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3334, 3336), False, 'from datetime import datetime, timedelta\n'), ((3339, 3357), 'datetime.timedelta', 'timedelta', ([], {'days': '(15)'}), '(days=15)\n', (3348, 3357), False, 'from datetime import datetime, timedelta\n'), ((3444, 3458), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3456, 3458), False, 'from datetime import datetime, timedelta\n'), ((3461, 3479), 'datetime.timedelta', 'timedelta', ([], {'days': '(15)'}), '(days=15)\n', (3470, 3479), False, 'from datetime import datetime, timedelta\n'), ((3665, 3679), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3677, 3679), False, 'from datetime import datetime, timedelta\n'), ((3682, 3700), 'datetime.timedelta', 'timedelta', ([], {'days': '(15)'}), '(days=15)\n', (3691, 3700), False, 'from datetime import datetime, timedelta\n')] |
import numpy as np
import matplotlib.pyplot as plt
class View(object):
def __init__(self, width, height, title, show_axis=True, packed=True, text_size=2.8, show_text=True, col_size=6, *args, **kwargs):
self.width = width
self.height = height
self.title = title
self.show_axis = show_axis
self.packed = packed
self.text_size = text_size
self.show_text = show_text
self.col_size = col_size
def prepare(self, *args, **kwargs):
raise NotImplementedError()
def save(self, filename):
raise NotImplementedError()
def show(self, *args, **kwrags):
raise NotImplementedError()
class MatplotView(View):
def __init__(self, width, height, title, show_axis=True, packed=True, text_size=2.8, show_text=True, col_size=6, *args, **kwargs):
super(MatplotView, self).__init__(width, height, title, show_axis, packed, text_size, show_text, col_size, *args, **kwargs)
self._fig = None
def __del__(self):
self._close_fig()
def _close_fig(self):
if self._fig:
plt.close(self._fig)
def prepare(self, *args, **kwargs):
self._close_fig()
self._fig = plt.figure(figsize=(self.width, self.height))
plt.title(self.title)
plt.axis('off')
# plt.axis('on' if self.show_axis else 'off')
plt.rc('font', **{'size': self.text_size})
def save(self, filename, transparent=False, bbox_inches='tight', dpi=400):
self._fig.savefig(filename, transparent=transparent, dpi=dpi, bbox_inches=bbox_inches)
def show(self, *args, **kwrags):
raise NotImplementedError()
###################################
#visualize map
#def view_map(self, what = 'codebook', which_dim = 'all', pack= 'Yes', text_size = 2.8,save='No', save_dir = 'empty',grid='No',text='Yes',cmap='None',COL_SiZe=6):
#
# mapsize = getattr(self, 'mapsize')
# if np.min(mapsize) >1:
# if pack == 'No':
# self.view_2d(self, text_size, which_dim = which_dim, what = what)
# else:
# #print 'hi'
# self.view_2d_Pack(self, text_size, which_dim = which_dim,what = what,save = save, save_dir = save_dir, grid=grid,text=text,CMAP=cmap,col_sz=COL_SiZe)
#
# elif np.min(mapsize) == 1:
# self.view_1d(self, text_size, which_dim = which_dim, what = what)
#
#
| [
"matplotlib.pyplot.axis",
"matplotlib.pyplot.close",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.title",
"matplotlib.pyplot.rc"
] | [((1216, 1261), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(self.width, self.height)'}), '(figsize=(self.width, self.height))\n', (1226, 1261), True, 'import matplotlib.pyplot as plt\n'), ((1270, 1291), 'matplotlib.pyplot.title', 'plt.title', (['self.title'], {}), '(self.title)\n', (1279, 1291), True, 'import matplotlib.pyplot as plt\n'), ((1300, 1315), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (1308, 1315), True, 'import matplotlib.pyplot as plt\n'), ((1378, 1420), 'matplotlib.pyplot.rc', 'plt.rc', (['"""font"""'], {}), "('font', **{'size': self.text_size})\n", (1384, 1420), True, 'import matplotlib.pyplot as plt\n'), ((1108, 1128), 'matplotlib.pyplot.close', 'plt.close', (['self._fig'], {}), '(self._fig)\n', (1117, 1128), True, 'import matplotlib.pyplot as plt\n')] |
#!/usr/bin/env python3
import getpass
import json
import pprint
import requests
import sys
# The credentials to be used
try:
user = input('Login name: ')
# If it's a tty, use the version that doesn't echo the password.
if sys.stdin.isatty():
password = getpass.getpass('Password: ')
else:
sys.stdout.write('Password: ')
sys.stdout.flush()
password = sys.stdin.readline().rstrip()
except:
print('Quitting')
sys.exit(1)
login = {
'username': user,
'password': password,
}
# Send credentials to login url to retrieve token. Raise
# an error, if the return code indicates a problem.
# Please use the URL of the system you'd like to access the API
# in the example below.
resp = requests.post('https://app.dimensions.ai/api/auth.json', json=login)
resp.raise_for_status()
# Create http header using the generated token.
headers = {
'Authorization': "JWT " + resp.json()['token']
}
# Execute DSL query.
resp = requests.post(
'https://app.dimensions.ai/api/dsl.json',
data='search publications in title_abstract_only for "SBML" return publications[basics+extras]',
headers=headers)
# Write to file.
with open('example-publications.json', 'w') as f:
json.dump(resp.json(), f)
| [
"requests.post",
"getpass.getpass",
"sys.stdin.readline",
"sys.exit",
"sys.stdin.isatty",
"sys.stdout.flush",
"sys.stdout.write"
] | [((753, 821), 'requests.post', 'requests.post', (['"""https://app.dimensions.ai/api/auth.json"""'], {'json': 'login'}), "('https://app.dimensions.ai/api/auth.json', json=login)\n", (766, 821), False, 'import requests\n'), ((993, 1172), 'requests.post', 'requests.post', (['"""https://app.dimensions.ai/api/dsl.json"""'], {'data': '"""search publications in title_abstract_only for "SBML" return publications[basics+extras]"""', 'headers': 'headers'}), '(\'https://app.dimensions.ai/api/dsl.json\', data=\n \'search publications in title_abstract_only for "SBML" return publications[basics+extras]\'\n , headers=headers)\n', (1006, 1172), False, 'import requests\n'), ((239, 257), 'sys.stdin.isatty', 'sys.stdin.isatty', ([], {}), '()\n', (255, 257), False, 'import sys\n'), ((278, 307), 'getpass.getpass', 'getpass.getpass', (['"""Password: """'], {}), "('Password: ')\n", (293, 307), False, 'import getpass\n'), ((326, 356), 'sys.stdout.write', 'sys.stdout.write', (['"""Password: """'], {}), "('Password: ')\n", (342, 356), False, 'import sys\n'), ((365, 383), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (381, 383), False, 'import sys\n'), ((467, 478), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (475, 478), False, 'import sys\n'), ((403, 423), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (421, 423), False, 'import sys\n')] |
import cv2
import mediapipe as mp
import time
'''
Hand Landmarks:
WRIST = 0
THUMB_CMC = 1
THUMB_MCP = 2
THUMB_IP = 3
THUMB_TIP = 4
INDEX_FINGER_MCP = 5
INDEX_FINGER_PIP = 6
INDEX_FINGER_DIP = 7
INDEX_FINGER_TIP = 8
MIDDLE_FINGER_MCP = 9
MIDDLE_FINGER_PIP = 10
MIDDLE_FINGER_DIP = 11
MIDDLE_FINGER_TIP = 12
RING_FINGER_MCP = 13
RING_FINGER_PIP = 14
RING_FINGER_DIP = 15
RING_FINGER_TIP = 16
PINKY_MCP = 17
PINKY_PIP = 18
PINKY_DIP = 19
PINKY_TIP = 20
'''
class handDetector():
def __init__(self,mode=False,maxHands=2,detectionConf=0.5,trackConf=0.5):
self.mode = mode
self.maxHands = maxHands
self.detectionConf = detectionConf
self.trackConf = trackConf
self.mpHands = mp.solutions.hands
self.hands = self.mpHands.Hands(self.mode, self.maxHands, self.detectionConf, self.trackConf)
self.mpDraw = mp.solutions.drawing_utils
def find_hands(self,img,draw = True):
imgRGB = cv2.cvtColor(img,cv2.COLOR_BGR2RGB)
self.results = self.hands.process(imgRGB)
if self.results.multi_hand_landmarks:
for handLms in self.results.multi_hand_landmarks:
if draw:
self.mpDraw.draw_landmarks(img, handLms, self.mpHands.HAND_CONNECTIONS)
return img
def find_position(self,img, handnum=0, draw=True):
lmList = []
if self.results.multi_hand_landmarks:
handLms = self.results.multi_hand_landmarks[handnum]
for idx, lm in enumerate(handLms.landmark):
#print(idx,lm)
# rudimentary landmark tracker
h,w,c = img.shape
cx,cy = int(lm.x*w),int(lm.y*h)
lmList.append([idx,cx,cy])
if draw:
cv2.circle(img,(cx,cy),6,(255,0,255),cv2.FILLED)
return lmList
def main():
pTime = 0
cTime = 0
cap = cv2.VideoCapture(0)
detector = handDetector()
while True:
success, img = cap.read()
img = detector.find_hands(img)
mylist = detector.find_position(img)
if len(mylist) != 0:
print(mylist[3])
cTime = time.time()
fps = 1/(cTime-pTime)
pTime = cTime
cv2.putText(img,str(int(fps)),(10,70),cv2.FONT_HERSHEY_PLAIN,3,(255,255,0),3)
cv2.imshow("Image",img)
cv2.waitKey(1)
if __name__ == "__main__":
main() | [
"cv2.imshow",
"cv2.circle",
"cv2.VideoCapture",
"cv2.cvtColor",
"time.time",
"cv2.waitKey"
] | [((2002, 2021), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (2018, 2021), False, 'import cv2\n'), ((988, 1024), 'cv2.cvtColor', 'cv2.cvtColor', (['img', 'cv2.COLOR_BGR2RGB'], {}), '(img, cv2.COLOR_BGR2RGB)\n', (1000, 1024), False, 'import cv2\n'), ((2285, 2296), 'time.time', 'time.time', ([], {}), '()\n', (2294, 2296), False, 'import time\n'), ((2445, 2469), 'cv2.imshow', 'cv2.imshow', (['"""Image"""', 'img'], {}), "('Image', img)\n", (2455, 2469), False, 'import cv2\n'), ((2478, 2492), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (2489, 2492), False, 'import cv2\n'), ((1874, 1929), 'cv2.circle', 'cv2.circle', (['img', '(cx, cy)', '(6)', '(255, 0, 255)', 'cv2.FILLED'], {}), '(img, (cx, cy), 6, (255, 0, 255), cv2.FILLED)\n', (1884, 1929), False, 'import cv2\n')] |
# Generated by Django 2.0 on 2017-12-27 03:05
from django.db import migrations, models
class Migration(migrations.Migration):
    # Initial migration for this app: creates the Search model.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Search',
            fields=[
                # Explicit auto-incrementing integer primary key.
                ('search_id', models.AutoField(primary_key=True, serialize=False)),
                # Free-form text of the search query.
                ('text', models.TextField()),
                # Timestamp set automatically when the row is first saved.
                ('searched_date', models.DateTimeField(auto_now_add=True)),
            ],
        ),
    ]
| [
"django.db.models.DateTimeField",
"django.db.models.AutoField",
"django.db.models.TextField"
] | [((307, 358), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)', 'serialize': '(False)'}), '(primary_key=True, serialize=False)\n', (323, 358), False, 'from django.db import migrations, models\n'), ((386, 404), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (402, 404), False, 'from django.db import migrations, models\n'), ((441, 480), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (461, 480), False, 'from django.db import migrations, models\n')] |
from __future__ import absolute_import, unicode_literals
from django.conf import settings
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.test.utils import override_settings
from wagtail.tests.testapp.rich_text import CustomRichTextArea
from wagtail.tests.utils import WagtailTestUtils
from wagtail.wagtailadmin.rich_text import HalloRichTextArea, get_rich_text_editor_widget
from wagtail.wagtailcore.models import Page
class TestGetRichTextEditorWidget(TestCase):
    """Verify get_rich_text_editor_widget() resolution of WAGTAILADMIN_RICH_TEXT_EDITORS."""

    @override_settings()
    def test_default(self):
        # Remove the setting entirely so the built-in default is used.
        if hasattr(settings, 'WAGTAILADMIN_RICH_TEXT_EDITORS'):
            del settings.WAGTAILADMIN_RICH_TEXT_EDITORS
        widget = get_rich_text_editor_widget()
        self.assertIsInstance(widget, HalloRichTextArea)

    @override_settings(WAGTAILADMIN_RICH_TEXT_EDITORS={
        'default': {'WIDGET': 'wagtail.tests.testapp.rich_text.CustomRichTextArea'},
    })
    def test_overridden_default_editor(self):
        # A 'default' entry replaces the built-in editor.
        widget = get_rich_text_editor_widget()
        self.assertIsInstance(widget, CustomRichTextArea)

    @override_settings(WAGTAILADMIN_RICH_TEXT_EDITORS={
        'custom': {'WIDGET': 'wagtail.tests.testapp.rich_text.CustomRichTextArea'},
    })
    def test_custom_editor_without_default(self):
        # A named entry is reachable even when 'default' is not declared.
        widget = get_rich_text_editor_widget('custom')
        self.assertIsInstance(widget, CustomRichTextArea)

    @override_settings(WAGTAILADMIN_RICH_TEXT_EDITORS={
        'default': {'WIDGET': 'wagtail.wagtailadmin.rich_text.HalloRichTextArea'},
        'custom': {'WIDGET': 'wagtail.tests.testapp.rich_text.CustomRichTextArea'},
    })
    def test_custom_editor_with_default(self):
        # Both the default and the named editor resolve independently.
        self.assertIsInstance(get_rich_text_editor_widget(), HalloRichTextArea)
        self.assertIsInstance(get_rich_text_editor_widget('custom'), CustomRichTextArea)
@override_settings()
class TestDefaultRichText(TestCase, WagtailTestUtils):
    """With no editor setting at all, page forms must use the hallo editor."""

    def setUp(self):
        # The site root page used as the parent for page creation.
        self.root_page = Page.objects.get(id=2)
        self.login()
        # Simulate the absence of the setting.
        if hasattr(settings, 'WAGTAILADMIN_RICH_TEXT_EDITORS'):
            del settings.WAGTAILADMIN_RICH_TEXT_EDITORS

    def tearDown(self):
        from wagtail.tests.testapp.models import DefaultRichBlockFieldPage
        from wagtail.tests.testapp.models import DefaultRichTextFieldPage
        # Drop cached form classes so later tests rebuild their widgets.
        DefaultRichTextFieldPage.get_edit_handler()._form_class = None
        handler = DefaultRichBlockFieldPage.get_edit_handler()
        if handler._form_class:
            rich_text_block = handler._form_class.base_fields['body'].block.child_blocks['rich_text']
            if hasattr(rich_text_block, 'field'):
                del rich_text_block.field
            handler._form_class = None

    def _get_add_page(self, model_name):
        # GET the page-creation view for the given test page model.
        return self.client.get(reverse(
            'wagtailadmin_pages:add', args=('tests', model_name, self.root_page.id)
        ))

    def test_default_editor_in_rich_text_field(self):
        response = self._get_add_page('defaultrichtextfieldpage')
        self.assertEqual(response.status_code, 200)
        # Check that hallo (default editor by now) is used.
        self.assertContains(response, 'makeHalloRichTextEditable("id_body");')

    def test_default_editor_in_rich_text_block(self):
        response = self._get_add_page('defaultrichblockfieldpage')
        self.assertEqual(response.status_code, 200)
        # Check that hallo (default editor by now) is used.
        self.assertContains(response, 'makeHalloRichTextEditable("__PREFIX__-value");')
@override_settings(WAGTAILADMIN_RICH_TEXT_EDITORS={
    'default': {'WIDGET': 'wagtail.tests.testapp.rich_text.CustomRichTextArea'},
})
class TestOverriddenDefaultRichText(TestCase, WagtailTestUtils):
    """When 'default' is overridden, all default-editor pages must use the custom widget."""

    def setUp(self):
        # The site root page used as the parent for page creation.
        self.root_page = Page.objects.get(id=2)
        self.login()

    def tearDown(self):
        from wagtail.tests.testapp.models import DefaultRichBlockFieldPage
        from wagtail.tests.testapp.models import DefaultRichTextFieldPage
        # Drop cached form classes so later tests rebuild their widgets.
        DefaultRichTextFieldPage.get_edit_handler()._form_class = None
        handler = DefaultRichBlockFieldPage.get_edit_handler()
        if handler._form_class:
            rich_text_block = handler._form_class.base_fields['body'].block.child_blocks['rich_text']
            if hasattr(rich_text_block, 'field'):
                del rich_text_block.field
            handler._form_class = None

    def _get_add_page(self, model_name):
        # GET the page-creation view for the given test page model.
        return self.client.get(reverse(
            'wagtailadmin_pages:add', args=('tests', model_name, self.root_page.id)
        ))

    def test_overridden_default_editor_in_rich_text_field(self):
        response = self._get_add_page('defaultrichtextfieldpage')
        self.assertEqual(response.status_code, 200)
        # hallo (the built-in default) must be replaced with the fake editor.
        self.assertNotContains(response, 'makeHalloRichTextEditable("id_body");')
        self.assertContains(response, 'customEditorInitScript("id_body");')

    def test_overridden_default_editor_in_rich_text_block(self):
        response = self._get_add_page('defaultrichblockfieldpage')
        self.assertEqual(response.status_code, 200)
        # hallo (the built-in default) must be replaced with the fake editor.
        self.assertNotContains(response, 'makeHalloRichTextEditable("__PREFIX__-value");')
        self.assertContains(response, 'customEditorInitScript("__PREFIX__-value");')
@override_settings(WAGTAILADMIN_RICH_TEXT_EDITORS={
    'default': {'WIDGET': 'wagtail.wagtailadmin.rich_text.HalloRichTextArea'},
    'custom': {'WIDGET': 'wagtail.tests.testapp.rich_text.CustomRichTextArea'},
})
class TestCustomDefaultRichText(TestCase, WagtailTestUtils):
    """Pages wired to the 'custom' editor must use it even when 'default' is hallo."""

    def setUp(self):
        # The site root page used as the parent for page creation.
        self.root_page = Page.objects.get(id=2)
        self.login()

    def tearDown(self):
        from wagtail.tests.testapp.models import CustomRichBlockFieldPage
        from wagtail.tests.testapp.models import CustomRichTextFieldPage
        # Drop cached form classes so later tests rebuild their widgets.
        for model in (CustomRichBlockFieldPage, CustomRichTextFieldPage):
            model.get_edit_handler()._form_class = None

    def _get_add_page(self, model_name):
        # GET the page-creation view for the given test page model.
        return self.client.get(reverse(
            'wagtailadmin_pages:add', args=('tests', model_name, self.root_page.id)
        ))

    def test_custom_editor_in_rich_text_field(self):
        response = self._get_add_page('customrichtextfieldpage')
        self.assertEqual(response.status_code, 200)
        # hallo (the built-in default) must be replaced with the fake editor.
        self.assertNotContains(response, 'makeHalloRichTextEditable("id_body");')
        self.assertContains(response, 'customEditorInitScript("id_body");')

    def test_custom_editor_in_rich_text_block(self):
        response = self._get_add_page('customrichblockfieldpage')
        self.assertEqual(response.status_code, 200)
        # hallo (the built-in default) must be replaced with the fake editor.
        self.assertNotContains(response, 'makeHalloRichTextEditable("__PREFIX__-value");')
        self.assertContains(response, 'customEditorInitScript("__PREFIX__-value");')
| [
"wagtail.tests.testapp.models.DefaultRichBlockFieldPage.get_edit_handler",
"wagtail.tests.testapp.models.DefaultRichTextFieldPage.get_edit_handler",
"django.core.urlresolvers.reverse",
"wagtail.tests.testapp.models.CustomRichBlockFieldPage.get_edit_handler",
"wagtail.tests.testapp.models.CustomRichTextField... | [((1910, 1929), 'django.test.utils.override_settings', 'override_settings', ([], {}), '()\n', (1927, 1929), False, 'from django.test.utils import override_settings\n'), ((3759, 3890), 'django.test.utils.override_settings', 'override_settings', ([], {'WAGTAILADMIN_RICH_TEXT_EDITORS': "{'default': {'WIDGET': 'wagtail.tests.testapp.rich_text.CustomRichTextArea'}}"}), "(WAGTAILADMIN_RICH_TEXT_EDITORS={'default': {'WIDGET':\n 'wagtail.tests.testapp.rich_text.CustomRichTextArea'}})\n", (3776, 3890), False, 'from django.test.utils import override_settings\n'), ((5831, 6041), 'django.test.utils.override_settings', 'override_settings', ([], {'WAGTAILADMIN_RICH_TEXT_EDITORS': "{'default': {'WIDGET': 'wagtail.wagtailadmin.rich_text.HalloRichTextArea'},\n 'custom': {'WIDGET': 'wagtail.tests.testapp.rich_text.CustomRichTextArea'}}"}), "(WAGTAILADMIN_RICH_TEXT_EDITORS={'default': {'WIDGET':\n 'wagtail.wagtailadmin.rich_text.HalloRichTextArea'}, 'custom': {\n 'WIDGET': 'wagtail.tests.testapp.rich_text.CustomRichTextArea'}})\n", (5848, 6041), False, 'from django.test.utils import override_settings\n'), ((516, 535), 'django.test.utils.override_settings', 'override_settings', ([], {}), '()\n', (533, 535), False, 'from django.test.utils import override_settings\n'), ((815, 946), 'django.test.utils.override_settings', 'override_settings', ([], {'WAGTAILADMIN_RICH_TEXT_EDITORS': "{'default': {'WIDGET': 'wagtail.tests.testapp.rich_text.CustomRichTextArea'}}"}), "(WAGTAILADMIN_RICH_TEXT_EDITORS={'default': {'WIDGET':\n 'wagtail.tests.testapp.rich_text.CustomRichTextArea'}})\n", (832, 946), False, 'from django.test.utils import override_settings\n'), ((1113, 1243), 'django.test.utils.override_settings', 'override_settings', ([], {'WAGTAILADMIN_RICH_TEXT_EDITORS': "{'custom': {'WIDGET': 'wagtail.tests.testapp.rich_text.CustomRichTextArea'}}"}), "(WAGTAILADMIN_RICH_TEXT_EDITORS={'custom': {'WIDGET':\n 
'wagtail.tests.testapp.rich_text.CustomRichTextArea'}})\n", (1130, 1243), False, 'from django.test.utils import override_settings\n'), ((1422, 1632), 'django.test.utils.override_settings', 'override_settings', ([], {'WAGTAILADMIN_RICH_TEXT_EDITORS': "{'default': {'WIDGET': 'wagtail.wagtailadmin.rich_text.HalloRichTextArea'},\n 'custom': {'WIDGET': 'wagtail.tests.testapp.rich_text.CustomRichTextArea'}}"}), "(WAGTAILADMIN_RICH_TEXT_EDITORS={'default': {'WIDGET':\n 'wagtail.wagtailadmin.rich_text.HalloRichTextArea'}, 'custom': {\n 'WIDGET': 'wagtail.tests.testapp.rich_text.CustomRichTextArea'}})\n", (1439, 1632), False, 'from django.test.utils import override_settings\n'), ((2057, 2079), 'wagtail.wagtailcore.models.Page.objects.get', 'Page.objects.get', ([], {'id': '(2)'}), '(id=2)\n', (2073, 2079), False, 'from wagtail.wagtailcore.models import Page\n'), ((2548, 2592), 'wagtail.tests.testapp.models.DefaultRichBlockFieldPage.get_edit_handler', 'DefaultRichBlockFieldPage.get_edit_handler', ([], {}), '()\n', (2590, 2592), False, 'from wagtail.tests.testapp.models import DefaultRichBlockFieldPage\n'), ((4045, 4067), 'wagtail.wagtailcore.models.Page.objects.get', 'Page.objects.get', ([], {'id': '(2)'}), '(id=2)\n', (4061, 4067), False, 'from wagtail.wagtailcore.models import Page\n'), ((4371, 4415), 'wagtail.tests.testapp.models.DefaultRichBlockFieldPage.get_edit_handler', 'DefaultRichBlockFieldPage.get_edit_handler', ([], {}), '()\n', (4413, 4415), False, 'from wagtail.tests.testapp.models import DefaultRichBlockFieldPage\n'), ((6205, 6227), 'wagtail.wagtailcore.models.Page.objects.get', 'Page.objects.get', ([], {'id': '(2)'}), '(id=2)\n', (6221, 6227), False, 'from wagtail.wagtailcore.models import Page\n'), ((759, 788), 'wagtail.wagtailadmin.rich_text.get_rich_text_editor_widget', 'get_rich_text_editor_widget', ([], {}), '()\n', (786, 788), False, 'from wagtail.wagtailadmin.rich_text import HalloRichTextArea, get_rich_text_editor_widget\n'), ((1056, 1085), 
'wagtail.wagtailadmin.rich_text.get_rich_text_editor_widget', 'get_rich_text_editor_widget', ([], {}), '()\n', (1083, 1085), False, 'from wagtail.wagtailadmin.rich_text import HalloRichTextArea, get_rich_text_editor_widget\n'), ((1357, 1394), 'wagtail.wagtailadmin.rich_text.get_rich_text_editor_widget', 'get_rich_text_editor_widget', (['"""custom"""'], {}), "('custom')\n", (1384, 1394), False, 'from wagtail.wagtailadmin.rich_text import HalloRichTextArea, get_rich_text_editor_widget\n'), ((1768, 1797), 'wagtail.wagtailadmin.rich_text.get_rich_text_editor_widget', 'get_rich_text_editor_widget', ([], {}), '()\n', (1795, 1797), False, 'from wagtail.wagtailadmin.rich_text import HalloRichTextArea, get_rich_text_editor_widget\n'), ((1848, 1885), 'wagtail.wagtailadmin.rich_text.get_rich_text_editor_widget', 'get_rich_text_editor_widget', (['"""custom"""'], {}), "('custom')\n", (1875, 1885), False, 'from wagtail.wagtailadmin.rich_text import HalloRichTextArea, get_rich_text_editor_widget\n'), ((2450, 2493), 'wagtail.tests.testapp.models.DefaultRichTextFieldPage.get_edit_handler', 'DefaultRichTextFieldPage.get_edit_handler', ([], {}), '()\n', (2491, 2493), False, 'from wagtail.tests.testapp.models import DefaultRichTextFieldPage\n'), ((2992, 3092), 'django.core.urlresolvers.reverse', 'reverse', (['"""wagtailadmin_pages:add"""'], {'args': "('tests', 'defaultrichtextfieldpage', self.root_page.id)"}), "('wagtailadmin_pages:add', args=('tests', 'defaultrichtextfieldpage',\n self.root_page.id))\n", (2999, 3092), False, 'from django.core.urlresolvers import reverse\n'), ((3414, 3515), 'django.core.urlresolvers.reverse', 'reverse', (['"""wagtailadmin_pages:add"""'], {'args': "('tests', 'defaultrichblockfieldpage', self.root_page.id)"}), "('wagtailadmin_pages:add', args=('tests',\n 'defaultrichblockfieldpage', self.root_page.id))\n", (3421, 3515), False, 'from django.core.urlresolvers import reverse\n'), ((4273, 4316), 
'wagtail.tests.testapp.models.DefaultRichTextFieldPage.get_edit_handler', 'DefaultRichTextFieldPage.get_edit_handler', ([], {}), '()\n', (4314, 4316), False, 'from wagtail.tests.testapp.models import DefaultRichTextFieldPage\n'), ((4826, 4926), 'django.core.urlresolvers.reverse', 'reverse', (['"""wagtailadmin_pages:add"""'], {'args': "('tests', 'defaultrichtextfieldpage', self.root_page.id)"}), "('wagtailadmin_pages:add', args=('tests', 'defaultrichtextfieldpage',\n self.root_page.id))\n", (4833, 4926), False, 'from django.core.urlresolvers import reverse\n'), ((5368, 5469), 'django.core.urlresolvers.reverse', 'reverse', (['"""wagtailadmin_pages:add"""'], {'args': "('tests', 'defaultrichblockfieldpage', self.root_page.id)"}), "('wagtailadmin_pages:add', args=('tests',\n 'defaultrichblockfieldpage', self.root_page.id))\n", (5375, 5469), False, 'from django.core.urlresolvers import reverse\n'), ((6431, 6474), 'wagtail.tests.testapp.models.CustomRichBlockFieldPage.get_edit_handler', 'CustomRichBlockFieldPage.get_edit_handler', ([], {}), '()\n', (6472, 6474), False, 'from wagtail.tests.testapp.models import CustomRichBlockFieldPage\n'), ((6502, 6544), 'wagtail.tests.testapp.models.CustomRichTextFieldPage.get_edit_handler', 'CustomRichTextFieldPage.get_edit_handler', ([], {}), '()\n', (6542, 6544), False, 'from wagtail.tests.testapp.models import CustomRichTextFieldPage\n'), ((6653, 6752), 'django.core.urlresolvers.reverse', 'reverse', (['"""wagtailadmin_pages:add"""'], {'args': "('tests', 'customrichtextfieldpage', self.root_page.id)"}), "('wagtailadmin_pages:add', args=('tests', 'customrichtextfieldpage',\n self.root_page.id))\n", (6660, 6752), False, 'from django.core.urlresolvers import reverse\n'), ((7182, 7282), 'django.core.urlresolvers.reverse', 'reverse', (['"""wagtailadmin_pages:add"""'], {'args': "('tests', 'customrichblockfieldpage', self.root_page.id)"}), "('wagtailadmin_pages:add', args=('tests', 'customrichblockfieldpage',\n self.root_page.id))\n", (7189, 
7282), False, 'from django.core.urlresolvers import reverse\n')] |
import os
from utils import load_json
from utils import print_err
from global_settings import SCENE_BUILDER_OUTPUT_DIR
class SceneBuilderConfig(object):
    """Configuration for a scene-builder run: paths, mesh databases and feature flags."""

    def __init__(
        self,
        input_scene_dir,
        scene_builder_root,
        output_dir_name,
        rigid_mesh_db,
        articulated_mesh_db,
        articulated_mesh_default_tf_file,
        enable_vrgym=False,
        enable_physics=False,
        enable_gazebo=False
    ):
        """
        :param input_scene_dir: directory holding the input scene
        :param scene_builder_root: root of the scene_builder package
        :param output_dir_name: name for the output directory; when None,
            the last path component of ``input_scene_dir`` is used
        :param rigid_mesh_db: path to the rigid-mesh database
        :param articulated_mesh_db: path to the articulated-mesh database
        :param articulated_mesh_default_tf_file: JSON file with default
            scale/transform entries for each interactive CAD model
        :param enable_vrgym: emit VRGym support artifacts
        :param enable_physics: enable physics in the generated scene
        :param enable_gazebo: emit Gazebo support artifacts
        """
        self.input_scene_dir = input_scene_dir
        self.scene_builder_root = scene_builder_root
        # BUGFIX: resolve the None fallback BEFORE building output_dir.
        # The original computed self.output_dir first, so a None name produced
        # a literal ".../None" output path.
        if output_dir_name is None:
            output_dir_name = input_scene_dir.split('/')[-1]
        self.output_dir_name = output_dir_name
        self.output_dir = os.path.normpath(
            "{}/{}/{}".format(scene_builder_root, SCENE_BUILDER_OUTPUT_DIR, self.output_dir_name)
        )
        self.rigid_mesh_db = rigid_mesh_db
        self.articulated_mesh_db = articulated_mesh_db
        self.articulated_mesh_default_tf = self.load_tf_(articulated_mesh_db, articulated_mesh_default_tf_file)
        self.enable_vrgym = enable_vrgym
        self.enable_physics = enable_physics
        self.enable_gazebo = enable_gazebo

    def __str__(self):
        """Human-readable dump of the configuration."""
        ret = ""
        ret += '#' * 50 + "\n"
        ret += "# Scene Builder Configuration\n"
        ret += '#' * 50 + "\n"
        ret += "* Rigid mesh database: {}\n".format(self.rigid_mesh_db)
        ret += "* Articulated mesh database: {}\n".format(self.articulated_mesh_db)
        ret += '-' * 60 + '\n'
        ret += "* Input scene dir: {}\n".format(self.input_scene_dir)
        ret += "* scene_builder pkg root: {}\n".format(self.scene_builder_root)
        ret += "* Output scene dir name: {}\n".format(self.output_dir_name)
        ret += "* Output scene dir: {}\n".format(self.output_dir)
        ret += '-' * 60 + '\n'
        ret += "* Enable VRGym: {}\n".format(self.enable_vrgym)
        ret += "* Enable physics: {}\n".format(self.enable_physics)
        ret += "* Enable Gazebo: {}".format(self.enable_gazebo)
        return ret

    def load_tf_(self, mesh_db, tf_file):
        """Load the default transform file and validate it covers every
        interactive CAD directory found in *mesh_db*.

        :raises AssertionError: if any CAD model lacks a "scale"/"tf" entry
            (each offending model is reported via print_err first)
        """
        interactive_cads = [
            it for it in os.listdir(mesh_db)
            if os.path.isdir(mesh_db + '/' + it)
        ]
        tf = load_json(tf_file)
        # An entry is valid when the model key exists and carries both fields.
        validate_data_format = lambda x, y: x in y and "scale" in y[x] and "tf" in y[x]
        try:
            assert( all([validate_data_format(it, tf) for it in interactive_cads]) )
        except:
            for it in interactive_cads:
                if not validate_data_format(it, tf):
                    print_err("Interactive CAD `{}` was not shown in the transformation file `{}`".format(it, tf_file))
            raise
        return tf
"utils.load_json",
"os.listdir",
"os.path.isdir"
] | [((2296, 2314), 'utils.load_json', 'load_json', (['tf_file'], {}), '(tf_file)\n', (2305, 2314), False, 'from utils import load_json\n'), ((2202, 2221), 'os.listdir', 'os.listdir', (['mesh_db'], {}), '(mesh_db)\n', (2212, 2221), False, 'import os\n'), ((2238, 2271), 'os.path.isdir', 'os.path.isdir', (["(mesh_db + '/' + it)"], {}), "(mesh_db + '/' + it)\n", (2251, 2271), False, 'import os\n')] |
import unittest
from tests.unit import model_test, sanitizer_test, occurrence_test, query_test, vocabulary_test
def create_suite(t_lst, t_load):
    """Append one loaded suite per unit-test case class to *t_lst* using loader *t_load*."""
    cases = (
        model_test.ModelTest,
        sanitizer_test.SanitizerTest,
        occurrence_test.OccurrenceTest,
        query_test.QueryTest,
        vocabulary_test.VocabularyTest,
    )
    t_lst.extend(t_load.loadTestsFromTestCase(case) for case in cases)
test_list = []
test_loader = unittest.TestLoader()
# Collect one suite per test-case class, then run them all together.
create_suite(test_list, test_loader)
unittest.TextTestRunner().run(unittest.TestSuite(test_list))
| [
"unittest.TestSuite",
"unittest.TextTestRunner",
"unittest.TestLoader"
] | [((552, 573), 'unittest.TestLoader', 'unittest.TestLoader', ([], {}), '()\n', (571, 573), False, 'import unittest\n'), ((639, 668), 'unittest.TestSuite', 'unittest.TestSuite', (['test_list'], {}), '(test_list)\n', (657, 668), False, 'import unittest\n'), ((678, 703), 'unittest.TextTestRunner', 'unittest.TextTestRunner', ([], {}), '()\n', (701, 703), False, 'import unittest\n')] |
import setuptools
# Same text serves as both the short and the long description.
_DESCRIPTION = "Personal utils about Image processing and Deep Learning"

setuptools.setup(
    name="pyp",
    version="0.0.1",
    author="Dawsonwen",
    author_email="<EMAIL>",
    description=_DESCRIPTION,
    long_description=_DESCRIPTION,
    long_description_content_type="text/markdown",
    packages=setuptools.find_packages(),
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    install_requires=["opencv-python", "matplotlib"],
)
| [
"setuptools.find_packages"
] | [((346, 372), 'setuptools.find_packages', 'setuptools.find_packages', ([], {}), '()\n', (370, 372), False, 'import setuptools\n')] |
from flask import Flask, render_template, request, redirect
app = Flask(__name__)


@app.route("/")
def root():
    """Plain-text greeting served at the site root."""
    greeting = "Hello, World!"
    return greeting
@app.route("/index.html")
@app.route("/index.php")
def home():
return redirect("/")
@app.route("/user/<username>")
def user_template(username):
return render_template("user.html", name=username)
@app.route("/json")
def data():
return {
"name": "<NAME>",
"email": "<EMAIL>"
}
@app.route("/submit", methods=['POST', 'GET'])
def submit():
if(request.method == "POST"):
return request.form
return "No data has benn sent!"
| [
"flask.redirect",
"flask.render_template",
"flask.Flask"
] | [((67, 82), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (72, 82), False, 'from flask import Flask, render_template, request, redirect\n'), ((212, 225), 'flask.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (220, 225), False, 'from flask import Flask, render_template, request, redirect\n'), ((297, 340), 'flask.render_template', 'render_template', (['"""user.html"""'], {'name': 'username'}), "('user.html', name=username)\n", (312, 340), False, 'from flask import Flask, render_template, request, redirect\n')] |
from PyQt5.QtWidgets import QWizard, QWidget, QHBoxLayout, QFormLayout, QSpinBox, QComboBox
from matplotlib.backends.backend_qt5 import NavigationToolbar2QT as NavigationToolbar
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.figure import Figure
from pyntpg.analysis.discrete_fourier_transform.discrete_fourier_transform import ChooseParameters
from pyntpg.analysis.preview_result import PreviewResult
from pyntpg.analysis.spectrogram.spectro_window import SpectroWindow
class Spectrogram(QWizard):
    """Two-page wizard: choose spectrogram parameters, then preview the result."""

    def __init__(self):
        super(Spectrogram, self).__init__()
        self.page1 = ChooseSpectroParameters()
        self.page2 = PreviewSpectroResult()
        for page in (self.page1, self.page2):
            self.addPage(page)
        # Kick off the calculation when the user advances past page 1.
        next_button = self.button(QWizard.NextButton)
        next_button.clicked.connect(lambda _: self.page2.do_calculation(self.calculate))

    def calculate(self):
        """Run scipy's spectrogram on the selected signal; return (t, f, Sxx)."""
        from scipy.signal import spectrogram
        frequency, selection = self.page1.choose_frequency.get_frequency_and_slice()
        kwargs = self.page1.get_arguments_for_spectrogram()
        signal = self.page1.choose_signal.get_data(selection)
        f, t, Sxx = spectrogram(signal, frequency, **kwargs)
        return t, f, Sxx
class ChooseSpectroParameters(ChooseParameters):
    """Wizard page collecting every option for scipy.signal.spectrogram."""

    def __init__(self):
        super(ChooseSpectroParameters, self).__init__()
        # Row of extra option widgets under the inherited signal picker.
        options = QWidget()
        options_layout = QHBoxLayout()
        options.setLayout(options_layout)
        self.layout.addWidget(options)
        # Column 1: window-function chooser.
        self.choose_window = SpectroWindow()
        options_layout.addWidget(self.choose_window)
        # Column 2: nperseg and lenstep spin boxes.
        secondformcol = QWidget()
        secondformcollayout = QFormLayout()
        secondformcol.setLayout(secondformcollayout)
        self.choose_nperseg = QSpinBox()
        self.choose_nperseg.setMinimum(3)
        self.choose_nperseg.setMaximum(256)  # default taken from scipy.signal.spectrogram
        self.choose_nperseg.setValue(256)
        # self.choose_signal.y_picked.connect(lambda n: self.choose_nperseg.setMaximum(n))
        secondformcollayout.addRow("nperseg", self.choose_nperseg)
        self.choose_lenstep = QSpinBox()
        self.choose_lenstep.setMinimum(1)
        self.choose_lenstep.setMaximum(256)
        # BUGFIX: 256/8 is a float under Python 3 true division, but
        # QSpinBox.setValue requires an int — use integer division.
        self.choose_lenstep.setValue(256 // 8)  # default taken from scipy.signal.spectrogram
        # self.choose_signal.y_picked.connect(lambda n: self.choose_lenstep.setMaximum(n))
        secondformcollayout.addRow("lenstep", self.choose_lenstep)
        # Coerce choose_signal to emit len b/c we probably missed it
        # during this initialization.
        self.choose_signal.emit_y_picked()
        options_layout.addWidget(secondformcol)
        # Column 3: detrend and scaling combo boxes.
        thirdformcol = QWidget()
        thirdformcollayout = QFormLayout()
        thirdformcol.setLayout(thirdformcollayout)
        self.choose_detrend = QComboBox()
        self.choose_detrend.addItems(["constant", "linear", "none"])
        thirdformcollayout.addRow("detrend", self.choose_detrend)
        self.choose_scaling = QComboBox()
        self.choose_scaling.addItems(["density", "spectrum"])
        thirdformcollayout.addRow("scaling", self.choose_scaling)
        options_layout.addWidget(thirdformcol)

    def get_arguments_for_spectrogram(self):
        """
        Get a dict of arguments for the spectrogram function.
        See http://docs.scipy.org/doc/scipy-0.17.0/reference/generated/scipy.signal.spectrogram.html
        :rtype: dict
        :return: a dictionary of options for scipy.signal.spectrogram
        """
        nperseg = self.choose_nperseg.value()
        # scipy takes the overlap, not the step, so convert here.
        noverlap = nperseg - self.choose_lenstep.value()
        window = self.choose_window.get_window()
        scaling = str(self.choose_scaling.currentText())
        detrend = str(self.choose_detrend.currentText())
        return {
            "nperseg": nperseg,
            "noverlap": noverlap,
            "window": window,
            "scaling": scaling,
            "detrend": detrend
        }
class PreviewSpectroResult(PreviewResult):
    """PreviewResult subclass that renders a spectrogram as a pcolormesh."""

    def __init__(self):
        super(PreviewSpectroResult, self).__init__()

    def make_plot(self, result):
        """Draw the (t, f, Sxx) triple produced by Spectrogram.calculate.

        :param result: result of Spectrogram.calculate function
        :return: None
        """
        fig = Figure(tight_layout=True)
        axes = fig.add_subplot(1, 1, 1)
        axes.pcolormesh(*result, rasterized=True)
        canvas = FigureCanvas(fig)
        toolbar = NavigationToolbar(canvas, self)
        # Canvas first, toolbar underneath.
        for widget in (canvas, toolbar):
            self.result_display_layout.addWidget(widget)
| [
"PyQt5.QtWidgets.QWidget",
"PyQt5.QtWidgets.QSpinBox",
"PyQt5.QtWidgets.QComboBox",
"matplotlib.figure.Figure",
"scipy.signal.spectrogram",
"pyntpg.analysis.spectrogram.spectro_window.SpectroWindow",
"PyQt5.QtWidgets.QHBoxLayout",
"matplotlib.backends.backend_qt5agg.FigureCanvasQTAgg",
"PyQt5.QtWidg... | [((1178, 1216), 'scipy.signal.spectrogram', 'spectrogram', (['values', 'frequency'], {}), '(values, frequency, **args)\n', (1189, 1216), False, 'from scipy.signal import spectrogram\n'), ((1449, 1458), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (1456, 1458), False, 'from PyQt5.QtWidgets import QWizard, QWidget, QHBoxLayout, QFormLayout, QSpinBox, QComboBox\n'), ((1484, 1497), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (1495, 1497), False, 'from PyQt5.QtWidgets import QWizard, QWidget, QHBoxLayout, QFormLayout, QSpinBox, QComboBox\n'), ((1647, 1662), 'pyntpg.analysis.spectrogram.spectro_window.SpectroWindow', 'SpectroWindow', ([], {}), '()\n', (1660, 1662), False, 'from pyntpg.analysis.spectrogram.spectro_window import SpectroWindow\n'), ((1798, 1807), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (1805, 1807), False, 'from PyQt5.QtWidgets import QWizard, QWidget, QHBoxLayout, QFormLayout, QSpinBox, QComboBox\n'), ((1838, 1851), 'PyQt5.QtWidgets.QFormLayout', 'QFormLayout', ([], {}), '()\n', (1849, 1851), False, 'from PyQt5.QtWidgets import QWizard, QWidget, QHBoxLayout, QFormLayout, QSpinBox, QComboBox\n'), ((1961, 1971), 'PyQt5.QtWidgets.QSpinBox', 'QSpinBox', ([], {}), '()\n', (1969, 1971), False, 'from PyQt5.QtWidgets import QWizard, QWidget, QHBoxLayout, QFormLayout, QSpinBox, QComboBox\n'), ((2360, 2370), 'PyQt5.QtWidgets.QSpinBox', 'QSpinBox', ([], {}), '()\n', (2368, 2370), False, 'from PyQt5.QtWidgets import QWizard, QWidget, QHBoxLayout, QFormLayout, QSpinBox, QComboBox\n'), ((2998, 3007), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (3005, 3007), False, 'from PyQt5.QtWidgets import QWizard, QWidget, QHBoxLayout, QFormLayout, QSpinBox, QComboBox\n'), ((3037, 3050), 'PyQt5.QtWidgets.QFormLayout', 'QFormLayout', ([], {}), '()\n', (3048, 3050), False, 'from PyQt5.QtWidgets import QWizard, QWidget, QHBoxLayout, QFormLayout, QSpinBox, QComboBox\n'), ((3158, 3169), 
'PyQt5.QtWidgets.QComboBox', 'QComboBox', ([], {}), '()\n', (3167, 3169), False, 'from PyQt5.QtWidgets import QWizard, QWidget, QHBoxLayout, QFormLayout, QSpinBox, QComboBox\n'), ((3361, 3372), 'PyQt5.QtWidgets.QComboBox', 'QComboBox', ([], {}), '()\n', (3370, 3372), False, 'from PyQt5.QtWidgets import QWizard, QWidget, QHBoxLayout, QFormLayout, QSpinBox, QComboBox\n'), ((4798, 4823), 'matplotlib.figure.Figure', 'Figure', ([], {'tight_layout': '(True)'}), '(tight_layout=True)\n', (4804, 4823), False, 'from matplotlib.figure import Figure\n'), ((4931, 4951), 'matplotlib.backends.backend_qt5agg.FigureCanvasQTAgg', 'FigureCanvas', (['figure'], {}), '(figure)\n', (4943, 4951), True, 'from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas\n'), ((4970, 5001), 'matplotlib.backends.backend_qt5.NavigationToolbar2QT', 'NavigationToolbar', (['canvas', 'self'], {}), '(canvas, self)\n', (4987, 5001), True, 'from matplotlib.backends.backend_qt5 import NavigationToolbar2QT as NavigationToolbar\n')] |
import os
c.ServerApp.log_level = 'INFO'
#################
# Network
#################
# 0.0.0.0 binds all interfaces so the server is reachable from other hosts.
c.ServerApp.ip = '0.0.0.0'
c.ServerApp.port = 8686
# Do not probe alternative ports if 8686 is taken — fail immediately.
c.ServerApp.port_retries = 0
#################
# Browser
#################
c.ServerApp.open_browser = False
#################
# Terminal
#################
c.ServerApp.terminals_enabled = True
#################
# Authentication
#################
# NOTE(review): '<PASSWORD>' looks like a template placeholder — presumably
# substituted at deploy time; confirm it is never shipped literally.
c.ServerApp.token = '<PASSWORD>'
#################
# Security
#################
# XSRF checking stays enabled.
c.ServerApp.disable_check_xsrf = False
# ORIGIN = 'http://localhost:3208'
# NOTE(review): '*' plus allow_credentials=True opens CORS very wide —
# confirm this is intended outside local development.
ORIGIN = '*'
# c.ServerApp.allow_origin = ORIGIN
c.ServerApp.allow_origin_pat = '.*'
c.ServerApp.allow_credentials = True
c.ServerApp.tornado_settings = {
    'headers': {
        # 'Access-Control-Allow-Origin': ORIGIN,
        'Access-Control-Allow-Methods': '*',
        'Access-Control-Allow-Headers': 'Accept, Accept-Encoding, Accept-Language, Authorization, Cache-Control, Connection, Content-Type, Host, Origin, Pragma, Referer, sec-ch-ua, sec-ch-ua-mobile, sec-ch-ua-platform, Sec-Fetch-Dest, Sec-Fetch-Mode, Sec-Fetch-Site, Upgrade, User-Agent, X-XSRFToken, X-Datalayer',
        'Access-Control-Allow-Credentials': 'true',
        # Allow embedding in frames served from ORIGIN (or same origin).
        'Content-Security-Policy': f"frame-ancestors 'self' {ORIGIN} ",
    },
    # SameSite=None + Secure lets session cookies flow in cross-site iframes.
    'cookie_options': {
        'SameSite': 'None',
        'Secure': True
    }
}
c.ServerApp.cookie_options = {
    "SameSite": "None",
    "Secure": True,
}
#################
# Server Extensions
#################
c.ServerApp.jpserver_extensions = {
    'jupyterlab': True,
}
#################
# Content
#################
# c.FileContentsManager.delete_to_trash = False
# Serve notebooks from the ../notebooks directory next to this config file.
content_dir = os.path.dirname(os.path.realpath(__file__)) + '/../notebooks'
c.ServerApp.root_dir = content_dir
c.ServerApp.preferred_dir = content_dir
#################
# URLs
#################
c.ServerApp.base_url = '/api/jupyter'
c.ServerApp.default_url = '/api/jupyter/lab'
#################
# JupyterLab
#################
c.LabApp.collaborative = True
| [
"os.path.realpath"
] | [((1625, 1651), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (1641, 1651), False, 'import os\n')] |
import re
from enum import Enum, auto, unique
from typing import Mapping
from attr import attrib, attrs
from attr.validators import and_, in_, instance_of
from immutablecollections import immutabledict
from vistautils.range import Range
@unique
class MemoryUnit(Enum):
    """
    A unit in which memory can be measured.
    """

    KILOBYTES = auto()
    MEGABYTES = auto()
    GIGABYTES = auto()
    TERABYTES = auto()

    @staticmethod
    def parse(memory_unit_string: str) -> "MemoryUnit":
        """
        Parses a string of the format "[K|M|G|T]" as a memory unit.

        Throws a `RuntimeException` on parse failure.
        This may be expanded to accept more formats in the future.
        """
        unit = _STRING_TO_UNIT.get(memory_unit_string.upper())
        if unit is None:
            raise RuntimeError(
                f"For a memory unit, expected one of {set(_STRING_TO_UNIT.keys())}"
                f" but got {memory_unit_string}"
            )
        return unit
# Single-letter abbreviations (inputs are upper-cased before lookup in
# MemoryUnit.parse) mapped to their corresponding units.
_STRING_TO_UNIT: Mapping[str, MemoryUnit] = immutabledict(
    [
        ("K", MemoryUnit.KILOBYTES),
        ("M", MemoryUnit.MEGABYTES),
        ("G", MemoryUnit.GIGABYTES),
        ("T", MemoryUnit.TERABYTES),
    ]
)
@attrs(frozen=True, slots=True)
class MemoryAmount:
    """
    An amount of memory, consisting of an *amount*
    paired with its corresponding `MemoryUnit` *unit*.
    """

    # Positive integer count of units; must be an int >= 1.
    amount: int = attrib(validator=and_(instance_of(int), in_(Range.at_least(1))))
    # The unit the amount is measured in.  Previously unvalidated
    # (validator=None); now checked to actually be a MemoryUnit, matching
    # the validation style of `amount`.
    unit: MemoryUnit = attrib(validator=instance_of(MemoryUnit))

    # Matches e.g. "2G", "512 M", "1Tb": group 1 is the amount,
    # group 2 the unit letter (an optional trailing b/B is ignored).
    _PARSE_PATTERN = re.compile(r"(\d+) ?([TtGgMmKk])[bB]?")

    @staticmethod
    def parse(memory_string: str) -> "MemoryAmount":
        """
        Parse strings like "2G" or "512 MB" into a `MemoryAmount`.

        Raises a `RuntimeError` if *memory_string* cannot be parsed.
        """
        parts = MemoryAmount._PARSE_PATTERN.match(memory_string)
        if parts:
            return MemoryAmount(
                amount=int(parts.group(1)), unit=MemoryUnit.parse(parts.group(2))
            )
        else:
            raise RuntimeError(
                f"Cannot parse {memory_string} as an amount of memory. "
                f"Expected an integer followed by K, M, G, or T"
            )
| [
"attr.attrs",
"enum.auto",
"re.compile",
"immutablecollections.immutabledict",
"attr.attrib",
"attr.validators.instance_of",
"vistautils.range.Range.at_least"
] | [((1057, 1193), 'immutablecollections.immutabledict', 'immutabledict', (["[('K', MemoryUnit.KILOBYTES), ('M', MemoryUnit.MEGABYTES), ('G', MemoryUnit\n .GIGABYTES), ('T', MemoryUnit.TERABYTES)]"], {}), "([('K', MemoryUnit.KILOBYTES), ('M', MemoryUnit.MEGABYTES), (\n 'G', MemoryUnit.GIGABYTES), ('T', MemoryUnit.TERABYTES)])\n", (1070, 1193), False, 'from immutablecollections import immutabledict\n'), ((1237, 1267), 'attr.attrs', 'attrs', ([], {'frozen': '(True)', 'slots': '(True)'}), '(frozen=True, slots=True)\n', (1242, 1267), False, 'from attr import attrib, attrs\n'), ((351, 357), 'enum.auto', 'auto', ([], {}), '()\n', (355, 357), False, 'from enum import Enum, auto, unique\n'), ((374, 380), 'enum.auto', 'auto', ([], {}), '()\n', (378, 380), False, 'from enum import Enum, auto, unique\n'), ((397, 403), 'enum.auto', 'auto', ([], {}), '()\n', (401, 403), False, 'from enum import Enum, auto, unique\n'), ((420, 426), 'enum.auto', 'auto', ([], {}), '()\n', (424, 426), False, 'from enum import Enum, auto, unique\n'), ((1517, 1539), 'attr.attrib', 'attrib', ([], {'validator': 'None'}), '(validator=None)\n', (1523, 1539), False, 'from attr import attrib, attrs\n'), ((1562, 1601), 're.compile', 're.compile', (['"""(\\\\d+) ?([TtGgMmKk])[bB]?"""'], {}), "('(\\\\d+) ?([TtGgMmKk])[bB]?')\n", (1572, 1601), False, 'import re\n'), ((1451, 1467), 'attr.validators.instance_of', 'instance_of', (['int'], {}), '(int)\n', (1462, 1467), False, 'from attr.validators import and_, in_, instance_of\n'), ((1473, 1490), 'vistautils.range.Range.at_least', 'Range.at_least', (['(1)'], {}), '(1)\n', (1487, 1490), False, 'from vistautils.range import Range\n')] |
import json
import pprint
import copy
from TextureUpscaler.TextureProcessing import gather_textures, run_processing_stage, save_hires_image
from TextureUpscaler.UpscaleESRGAN import upscale_esrgan
from TextureUpscaler.DenoiseImages import denoise_texture_opencv
from TextureUpscaler.DownsampleImages import downsample
from TextureUpscaler.AlphaChannelUpscale import alpha_channel_upscale
from TextureUpscaler.UpscaleNGX import upscale_ngx
def load_settings():
    """Load and return the pipeline settings from ``settings.json``.

    Uses a context manager so the file handle is closed even if the JSON
    fails to parse (the original left the file open on a parse error).
    """
    with open("settings.json") as settings_file:
        return json.load(settings_file)
def print_settings(settings):
    """Dump the settings mapping to stdout in a human-readable form."""
    printer = pprint.PrettyPrinter()
    printer.pprint(settings)
def run_texture_processing_pipeline():
    """Run the full texture-upscaling pipeline configured by settings.json.

    Stage order matters: denoise, then two NGX upscales and one ESRGAN
    upscale on separate copies, then alpha-channel upscale, downsample and
    save on the main image list.
    """
    settings = load_settings()
    print_settings(settings)
    source_path = settings["SourcePath"]
    working_path = settings["WorkingPath"]
    extensions = settings["ExtensionsToFind"]
    images = gather_textures(source_path, working_path, extensions)
    print("Number of images gathered: " + str(len(images)))
    # Keep independent copies so each upscaler runs from the same input state.
    images_src = copy.deepcopy(images)
    run_processing_stage(denoise_texture_opencv, images, settings)  # 1
    images_denoised = copy.deepcopy(images)
    images_esrgan = copy.deepcopy(images)
    images_ngx = copy.deepcopy(images)
    run_processing_stage(upscale_ngx, images_ngx, settings)  # 2
    run_processing_stage(upscale_ngx, images_src, settings)  # 3
    upscale_esrgan(images_esrgan, working_path, settings)  # 4
    run_processing_stage(alpha_channel_upscale, images, settings)  # 5
    run_processing_stage(downsample, images, settings)  # 6
    run_processing_stage(save_hires_image, images, settings)  # 7
    print(images_denoised[0].lastPath)
    print(images_esrgan[0].lastPath)
    print(images_ngx[0].lastPath)
# Script entry point: run the whole pipeline when executed directly.
if __name__ == "__main__":
    run_texture_processing_pipeline()
| [
"TextureUpscaler.TextureProcessing.run_processing_stage",
"TextureUpscaler.TextureProcessing.gather_textures",
"TextureUpscaler.UpscaleESRGAN.upscale_esrgan",
"copy.deepcopy",
"json.load",
"pprint.pprint"
] | [((518, 541), 'json.load', 'json.load', (['settingsFile'], {}), '(settingsFile)\n', (527, 541), False, 'import json\n'), ((622, 645), 'pprint.pprint', 'pprint.pprint', (['settings'], {}), '(settings)\n', (635, 645), False, 'import pprint\n'), ((895, 953), 'TextureUpscaler.TextureProcessing.gather_textures', 'gather_textures', (['SourcePath', 'WorkingPath', 'ExtensionsToFind'], {}), '(SourcePath, WorkingPath, ExtensionsToFind)\n', (910, 953), False, 'from TextureUpscaler.TextureProcessing import gather_textures, run_processing_stage, save_hires_image\n'), ((1032, 1053), 'copy.deepcopy', 'copy.deepcopy', (['images'], {}), '(images)\n', (1045, 1053), False, 'import copy\n'), ((1059, 1121), 'TextureUpscaler.TextureProcessing.run_processing_stage', 'run_processing_stage', (['denoise_texture_opencv', 'images', 'settings'], {}), '(denoise_texture_opencv, images, settings)\n', (1079, 1121), False, 'from TextureUpscaler.TextureProcessing import gather_textures, run_processing_stage, save_hires_image\n'), ((1147, 1168), 'copy.deepcopy', 'copy.deepcopy', (['images'], {}), '(images)\n', (1160, 1168), False, 'import copy\n'), ((1189, 1210), 'copy.deepcopy', 'copy.deepcopy', (['images'], {}), '(images)\n', (1202, 1210), False, 'import copy\n'), ((1228, 1249), 'copy.deepcopy', 'copy.deepcopy', (['images'], {}), '(images)\n', (1241, 1249), False, 'import copy\n'), ((1254, 1309), 'TextureUpscaler.TextureProcessing.run_processing_stage', 'run_processing_stage', (['upscale_ngx', 'images_ngx', 'settings'], {}), '(upscale_ngx, images_ngx, settings)\n', (1274, 1309), False, 'from TextureUpscaler.TextureProcessing import gather_textures, run_processing_stage, save_hires_image\n'), ((1317, 1372), 'TextureUpscaler.TextureProcessing.run_processing_stage', 'run_processing_stage', (['upscale_ngx', 'images_src', 'settings'], {}), '(upscale_ngx, images_src, settings)\n', (1337, 1372), False, 'from TextureUpscaler.TextureProcessing import gather_textures, run_processing_stage, 
save_hires_image\n'), ((1380, 1432), 'TextureUpscaler.UpscaleESRGAN.upscale_esrgan', 'upscale_esrgan', (['images_esrgan', 'WorkingPath', 'settings'], {}), '(images_esrgan, WorkingPath, settings)\n', (1394, 1432), False, 'from TextureUpscaler.UpscaleESRGAN import upscale_esrgan\n'), ((1440, 1501), 'TextureUpscaler.TextureProcessing.run_processing_stage', 'run_processing_stage', (['alpha_channel_upscale', 'images', 'settings'], {}), '(alpha_channel_upscale, images, settings)\n', (1460, 1501), False, 'from TextureUpscaler.TextureProcessing import gather_textures, run_processing_stage, save_hires_image\n'), ((1509, 1559), 'TextureUpscaler.TextureProcessing.run_processing_stage', 'run_processing_stage', (['downsample', 'images', 'settings'], {}), '(downsample, images, settings)\n', (1529, 1559), False, 'from TextureUpscaler.TextureProcessing import gather_textures, run_processing_stage, save_hires_image\n'), ((1567, 1623), 'TextureUpscaler.TextureProcessing.run_processing_stage', 'run_processing_stage', (['save_hires_image', 'images', 'settings'], {}), '(save_hires_image, images, settings)\n', (1587, 1623), False, 'from TextureUpscaler.TextureProcessing import gather_textures, run_processing_stage, save_hires_image\n')] |
#!/usr/bin/env python
from __future__ import print_function
import roslib
roslib.load_manifest('image_folder_publisher')
import sys
import os
from os import listdir
from os.path import isfile, join
import rospy
import cv2
from sensor_msgs.msg import Image
from cv_bridge import CvBridge, CvBridgeError
class image_folder_publisher:
    """Publishes stereo image pairs from an on-disk folder to two ROS topics.

    Configuration comes from ROS private parameters: ~topic_name_left,
    ~topic_name_right, ~publish_rate (Hz), ~sort_files, ~frame_id, ~loop
    (-1 for infinite), and ~image_folder (must contain left/ and right/
    subdirectories).
    """
    def __init__(self):
        self.__app_name = "image_folder_publisher"
        # One CvBridge per camera for OpenCV <-> ROS image conversion.
        self._cv_bridge_left = CvBridge()
        self._cv_bridge_right = CvBridge()
        self._topic_name_left = rospy.get_param('~topic_name_left', '/image_raw_left')
        self._topic_name_right = rospy.get_param('~topic_name_right', '/image_raw_right')
        rospy.loginfo("[%s] (topic_name) Publishing Images to topic  %s", self.__app_name, self._topic_name_left)
        rospy.loginfo("[%s] (topic_name) Publishing Images to topic  %s", self.__app_name, self._topic_name_right)
        self._image_publisher_left = rospy.Publisher(self._topic_name_left, Image, queue_size=1)
        self._image_publisher_right = rospy.Publisher(self._topic_name_right, Image, queue_size=1)
        self._rate = rospy.get_param('~publish_rate', 15)
        rospy.loginfo("[%s] (publish_rate) Publish rate set to %s hz", self.__app_name, self._rate)
        self._sort_files = rospy.get_param('~sort_files', True)
        rospy.loginfo("[%s] (sort_files) Sort Files: %r", self.__app_name, self._sort_files)
        self._frame_id = rospy.get_param('~frame_id', 'camera')
        rospy.loginfo("[%s] (frame_id) Frame ID set to  %s", self.__app_name, self._frame_id)
        self._loop = rospy.get_param('~loop', 1)
        rospy.loginfo("[%s] (loop) Loop  %d time(s) (set it -1 for infinite)", self.__app_name, self._loop)
        self._image_folder = rospy.get_param('~image_folder', '')
        # Bail out early if the configured folder is missing or not a directory.
        if self._image_folder == '' or not os.path.exists(self._image_folder) or not os.path.isdir(self._image_folder):
            #import pdb; pdb.set_trace()
            rospy.logfatal("[%s] (image_folder) Invalid Image folder", self.__app_name)
            sys.exit(0)
        rospy.loginfo("[%s] Reading images from %s", self.__app_name, self._image_folder)
    def run(self):
        """Publish every left/right pair at the configured rate until shutdown
        or until the loop count is exhausted."""
        ros_rate = rospy.Rate(self._rate)
        dir_left=self._image_folder + '/left/'
        dir_right=self._image_folder + '/right/'
        #import pdb; pdb.set_trace()
        # NOTE(review): files_in_dir_right is listed but never used below; the
        # right-hand filename is derived from the left one instead.
        files_in_dir_right = [f for f in listdir(dir_right) if isfile(join(dir_right, f))]
        files_in_dir_left = [f for f in listdir(dir_left) if isfile(join(dir_left, f))]
        if self._sort_files:
            files_in_dir_left.sort()
        try:
            while self._loop != 0:
                for f in files_in_dir_left:
                    if not rospy.is_shutdown():
                        if isfile(join(dir_left, f)):
                            cv_image_left = cv2.imread(join(dir_left, f))
                            # Assumes left files are named 'left<suffix>' so the
                            # right counterpart is 'right<suffix>' -- TODO confirm.
                            cv_image_right = cv2.imread(join(dir_right,'right'+f[4:]))
                            #import pdb; pdb.set_trace()
                            if cv_image_left is not None:
                                ros_msg = self._cv_bridge_left.cv2_to_imgmsg(cv_image_left, "bgr8")
                                ros_msg.header.frame_id = self._frame_id
                                ros_msg.header.stamp = rospy.Time.now()
                                self._image_publisher_left.publish(ros_msg)
                                ros_msg = self._cv_bridge_right.cv2_to_imgmsg(cv_image_right, "bgr8")
                                ros_msg.header.frame_id = self._frame_id
                                ros_msg.header.stamp = rospy.Time.now()
                                self._image_publisher_right.publish(ros_msg)
                                rospy.loginfo("[%s] Published %s", self.__app_name, join(self._image_folder, f))
                            else:
                                rospy.loginfo("[%s] Invalid image file %s", self.__app_name, join(self._image_folder, f))
                            # Throttle publishing to the configured rate.
                            ros_rate.sleep()
                    else:
                        return
                self._loop = self._loop - 1
        except CvBridgeError as e:
            rospy.logerr(e)
rospy.logerr(e)
def main(args):
    """Initialise the ROS node and run the image publisher."""
    rospy.init_node('image_folder_publisher', anonymous=True)
    publisher = image_folder_publisher()
    publisher.run()
image_publisher.run()
# Entry point when launched as a ROS node script.
if __name__ == '__main__':
    main(sys.argv)
| [
"rospy.logerr",
"os.path.exists",
"os.listdir",
"rospy.logfatal",
"rospy.is_shutdown",
"rospy.init_node",
"rospy.get_param",
"os.path.join",
"roslib.load_manifest",
"cv_bridge.CvBridge",
"rospy.Time.now",
"os.path.isdir",
"rospy.Rate",
"sys.exit",
"rospy.Publisher",
"rospy.loginfo"
] | [((75, 121), 'roslib.load_manifest', 'roslib.load_manifest', (['"""image_folder_publisher"""'], {}), "('image_folder_publisher')\n", (95, 121), False, 'import roslib\n'), ((4225, 4282), 'rospy.init_node', 'rospy.init_node', (['"""image_folder_publisher"""'], {'anonymous': '(True)'}), "('image_folder_publisher', anonymous=True)\n", (4240, 4282), False, 'import rospy\n'), ((444, 454), 'cv_bridge.CvBridge', 'CvBridge', ([], {}), '()\n', (452, 454), False, 'from cv_bridge import CvBridge, CvBridgeError\n'), ((487, 497), 'cv_bridge.CvBridge', 'CvBridge', ([], {}), '()\n', (495, 497), False, 'from cv_bridge import CvBridge, CvBridgeError\n'), ((531, 585), 'rospy.get_param', 'rospy.get_param', (['"""~topic_name_left"""', '"""/image_raw_left"""'], {}), "('~topic_name_left', '/image_raw_left')\n", (546, 585), False, 'import rospy\n'), ((619, 675), 'rospy.get_param', 'rospy.get_param', (['"""~topic_name_right"""', '"""/image_raw_right"""'], {}), "('~topic_name_right', '/image_raw_right')\n", (634, 675), False, 'import rospy\n'), ((684, 794), 'rospy.loginfo', 'rospy.loginfo', (['"""[%s] (topic_name) Publishing Images to topic %s"""', 'self.__app_name', 'self._topic_name_left'], {}), "('[%s] (topic_name) Publishing Images to topic %s', self.\n __app_name, self._topic_name_left)\n", (697, 794), False, 'import rospy\n'), ((798, 909), 'rospy.loginfo', 'rospy.loginfo', (['"""[%s] (topic_name) Publishing Images to topic %s"""', 'self.__app_name', 'self._topic_name_right'], {}), "('[%s] (topic_name) Publishing Images to topic %s', self.\n __app_name, self._topic_name_right)\n", (811, 909), False, 'import rospy\n'), ((943, 1002), 'rospy.Publisher', 'rospy.Publisher', (['self._topic_name_left', 'Image'], {'queue_size': '(1)'}), '(self._topic_name_left, Image, queue_size=1)\n', (958, 1002), False, 'import rospy\n'), ((1041, 1101), 'rospy.Publisher', 'rospy.Publisher', (['self._topic_name_right', 'Image'], {'queue_size': '(1)'}), '(self._topic_name_right, Image, queue_size=1)\n', 
(1056, 1101), False, 'import rospy\n'), ((1124, 1160), 'rospy.get_param', 'rospy.get_param', (['"""~publish_rate"""', '(15)'], {}), "('~publish_rate', 15)\n", (1139, 1160), False, 'import rospy\n'), ((1169, 1265), 'rospy.loginfo', 'rospy.loginfo', (['"""[%s] (publish_rate) Publish rate set to %s hz"""', 'self.__app_name', 'self._rate'], {}), "('[%s] (publish_rate) Publish rate set to %s hz', self.\n __app_name, self._rate)\n", (1182, 1265), False, 'import rospy\n'), ((1289, 1325), 'rospy.get_param', 'rospy.get_param', (['"""~sort_files"""', '(True)'], {}), "('~sort_files', True)\n", (1304, 1325), False, 'import rospy\n'), ((1334, 1423), 'rospy.loginfo', 'rospy.loginfo', (['"""[%s] (sort_files) Sort Files: %r"""', 'self.__app_name', 'self._sort_files'], {}), "('[%s] (sort_files) Sort Files: %r', self.__app_name, self.\n _sort_files)\n", (1347, 1423), False, 'import rospy\n'), ((1445, 1483), 'rospy.get_param', 'rospy.get_param', (['"""~frame_id"""', '"""camera"""'], {}), "('~frame_id', 'camera')\n", (1460, 1483), False, 'import rospy\n'), ((1492, 1582), 'rospy.loginfo', 'rospy.loginfo', (['"""[%s] (frame_id) Frame ID set to %s"""', 'self.__app_name', 'self._frame_id'], {}), "('[%s] (frame_id) Frame ID set to %s', self.__app_name, self.\n _frame_id)\n", (1505, 1582), False, 'import rospy\n'), ((1600, 1627), 'rospy.get_param', 'rospy.get_param', (['"""~loop"""', '(1)'], {}), "('~loop', 1)\n", (1615, 1627), False, 'import rospy\n'), ((1636, 1740), 'rospy.loginfo', 'rospy.loginfo', (['"""[%s] (loop) Loop %d time(s) (set it -1 for infinite)"""', 'self.__app_name', 'self._loop'], {}), "('[%s] (loop) Loop %d time(s) (set it -1 for infinite)', self\n .__app_name, self._loop)\n", (1649, 1740), False, 'import rospy\n'), ((1766, 1802), 'rospy.get_param', 'rospy.get_param', (['"""~image_folder"""', '""""""'], {}), "('~image_folder', '')\n", (1781, 1802), False, 'import rospy\n'), ((2084, 2170), 'rospy.loginfo', 'rospy.loginfo', (['"""[%s] Reading images from %s"""', 
'self.__app_name', 'self._image_folder'], {}), "('[%s] Reading images from %s', self.__app_name, self.\n _image_folder)\n", (2097, 2170), False, 'import rospy\n'), ((2205, 2227), 'rospy.Rate', 'rospy.Rate', (['self._rate'], {}), '(self._rate)\n', (2215, 2227), False, 'import rospy\n'), ((1976, 2051), 'rospy.logfatal', 'rospy.logfatal', (['"""[%s] (image_folder) Invalid Image folder"""', 'self.__app_name'], {}), "('[%s] (image_folder) Invalid Image folder', self.__app_name)\n", (1990, 2051), False, 'import rospy\n'), ((2064, 2075), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (2072, 2075), False, 'import sys\n'), ((1846, 1880), 'os.path.exists', 'os.path.exists', (['self._image_folder'], {}), '(self._image_folder)\n', (1860, 1880), False, 'import os\n'), ((1888, 1921), 'os.path.isdir', 'os.path.isdir', (['self._image_folder'], {}), '(self._image_folder)\n', (1901, 1921), False, 'import os\n'), ((2404, 2422), 'os.listdir', 'listdir', (['dir_right'], {}), '(dir_right)\n', (2411, 2422), False, 'from os import listdir\n'), ((2495, 2512), 'os.listdir', 'listdir', (['dir_left'], {}), '(dir_left)\n', (2502, 2512), False, 'from os import listdir\n'), ((4188, 4203), 'rospy.logerr', 'rospy.logerr', (['e'], {}), '(e)\n', (4200, 4203), False, 'import rospy\n'), ((2434, 2452), 'os.path.join', 'join', (['dir_right', 'f'], {}), '(dir_right, f)\n', (2438, 2452), False, 'from os.path import isfile, join\n'), ((2523, 2540), 'os.path.join', 'join', (['dir_left', 'f'], {}), '(dir_left, f)\n', (2527, 2540), False, 'from os.path import isfile, join\n'), ((2728, 2747), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (2745, 2747), False, 'import rospy\n'), ((2783, 2800), 'os.path.join', 'join', (['dir_left', 'f'], {}), '(dir_left, f)\n', (2787, 2800), False, 'from os.path import isfile, join\n'), ((2858, 2875), 'os.path.join', 'join', (['dir_left', 'f'], {}), '(dir_left, f)\n', (2862, 2875), False, 'from os.path import isfile, join\n'), ((2933, 2965), 'os.path.join', 'join', 
(['dir_right', "('right' + f[4:])"], {}), "(dir_right, 'right' + f[4:])\n", (2937, 2965), False, 'from os.path import isfile, join\n'), ((3308, 3324), 'rospy.Time.now', 'rospy.Time.now', ([], {}), '()\n', (3322, 3324), False, 'import rospy\n'), ((3631, 3647), 'rospy.Time.now', 'rospy.Time.now', ([], {}), '()\n', (3645, 3647), False, 'import rospy\n'), ((3810, 3837), 'os.path.join', 'join', (['self._image_folder', 'f'], {}), '(self._image_folder, f)\n', (3814, 3837), False, 'from os.path import isfile, join\n'), ((3966, 3993), 'os.path.join', 'join', (['self._image_folder', 'f'], {}), '(self._image_folder, f)\n', (3970, 3993), False, 'from os.path import isfile, join\n')] |
"""
Copyright (c) 2019 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
from __future__ import absolute_import
from io import BytesIO
import os
import responses
import tarfile
from atomic_reactor.constants import REMOTE_SOURCE_DIR
from atomic_reactor.inner import DockerBuildWorkflow
from tests.constants import TEST_IMAGE
from tests.stubs import StubInsideBuilder
from atomic_reactor.plugins.pre_download_remote_source import (
DownloadRemoteSourcePlugin,
)
import pytest
class TestDownloadRemoteSource(object):
    """Tests for DownloadRemoteSourcePlugin: downloading and unpacking a
    remote-source archive and exposing it through Docker build args."""
    @responses.activate
    @pytest.mark.parametrize('source_url', [True, False])
    @pytest.mark.parametrize('archive_dir_exists', [True, False])
    def test_download_remote_source(self, tmpdir, docker_tasker, source_url, archive_dir_exists):
        """Covers four cases: with/without a remote-source URL, and with the
        unpack destination directory pre-existing (which must fail)."""
        workflow = DockerBuildWorkflow(
            TEST_IMAGE,
            source={"provider": "git", "uri": "asd"},
        )
        workflow.builder = StubInsideBuilder().for_workflow(workflow).set_df_path(str(tmpdir))
        filename = 'source.tar.gz'
        url = None
        if source_url:
            url = 'https://example.com/dir/{}'.format(filename)
        # Make a compressed tarfile with a single file 'abc'
        member = 'abc'
        abc_content = b'def'
        content = BytesIO()
        with tarfile.open(mode='w:gz', fileobj=content) as tf:
            ti = tarfile.TarInfo(name=member)
            ti.size = len(abc_content)
            tf.addfile(ti, fileobj=BytesIO(abc_content))
        # GET from the url returns the compressed tarfile
        if source_url:
            responses.add(responses.GET, url, body=content.getvalue())
        buildargs = {'spam': 'maps'}
        plugin = DownloadRemoteSourcePlugin(docker_tasker, workflow,
                                            remote_source_url=url,
                                            remote_source_build_args=buildargs)
        # A pre-existing destination directory must make the plugin fail.
        if archive_dir_exists and source_url:
            dest_dir = os.path.join(workflow.builder.df_dir, plugin.REMOTE_SOURCE)
            os.makedirs(dest_dir)
            with pytest.raises(RuntimeError):
                plugin.run()
            os.rmdir(dest_dir)
            return
        result = plugin.run()
        # Without a URL the plugin is a no-op and returns None.
        if not source_url:
            assert result is None
            return
        # The return value should be the path to the downloaded archive itself
        with open(result, 'rb') as f:
            filecontent = f.read()
        assert filecontent == content.getvalue()
        # Expect a file 'abc' in the workdir
        with open(os.path.join(workflow.builder.df_dir, plugin.REMOTE_SOURCE, member), 'rb') as f:
            filecontent = f.read()
        assert filecontent == abc_content
        # Expect buildargs to have been set
        for arg, value in buildargs.items():
            assert workflow.builder.buildargs[arg] == value
        # along with the args needed to add the sources in the Dockerfile
        assert workflow.builder.buildargs['REMOTE_SOURCE'] == plugin.REMOTE_SOURCE
        assert workflow.builder.buildargs['REMOTE_SOURCE_DIR'] == REMOTE_SOURCE_DIR
        # https://github.com/openshift/imagebuilder/issues/139
        assert not workflow.builder.buildargs['REMOTE_SOURCE'].startswith('/')
| [
"tarfile.open",
"atomic_reactor.inner.DockerBuildWorkflow",
"os.makedirs",
"io.BytesIO",
"os.path.join",
"pytest.mark.parametrize",
"os.rmdir",
"pytest.raises",
"tests.stubs.StubInsideBuilder",
"atomic_reactor.plugins.pre_download_remote_source.DownloadRemoteSourcePlugin",
"tarfile.TarInfo"
] | [((657, 709), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""source_url"""', '[True, False]'], {}), "('source_url', [True, False])\n", (680, 709), False, 'import pytest\n'), ((715, 775), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""archive_dir_exists"""', '[True, False]'], {}), "('archive_dir_exists', [True, False])\n", (738, 775), False, 'import pytest\n'), ((893, 966), 'atomic_reactor.inner.DockerBuildWorkflow', 'DockerBuildWorkflow', (['TEST_IMAGE'], {'source': "{'provider': 'git', 'uri': 'asd'}"}), "(TEST_IMAGE, source={'provider': 'git', 'uri': 'asd'})\n", (912, 966), False, 'from atomic_reactor.inner import DockerBuildWorkflow\n'), ((1370, 1379), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (1377, 1379), False, 'from io import BytesIO\n'), ((1793, 1907), 'atomic_reactor.plugins.pre_download_remote_source.DownloadRemoteSourcePlugin', 'DownloadRemoteSourcePlugin', (['docker_tasker', 'workflow'], {'remote_source_url': 'url', 'remote_source_build_args': 'buildargs'}), '(docker_tasker, workflow, remote_source_url=url,\n remote_source_build_args=buildargs)\n', (1819, 1907), False, 'from atomic_reactor.plugins.pre_download_remote_source import DownloadRemoteSourcePlugin\n'), ((1393, 1435), 'tarfile.open', 'tarfile.open', ([], {'mode': '"""w:gz"""', 'fileobj': 'content'}), "(mode='w:gz', fileobj=content)\n", (1405, 1435), False, 'import tarfile\n'), ((1460, 1488), 'tarfile.TarInfo', 'tarfile.TarInfo', ([], {'name': 'member'}), '(name=member)\n', (1475, 1488), False, 'import tarfile\n'), ((2061, 2120), 'os.path.join', 'os.path.join', (['workflow.builder.df_dir', 'plugin.REMOTE_SOURCE'], {}), '(workflow.builder.df_dir, plugin.REMOTE_SOURCE)\n', (2073, 2120), False, 'import os\n'), ((2133, 2154), 'os.makedirs', 'os.makedirs', (['dest_dir'], {}), '(dest_dir)\n', (2144, 2154), False, 'import os\n'), ((2242, 2260), 'os.rmdir', 'os.rmdir', (['dest_dir'], {}), '(dest_dir)\n', (2250, 2260), False, 'import os\n'), ((2172, 2199), 'pytest.raises', 
'pytest.raises', (['RuntimeError'], {}), '(RuntimeError)\n', (2185, 2199), False, 'import pytest\n'), ((2659, 2726), 'os.path.join', 'os.path.join', (['workflow.builder.df_dir', 'plugin.REMOTE_SOURCE', 'member'], {}), '(workflow.builder.df_dir, plugin.REMOTE_SOURCE, member)\n', (2671, 2726), False, 'import os\n'), ((1563, 1583), 'io.BytesIO', 'BytesIO', (['abc_content'], {}), '(abc_content)\n', (1570, 1583), False, 'from io import BytesIO\n'), ((1029, 1048), 'tests.stubs.StubInsideBuilder', 'StubInsideBuilder', ([], {}), '()\n', (1046, 1048), False, 'from tests.stubs import StubInsideBuilder\n')] |
import random
import gym
import sys
import numpy as np
from collections import deque,namedtuple
import os
import time
import matplotlib.pyplot as plt
import torch
import torch.nn as nn
from torch.optim import Adam
# Apply the 'seaborn' matplotlib style globally for any figures drawn later.
plt.style.use('seaborn')
class DQN(nn.Module):
    """Three-layer fully-connected Q-network.

    Maps a state vector of size ``state_sz`` to one Q-value per action.
    """
    def __init__(self, hidden_sz, state_sz, action_sz):
        super().__init__()
        self.hidden_sz = hidden_sz
        self.fc1 = nn.Linear(state_sz, self.hidden_sz)
        self.fc2 = nn.Linear(self.hidden_sz, self.hidden_sz)
        self.fc3 = nn.Linear(self.hidden_sz, action_sz)
        self.relu = nn.ReLU()

    def forward(self, x):
        """Return Q-value estimates for every action given state batch ``x``."""
        hidden = self.relu(self.fc1(x))
        hidden = self.relu(self.fc2(hidden))
        return self.fc3(hidden)
class Agent():
    """Deep Q-Network agent for a discrete-action Gym environment.

    Keeps an online network and a periodically synced target network, an
    experience-replay buffer, and an epsilon-greedy policy whose epsilon
    decays multiplicatively once per episode (floored at 0.01).
    """
    def __init__(self,env,target_update_frequency=100,eps=1):
        # env: Gym environment with a discrete action space and a flat
        # observation vector (shape[0] is used as the state size).
        self.env = env
        self.action_sz = self.env.action_space.n
        self.state_sz = self.env.observation_space.shape[0]
        self.eps = eps
        # Sync the target network every this many environment transitions.
        self.target_update_frequency = target_update_frequency
        self.target_update_counter = 0
        # Per-episode total rewards collected during training.
        self.rewards = []
        self.train_time = None
        # Hyperparameters stay None until init_hyperparameters() is called.
        self.n_episodes = None
        self.batch_size = None
        self.gamma = None
        self.lr = None
        self.decay = None
        self.replay_buffer = deque(maxlen=10000)
        self.transition = namedtuple('transition',['s_prime','reward','s','action','done'])
        self.network = DQN(256,self.state_sz, self.action_sz)
        self.target_network = DQN(256,self.state_sz, self.action_sz)
        self.loss_fn = nn.MSELoss()
        # Built in init_hyperparameters() once the learning rate is known.
        self.optimizer = None
    def print_env_settings(self):
        """Print the observation and action space sizes."""
        print('State space: ',self.state_sz)
        print('Action space: ',self.action_sz)
    def init_hyperparameters(self, n_episodes,batch_size,gamma,lr,decay):
        """Set the training hyperparameters and create the Adam optimizer.

        Must be called before train()/train2().
        """
        self.n_episodes = n_episodes
        self.batch_size = batch_size
        self.gamma = gamma
        self.lr = lr
        self.decay = decay
        self.optimizer = Adam(self.network.parameters(), lr=self.lr)
    def select_action(self,state,eps):
        """Epsilon-greedy action: random with probability eps, otherwise the
        argmax of the online network's Q-values for the given state."""
        t = np.random.random()
        if t < eps:
            a = np.random.choice(range(self.action_sz))
        else:
            q = self.network(torch.FloatTensor(state))
            a = q.argmax().item()
        return a
    def store(self,transition):
        """Append one transition namedtuple to the replay buffer."""
        self.replay_buffer.append(transition)
    def update(self):
        """Run one gradient step on a random minibatch from the replay buffer.

        No-op until the buffer holds at least batch_size transitions.
        """
        if len(self.replay_buffer)< self.batch_size:
            return
        batch = random.sample(self.replay_buffer,self.batch_size)
        s = torch.FloatTensor([t.s for t in batch])
        r = torch.FloatTensor([t.reward for t in batch])
        s_prime = torch.FloatTensor([t.s_prime for t in batch])
        a = torch.LongTensor([t.action for t in batch]).unsqueeze(1)
        done = torch.FloatTensor([t.done for t in batch])
        # Bellman target: r + gamma * max_a' Q_target(s', a'), zeroed for
        # terminal transitions via (1 - done).
        # NOTE(review): the target is not detach()-ed, so gradients flow into
        # the target network too; they are never applied because the optimizer
        # only holds self.network's parameters -- confirm this is intentional.
        target = (r + self.gamma*self.target_network(s_prime).max(dim=1)[0]*(1-done))
        prediction = self.network(s).gather(1,a)
        self.optimizer.zero_grad()
        loss = self.loss_fn(target.unsqueeze(1),prediction)
        loss.backward()
        self.optimizer.step()
    def get_train_time(self):
        """Return the wall-clock training duration in seconds (set by train())."""
        return self.train_time
    def run_episode(self,render,k):
        """Play one episode (index k) using the raw environment reward,
        learning after every transition. Epsilon decays once at episode start."""
        s = self.env.reset()
        done = False
        total_reward = 0.0
        self.eps = self.eps * self.decay
        transition_count = 0
        while not done:
            if render:
                self.env.render()
            self.target_update_counter += 1
            # Exploration floor of 0.01.
            if self.eps > 0.01:
                eps = self.eps
            else:
                eps = 0.01
            action = self.select_action(s,eps)
            s_prime,reward,done,_ = self.env.step(action)
            self.store((self.transition(s_prime,reward,s,action,done)))
            total_reward += reward
            s = s_prime
            done = done
            self.update()
            transition_count+=1
        # Periodic progress report.
        if k % 100 == 0 and k > 1:
            print('Transition Count: ',transition_count)
            print('Episode Reward: ',total_reward)
        self.rewards.append(total_reward)
    def run_episode2(self,render,k):
        """Variant of run_episode with hand-tuned reward shaping.

        NOTE(review): the shaping (position thresholds on state index 0 and
        the 200-step check) looks tailored to MountainCar-v0 -- confirm.
        """
        s = self.env.reset()
        done = False
        total_reward = 0.0
        self.eps = self.eps * self.decay
        transition_count = 0
        while not done:
            if render:
                self.env.render()
            # eps = 0.0
            transition_count+=1
            self.target_update_counter += 1
            if self.eps > 0.01:
                eps = self.eps
            else:
                eps = 0.01
            action = self.select_action(s,eps)
            s_prime,reward,done,_ = self.env.step(action)
            next_state = np.reshape(s_prime, [1, self.state_sz])
            s_ = np.reshape(s, [1, self.state_sz])
            # We want to encourage swing moves
            if next_state[0][0] > s_[0][0] and next_state[0][0]>-0.4 and s_[0][0]>-0.4:
                reward += 20
            elif next_state[0][0] < s_[0][0] and next_state[0][0]<=-0.6 and s_[0][0]<=-0.6:
                reward += 20
            # Massive reward to reach flag
            if done and transition_count != 200:
                reward = reward + 10000
            else:
                # put extra penalty if not done
                reward = reward - 10
            self.store(self.transition(s_prime,reward,s,action,done))
            total_reward += reward
            s = s_prime
            done = done
            self.update()
        if k % 100 == 0 and k > 1:
            print('Transition Count: ',transition_count)
            print('Episode Reward: ',total_reward)
        self.rewards.append(total_reward)
    def train(self):
        """Train for n_episodes using run_episode, syncing the target network
        every target_update_frequency transitions."""
        t1 = time.time()
        for k in range(self.n_episodes):
            # Record elapsed time when starting the final episode.
            if k == self.n_episodes - 1:
                self.train_time = time.time() - t1
            render = False
            # if k % 100 <= 10:
            # render = True
            if k % 100 == 0 and k > 1:
                print('Episode: ',k)
            self.run_episode(render,k)
            if self.target_update_counter >= self.target_update_frequency:
                self.target_update_counter = 0
                self.target_network.load_state_dict(self.network.state_dict())
    def train2(self):
        """Same loop as train() but plays episodes with the shaped-reward
        run_episode2 variant."""
        t1 = time.time()
        for k in range(self.n_episodes):
            if k == self.n_episodes - 1:
                self.train_time = time.time() - t1
            render = False
            # if k % 100 <= 10:
            # render = True
            if k % 100 == 0 and k > 1:
                print('Episode: ',k)
            self.run_episode2(render,k)
            if self.target_update_counter >= self.target_update_frequency:
                self.target_update_counter = 0
                self.target_network.load_state_dict(self.network.state_dict())
| [
"torch.nn.ReLU",
"collections.namedtuple",
"collections.deque",
"random.sample",
"numpy.reshape",
"numpy.random.random",
"torch.LongTensor",
"matplotlib.pyplot.style.use",
"torch.nn.MSELoss",
"torch.nn.Linear",
"time.time",
"torch.FloatTensor"
] | [((214, 238), 'matplotlib.pyplot.style.use', 'plt.style.use', (['"""seaborn"""'], {}), "('seaborn')\n", (227, 238), True, 'import matplotlib.pyplot as plt\n'), ((398, 433), 'torch.nn.Linear', 'nn.Linear', (['state_sz', 'self.hidden_sz'], {}), '(state_sz, self.hidden_sz)\n', (407, 433), True, 'import torch.nn as nn\n'), ((452, 493), 'torch.nn.Linear', 'nn.Linear', (['self.hidden_sz', 'self.hidden_sz'], {}), '(self.hidden_sz, self.hidden_sz)\n', (461, 493), True, 'import torch.nn as nn\n'), ((512, 548), 'torch.nn.Linear', 'nn.Linear', (['self.hidden_sz', 'action_sz'], {}), '(self.hidden_sz, action_sz)\n', (521, 548), True, 'import torch.nn as nn\n'), ((568, 577), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (575, 577), True, 'import torch.nn as nn\n'), ((1303, 1322), 'collections.deque', 'deque', ([], {'maxlen': '(10000)'}), '(maxlen=10000)\n', (1308, 1322), False, 'from collections import deque, namedtuple\n'), ((1349, 1419), 'collections.namedtuple', 'namedtuple', (['"""transition"""', "['s_prime', 'reward', 's', 'action', 'done']"], {}), "('transition', ['s_prime', 'reward', 's', 'action', 'done'])\n", (1359, 1419), False, 'from collections import deque, namedtuple\n'), ((1569, 1581), 'torch.nn.MSELoss', 'nn.MSELoss', ([], {}), '()\n', (1579, 1581), True, 'import torch.nn as nn\n'), ((2085, 2103), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (2101, 2103), True, 'import numpy as np\n'), ((2492, 2542), 'random.sample', 'random.sample', (['self.replay_buffer', 'self.batch_size'], {}), '(self.replay_buffer, self.batch_size)\n', (2505, 2542), False, 'import random\n'), ((2555, 2594), 'torch.FloatTensor', 'torch.FloatTensor', (['[t.s for t in batch]'], {}), '([t.s for t in batch])\n', (2572, 2594), False, 'import torch\n'), ((2607, 2651), 'torch.FloatTensor', 'torch.FloatTensor', (['[t.reward for t in batch]'], {}), '([t.reward for t in batch])\n', (2624, 2651), False, 'import torch\n'), ((2670, 2715), 'torch.FloatTensor', 'torch.FloatTensor', 
(['[t.s_prime for t in batch]'], {}), '([t.s_prime for t in batch])\n', (2687, 2715), False, 'import torch\n'), ((2800, 2842), 'torch.FloatTensor', 'torch.FloatTensor', (['[t.done for t in batch]'], {}), '([t.done for t in batch])\n', (2817, 2842), False, 'import torch\n'), ((5752, 5763), 'time.time', 'time.time', ([], {}), '()\n', (5761, 5763), False, 'import time\n'), ((6344, 6355), 'time.time', 'time.time', ([], {}), '()\n', (6353, 6355), False, 'import time\n'), ((4737, 4776), 'numpy.reshape', 'np.reshape', (['s_prime', '[1, self.state_sz]'], {}), '(s_prime, [1, self.state_sz])\n', (4747, 4776), True, 'import numpy as np\n'), ((4794, 4827), 'numpy.reshape', 'np.reshape', (['s', '[1, self.state_sz]'], {}), '(s, [1, self.state_sz])\n', (4804, 4827), True, 'import numpy as np\n'), ((2223, 2247), 'torch.FloatTensor', 'torch.FloatTensor', (['state'], {}), '(state)\n', (2240, 2247), False, 'import torch\n'), ((2728, 2771), 'torch.LongTensor', 'torch.LongTensor', (['[t.action for t in batch]'], {}), '([t.action for t in batch])\n', (2744, 2771), False, 'import torch\n'), ((5880, 5891), 'time.time', 'time.time', ([], {}), '()\n', (5889, 5891), False, 'import time\n'), ((6472, 6483), 'time.time', 'time.time', ([], {}), '()\n', (6481, 6483), False, 'import time\n')] |
from ocdskingfisher.base import Source
import datetime
class ScotlandSource(Source):
    """
    API documentation and bulk downloads: https://www.publiccontractsscotland.gov.uk/NoticeDownload/Download.aspx
    """
    publisher_name = 'Scotland'
    url = 'https://www.publiccontractsscotland.gov.uk/NoticeDownload/Download.aspx'
    source_id = 'scotland'
    def gather_all_download_urls(self):
        """Return the list of notice-feed downloads to fetch.

        In sample mode a single fixed URL is returned; otherwise one URL
        per fortnight covering roughly the last year.
        """
        if self.sample:
            return [
                {
                    'url': 'https://api.publiccontractsscotland.gov.uk/v1/Notices?dateFrom=2016-10-01&outputType=1',
                    'filename': 'sample.json',
                    'data_type': 'release_package',
                }
            ]
        now = datetime.datetime.today()
        # It's meant to go back a year, but in testing it seemed to be year minus one day!
        cursor = now - datetime.timedelta(days=364)
        step = datetime.timedelta(days=14)
        downloads = []
        while cursor <= now:
            date_str = '{:04d}-{:02d}-{:02d}'.format(cursor.year, cursor.month, cursor.day)
            downloads.append({
                'url': 'https://api.publiccontractsscotland.gov.uk/v1/Notices?dateFrom={}&outputType=1'.format(date_str),
                'filename': '{}.json'.format(date_str),
                'data_type': 'release_package',
            })
            cursor = cursor + step
        return downloads
| [
"datetime.datetime.today",
"datetime.timedelta"
] | [((728, 753), 'datetime.datetime.today', 'datetime.datetime.today', ([], {}), '()\n', (751, 753), False, 'import datetime\n'), ((868, 896), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(364)'}), '(days=364)\n', (886, 896), False, 'import datetime\n'), ((1337, 1364), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(14)'}), '(days=14)\n', (1355, 1364), False, 'import datetime\n')] |
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Name: sorting.py
# Purpose: Music21 class for sorting
#
# Authors: <NAME>
#
# Copyright: Copyright © 2014-2015 <NAME> and the music21
# Project
# License: BSD, see license.txt
# -----------------------------------------------------------------------------
'''
This module defines a single class, SortTuple, which is a named tuple that can
sort against bare offsets and other SortTuples.
This is a performance-critical object.
It also defines three singleton instances of the SortTuple class, named
ZeroSortTupleDefault, ZeroSortTupleLow, and ZeroSortTupleHigh, which are
SortTuples at offset 0.0 with priority 0, -inf, and +inf respectively:
>>> sorting.ZeroSortTupleDefault
SortTuple(atEnd=0, offset=0.0, priority=0, classSortOrder=0, isNotGrace=1, insertIndex=0)
>>> sorting.ZeroSortTupleLow
SortTuple(atEnd=0, offset=0.0, priority=-inf, classSortOrder=0, isNotGrace=1, insertIndex=0)
>>> sorting.ZeroSortTupleHigh
SortTuple(atEnd=0, offset=0.0, priority=inf, classSortOrder=0, isNotGrace=1, insertIndex=0)
'''
from collections import namedtuple
from math import inf as INFINITY
from music21 import exceptions21
# Field order matters: SortTuple compares element-by-element like any tuple,
# so 'atEnd' dominates, then 'offset', then 'priority', and so on.
_attrList = ['atEnd', 'offset', 'priority', 'classSortOrder', 'isNotGrace', 'insertIndex']
class SortingException(exceptions21.Music21Exception):
    '''Raised when combining (add/sub) incompatible SortTuples.'''
    pass
class SortTuple(namedtuple('SortTuple', _attrList)):
    '''
    Derived class of namedTuple which allows for comparisons with pure ints/fractions.

    >>> n = note.Note()
    >>> s = stream.Stream()
    >>> s.insert(4, n)
    >>> st = n.sortTuple()
    >>> st
    SortTuple(atEnd=0, offset=4.0, priority=0, classSortOrder=20, isNotGrace=1, insertIndex=...)
    >>> st.shortRepr()
    '4.0 <0.20...>'
    >>> st.atEnd
    0
    >>> st.offset
    4.0

    >>> st < 5.0
    True
    >>> 5.0 > st
    True
    >>> st > 3.0
    True
    >>> 3.0 < st
    True
    >>> st == 4.0
    True

    >>> ts = bar.Barline('double')
    >>> t = stream.Stream()
    >>> t.storeAtEnd(ts)
    >>> ts_st = ts.sortTuple()
    >>> ts_st
    SortTuple(atEnd=1, offset=0.0, priority=0, classSortOrder=-5, isNotGrace=1, insertIndex=...)
    >>> st < ts_st
    True
    >>> ts_st > 999999
    True
    >>> import math
    >>> ts_st == math.inf
    True

    Construct one w/ keywords:

    >>> st = sorting.SortTuple(atEnd=0, offset=1.0, priority=0, classSortOrder=20,
    ...            isNotGrace=1, insertIndex=323)
    >>> st.shortRepr()
    '1.0 <0.20.323>'

    or as tuple:

    >>> st = sorting.SortTuple(0, 1.0, 0, 20, 1, 323)
    >>> st.shortRepr()
    '1.0 <0.20.323>'
    '''
    def __new__(cls, *tupEls, **kw):
        # noinspection PyTypeChecker
        return super(SortTuple, cls).__new__(cls, *tupEls, **kw)

    def __eq__(self, other):
        # Against another tuple, use plain namedtuple (element-wise) equality.
        if isinstance(other, tuple):
            return super().__eq__(other)
        try:
            # An atEnd SortTuple behaves like +infinity against bare numbers:
            # it equals only infinity itself.
            if self.atEnd == 1 and other != INFINITY:
                return False
            elif self.atEnd == 1:
                return True
            else:
                # Bare numbers compare against the offset alone.
                return self.offset == other
        except ValueError:
            # NOTE(review): comparing with a non-numeric value raises TypeError
            # in Python 3, which this clause does not catch -- confirm intended.
            return NotImplemented

    def __lt__(self, other):
        if isinstance(other, tuple):
            return super().__lt__(other)
        try:
            # atEnd sorts after every finite number, so it is never less.
            if self.atEnd == 1:
                return False
            else:
                return self.offset < other
        except ValueError:
            return NotImplemented

    def __gt__(self, other):
        if isinstance(other, tuple):
            return super().__gt__(other)
        try:
            # atEnd is greater than any finite number, but not than infinity.
            if self.atEnd == 1 and other != INFINITY:
                return True
            elif self.atEnd == 1:
                return False
            else:
                return self.offset > other
        except ValueError:
            return NotImplemented

    def __ne__(self, other):
        # Derived from __eq__ so number comparisons stay consistent.
        return not self.__eq__(other)

    def __le__(self, other):
        return self.__lt__(other) or self.__eq__(other)

    def __ge__(self, other):
        return self.__gt__(other) or self.__eq__(other)

    def shortRepr(self):
        '''
        Returns a nice representation of a SortTuple

        >>> st = sorting.SortTuple(atEnd=0, offset=1.0, priority=0, classSortOrder=20,
        ...            isNotGrace=1, insertIndex=323)
        >>> st.shortRepr()
        '1.0 <0.20.323>'

        >>> st = sorting.SortTuple(atEnd=1, offset=1.0, priority=4, classSortOrder=7,
        ...            isNotGrace=0, insertIndex=200)
        >>> st.shortRepr()
        'End <4.7.[Grace].200>'
        '''
        reprParts = []
        # Either the offset or the literal 'End' marker, never both.
        if self.atEnd:
            reprParts.append('End')
        else:
            reprParts.append(str(self.offset))
        reprParts.append(' <')
        reprParts.append(str(self.priority))
        reprParts.append('.')
        reprParts.append(str(self.classSortOrder))
        if self.isNotGrace == 0:
            reprParts.append('.[Grace]')
        reprParts.append('.')
        reprParts.append(str(self.insertIndex))
        reprParts.append('>')
        return ''.join(reprParts)

    def modify(self, **kw):
        '''
        return a new SortTuple identical to the previous, except with
        the given keyword modified. Works only with keywords.

        >>> st = sorting.SortTuple(atEnd=0, offset=1.0, priority=0, classSortOrder=20,
        ...            isNotGrace=1, insertIndex=32)
        >>> st2 = st.modify(offset=2.0)
        >>> st2.shortRepr()
        '2.0 <0.20.32>'
        >>> st2
        SortTuple(atEnd=0, offset=2.0, priority=0, classSortOrder=20, isNotGrace=1, insertIndex=32)

        >>> st3 = st2.modify(atEnd=1, isNotGrace=0)
        >>> st3.shortRepr()
        'End <0.20.[Grace].32>'

        The original tuple is never modified (hence tuple):

        >>> st.offset
        1.0

        Changing offset, but nothing else, helps in creating .flatten() positions.
        '''
        # Fall back to this tuple's own value for any field not overridden.
        outList = [kw.get(attr, getattr(self, attr)) for attr in _attrList]
        return self.__class__(*outList)

    def add(self, other):
        '''
        Add all attributes from one sortTuple to another,
        returning a new one.

        >>> n = note.Note()
        >>> n.offset = 10
        >>> s = stream.Stream()
        >>> s.offset = 10
        >>> n.sortTuple()
        SortTuple(atEnd=0, offset=10.0, priority=0, classSortOrder=20, isNotGrace=1, insertIndex=0)
        >>> s.sortTuple()
        SortTuple(atEnd=0, offset=10.0, priority=0, classSortOrder=-20, isNotGrace=1, insertIndex=0)
        >>> s.sortTuple().add(n.sortTuple())
        SortTuple(atEnd=0, offset=20.0, priority=0, classSortOrder=0, isNotGrace=1, insertIndex=0)

        Note that atEnd and isNotGrace are flags, so instead of summing they
        take the max value of either (upper bounded at 1).
        '''
        if not isinstance(other, self.__class__):
            raise SortingException('Cannot add attributes from a different class')
        # Flags are combined with max; all other fields are summed.
        outList = [max(getattr(self, attr), getattr(other, attr))
                   if attr in ('atEnd', 'isNotGrace')
                   else (getattr(self, attr) + getattr(other, attr))
                   for attr in _attrList]
        return self.__class__(*outList)

    def sub(self, other):
        '''
        Subtract all attributes from to another. atEnd and isNotGrace take the min value of either.

        >>> n = note.Note()
        >>> n.offset = 10
        >>> s = stream.Stream()
        >>> s.offset = 10
        >>> n.sortTuple()
        SortTuple(atEnd=0, offset=10.0, priority=0, classSortOrder=20, isNotGrace=1, insertIndex=0)
        >>> s.sortTuple()
        SortTuple(atEnd=0, offset=10.0, priority=0, classSortOrder=-20, isNotGrace=1, insertIndex=0)
        >>> s.sortTuple().sub(n.sortTuple())
        SortTuple(atEnd=0, offset=0.0, priority=0, classSortOrder=-40, isNotGrace=1, insertIndex=0)

        Note that atEnd and isNotGrace are lower bounded at 0.
        '''
        if not isinstance(other, self.__class__):
            raise SortingException('Cannot add attributes from a different class')
        # Flags are combined with min; all other fields are subtracted.
        outList = [min(getattr(self, attr), getattr(other, attr))
                   if attr in ('atEnd', 'isNotGrace')
                   else (getattr(self, attr) - getattr(other, attr))
                   for attr in _attrList]
        return self.__class__(*outList)
# Module-level singleton SortTuples, all at offset 0.0. They differ only in
# priority (0, -inf, +inf), so they sort among, before, or after everything
# else at the same offset, respectively.
ZeroSortTupleDefault = SortTuple(atEnd=0, offset=0.0, priority=0, classSortOrder=0,
                                 isNotGrace=1, insertIndex=0)
ZeroSortTupleLow = SortTuple(atEnd=0, offset=0.0, priority=-INFINITY, classSortOrder=0,
                             isNotGrace=1, insertIndex=0)
ZeroSortTupleHigh = SortTuple(atEnd=0, offset=0.0, priority=INFINITY, classSortOrder=0,
                              isNotGrace=1, insertIndex=0)
# -----------------------------------------------------------------------------
if __name__ == '__main__':
    # Run the module's doctests via music21's test harness.
    import music21
    music21.mainTest()
| [
"collections.namedtuple",
"music21.mainTest"
] | [((1427, 1461), 'collections.namedtuple', 'namedtuple', (['"""SortTuple"""', '_attrList'], {}), "('SortTuple', _attrList)\n", (1437, 1461), False, 'from collections import namedtuple\n'), ((8991, 9009), 'music21.mainTest', 'music21.mainTest', ([], {}), '()\n', (9007, 9009), False, 'import music21\n')] |
#!/usr/bin/env python
"""
$ python main.py 3
906609 = 913 * 993
"""
import sys
from math import sqrt
def is_palindrome(i):
    """Return True if the decimal digits of *i* read the same in both directions."""
    digits = str(i)
    return digits == ''.join(reversed(digits))
def get_divisors(i):
    """Yield (small, large) factor pairs of *i* with small <= sqrt(i), ascending."""
    limit = 1 + int(sqrt(i))
    for candidate in range(1, limit):
        if i % candidate:
            continue
        yield candidate, i // candidate
def find_palindrome(digits):
    """Scan products downward from (10**digits)**2 and return the first
    factor pair (a, b) of two *digits*-digit numbers whose product is a
    palindrome, or None if no such product exists.
    """
    for product in range((10 ** digits) ** 2, 0, -1):
        text = str(product)
        if text != text[::-1]:
            continue
        # Enumerate divisor pairs (small, large) with small <= sqrt(product),
        # exactly as get_divisors does, inlined here.
        for small in range(1, 1 + int(sqrt(product))):
            if product % small:
                continue
            large = product // small
            if len(str(small)) == digits and len(str(large)) == digits:
                return small, large
if __name__ == '__main__':
    # Usage: python main.py DIGITS
    digits = int(sys.argv[1])
    a, b = find_palindrome(digits)
    print('{} = {} * {}'.format(a * b, a, b))
| [
"math.sqrt"
] | [((214, 221), 'math.sqrt', 'sqrt', (['i'], {}), '(i)\n', (218, 221), False, 'from math import sqrt\n')] |
# Copyright 2015, Pinterest, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Definition of a token used to generate unique version values.
Blessed version is stored in the master as any other token. Each time a new
version number is needed, it is generated off the value stored in that token.
The value stored in blessed version is a monotonically increasing counter
so it is guaranteed that no single value is issued more than once.
"""
import sys
import time
from pinball.config.utils import timestamp_to_str
from pinball.master.thrift_lib.ttypes import Token
__author__ = '<NAME>'
__copyright__ = 'Copyright 2015, Pinterest, Inc.'
__credits__ = [__author__]
__license__ = 'Apache'
__version__ = '2.0'
class BlessedVersion(Token):
    """A singleton token keeping track of token versions.

    Versions of tokens stored in a given master are required to be unique.
    """
    def __init__(self, name=None, owner=None):
        """Create blessed version with a given name and owner.

        Name and owner have to either both be set or none should be set.
        Blessed version in use should always have name and owner set. The
        variant with both left as None relies on external initialization of
        those fields (see from_token).

        Args:
            name: The name of the blessed version token.
            owner: The owner of the blessed version token.
        """
        # Both truthy or both falsy -- same invariant as before.
        assert bool(name) == bool(owner)
        if not name:
            Token.__init__(self)
            return
        now = BlessedVersion._get_timestamp_millis()
        # NOTE: this module targets Python 2 (sys.maxint, integer division).
        description = ('blessed version created at %s' %
                       timestamp_to_str(now / 1000))
        Token.__init__(self, now, name, owner, sys.maxint, 0, description)

    @staticmethod
    def from_token(token):
        """Build a BlessedVersion that shares all attribute values of *token*."""
        result = BlessedVersion()
        result.__dict__.update(token.__dict__)
        return result

    @staticmethod
    def _get_timestamp_millis():
        """Return time in milliseconds since the epoch."""
        return int(1000 * time.time())

    def advance_version(self):
        """Increase the internal version counter.

        The counter value is based on the current time. Since those values
        are used as token modification ids, basing them on time has an
        advantage for debugging - looking at the version we can tell when a
        token was modified.

        A BIG WARNING: as an application developer do not assume anything about
        the semantics of version values other than their uniqueness. The
        implementation details are subject to change.
        """
        candidate = BlessedVersion._get_timestamp_millis()
        self.version = max(self.version + 1, candidate)
        return self.version
| [
"pinball.master.thrift_lib.ttypes.Token.__init__",
"time.time",
"pinball.config.utils.timestamp_to_str"
] | [((2175, 2238), 'pinball.master.thrift_lib.ttypes.Token.__init__', 'Token.__init__', (['self', 'now', 'name', 'owner', 'sys.maxint', '(0)', 'data_str'], {}), '(self, now, name, owner, sys.maxint, 0, data_str)\n', (2189, 2238), False, 'from pinball.master.thrift_lib.ttypes import Token\n'), ((2265, 2285), 'pinball.master.thrift_lib.ttypes.Token.__init__', 'Token.__init__', (['self'], {}), '(self)\n', (2279, 2285), False, 'from pinball.master.thrift_lib.ttypes import Token\n'), ((2133, 2161), 'pinball.config.utils.timestamp_to_str', 'timestamp_to_str', (['(now / 1000)'], {}), '(now / 1000)\n', (2149, 2161), False, 'from pinball.config.utils import timestamp_to_str\n'), ((2636, 2647), 'time.time', 'time.time', ([], {}), '()\n', (2645, 2647), False, 'import time\n')] |
"""
Some notes:
HDI: Highest Density Interval.
ROPE: Region of Practical Equivalence.
"""
import numpy as np
from matplotlib import pyplot as plt
def ch01_01():
    """Bernoulli-likelihood demo: sequential Bayesian updates of a click
    probability over a discretized theta grid, plotted after each batch.
    """
    thetas = np.linspace(0, 1, 1001)
    print(thetas)

    def likelihood(r):
        # P(click | theta) = theta; P(no click | theta) = 1 - theta.
        return thetas if r else (1 - thetas)

    def posterior(r, prior):
        unnormalized = likelihood(r) * prior
        return unnormalized / unnormalized.sum()

    # Uniform prior over the grid.
    belief = np.array([1 / len(thetas) for _ in thetas])
    print(belief)
    # Bayesian update by click-event(r=1)
    belief = posterior(1, belief)
    print(belief)
    plt.plot(thetas, belief)
    plt.xlabel(r'$\theta$')
    plt.ylabel(r'$p(\theta)$')
    plt.show()

    # Second experiment: 2 clicks and 38 non-clicks, from a fresh uniform prior.
    clicks = 2
    noclicks = 38
    belief = np.array([1 / len(thetas) for _ in thetas])
    for _ in range(clicks):
        belief = posterior(1, belief)
    for _ in range(noclicks):
        belief = posterior(0, belief)
    print(belief)
    plt.plot(thetas, belief)
    plt.xlabel(r'$\theta$')
    plt.ylabel(r'$p(\theta)$')
    plt.show()
def ch01_02():
    """Binomial-likelihood comparison: posterior click-rate curves for two
    users (Alice: small samples, Bob: large samples) as stacked subplots.
    """
    thetas = np.linspace(0, 1, 1001)
    print(thetas)

    def likelihood(a, N):
        # Binomial kernel (constant nCk factor omitted; it normalizes out).
        return thetas ** a * (1 - thetas) ** (N - a)

    def posterior(a, N, prior):
        unnormalized = likelihood(a, N) * prior
        return unnormalized / unnormalized.sum()

    prior = 1 / len(thetas)

    def draw_panel(row, curves):
        # One subplot with every (a, N) posterior curve drawn in order.
        plt.subplot(2, 1, row)
        for (a, N), label in curves:
            plt.plot(thetas, posterior(a, N, prior), label=label)
        plt.xlabel(r'$\theta$')
        plt.ylabel(r'$p(\theta)$')
        plt.xlim(0, 0.2)
        plt.legend()

    draw_panel(1, [((2, 40), 'Alice - A'), ((4, 50), 'Alice - B')])
    draw_panel(2, [((64, 1280), 'Bob - A'), ((128, 1600), 'Bob - B')])
    plt.tight_layout()
    plt.show()
def ch01_03():
    """Same comparison as ch01_02, expressed through the conjugate
    Beta(a + 1, N - a + 1) posterior evaluated on a discrete grid.
    """
    thetas = np.linspace(0, 1, 1001)
    print(thetas)

    def beta_density(alpha, beta):
        kernel = thetas ** (alpha - 1) * (1 - thetas) ** (beta - 1)
        return kernel / kernel.sum()

    def posterior(a, N):
        # Uniform prior => posterior is Beta(a + 1, N - a + 1).
        return beta_density(a + 1, N - a + 1)

    def draw_panel(row, curves):
        # One subplot with every (a, N) posterior curve drawn in order.
        plt.subplot(2, 1, row)
        for (a, N), label in curves:
            plt.plot(thetas, posterior(a, N), label=label)
        plt.xlabel(r'$\theta$')
        plt.ylabel(r'$p(\theta)$')
        plt.xlim(0, 0.2)
        plt.legend()

    draw_panel(1, [((2, 40), 'Alice - A'), ((4, 50), 'Alice - B')])
    draw_panel(2, [((64, 1280), 'Bob - A'), ((128, 1600), 'Bob - B')])
    plt.tight_layout()
    plt.show()
def ch01_04():
    """Highest Density Interval (HDI) illustration.

    First draws a single posterior with its 95% HDI annotated, then draws
    four stacked posteriors (two users x two variants), each with its HDI.

    Fix: the final ``plt.show`` was referenced without being called, so the
    second (four-panel) figure was never displayed.
    """
    def hmv(xs, ps, alpha=0.95):
        """Highest Mass Value function.

        Parameters:
            xs : Probability variables (grid points).
            ps : Probability mass at each grid point.
            alpha : Fraction of total mass to cover (threshold).
        Return:
            Sorted array of the xs values inside the highest-mass region.
        """
        # Sort points by descending mass and keep the smallest set whose
        # cumulative mass stays within alpha.
        xps = sorted(zip(xs, ps), key=lambda xp: xp[1], reverse=True)
        xps = np.array(xps)
        xs = xps[:, 0]
        ps = xps[:, 1]
        return np.sort(xs[np.cumsum(ps) <= alpha])

    thetas = np.linspace(0, 1, 1001)

    def posterior(a, N):
        # Beta(a + 1, N - a + 1) density on the grid (uniform prior).
        alpha = a + 1
        beta = N - a + 1
        numerator = thetas ** (alpha - 1) * (1 - thetas) ** (beta - 1)
        return numerator / numerator.sum()

    # Figure 1: single posterior with its 95% HDI annotated.
    ps = posterior(2, 40)
    hm_thetas = hmv(thetas, ps, alpha=0.95)
    plt.plot(thetas, ps)
    plt.annotate('', xy=(hm_thetas.min(), 0),
                 xytext=(hm_thetas.max(), 0),
                 arrowprops=dict(color='black', shrinkA=0, shrinkB=0,
                                 arrowstyle='<->', linewidth=2))
    plt.annotate('%.3f' % hm_thetas.min(), xy=(hm_thetas.min(), 0),
                 ha='right', va='bottom')
    plt.annotate('%.3f' % hm_thetas.max(), xy=(hm_thetas.max(), 0),
                 ha='left', va='bottom')
    plt.annotate('95% HDI', xy=(hm_thetas.mean(), 0),
                 ha='center', va='bottom')
    hm_region = (hm_thetas.min() < thetas) & (thetas < hm_thetas.max())
    plt.fill_between(thetas[hm_region], ps[hm_region], 0, alpha=0.3)
    plt.xlabel(r'$\theta$')
    plt.ylabel(r'$p(\theta)$')
    plt.xlim(0, 0.3)
    plt.tight_layout()
    plt.show()

    def plot_hdi(ps, label):
        """Plot one posterior with its 95% HDI into the current subplot."""
        hm_thetas = hmv(thetas, ps, 0.95)
        plt.plot(thetas, ps)
        plt.annotate('', xy=(hm_thetas.min(), 0),
                     xytext=(hm_thetas.max(), 0),
                     arrowprops=dict(color='black', shrinkA=0, shrinkB=0,
                                     arrowstyle='<->', linewidth=2))
        plt.annotate('%.3f' % hm_thetas.min(), xy=(hm_thetas.min(), 0),
                     ha='right', va='bottom')
        plt.annotate('%.3f' % hm_thetas.max(), xy=(hm_thetas.max(), 0),
                     ha='left', va='bottom')
        plt.annotate('95% HDI', xy=(hm_thetas.mean(), 0),
                     ha='center', va='bottom')
        hm_region = (hm_thetas.min() < thetas) & (thetas < hm_thetas.max())
        plt.fill_between(thetas[hm_region], ps[hm_region], 0, alpha=0.3)
        plt.xlim(0, 0.3)
        plt.ylabel(label)
        plt.yticks([])

    # Figure 2: four stacked posteriors, each with its own HDI.
    plt.subplot(4, 1, 1)
    alice_a = posterior(2, 40)
    plot_hdi(alice_a, '<NAME>')
    plt.subplot(4, 1, 2)
    alice_b = posterior(4, 50)
    plot_hdi(alice_b, '<NAME>')
    plt.subplot(4, 1, 3)
    bob_a = posterior(64, 1280)
    plot_hdi(bob_a, '<NAME>')
    plt.subplot(4, 1, 4)
    bob_b = posterior(128, 1600)
    plot_hdi(bob_b, '<NAME>')
    plt.xlabel(r'$\theta$')
    plt.tight_layout()
    plt.show()  # was `plt.show` (missing call) -- the figure never appeared
def ch01_05():
    """Monte Carlo comparison of two Beta posteriors: histogram of the
    difference delta = theta_b - theta_a and the probability that B beats A.
    """
    # Keep the sampling order so the RNG stream matches the original.
    samples_a = np.random.beta(3, 39, size=100000)
    samples_b = np.random.beta(5, 47, size=100000)
    delta = samples_b - samples_a
    plt.hist(delta, range=(-0.3, 0.3), bins=60)
    plt.xlabel(r'$\delta$')
    plt.ylabel(r'Frequency')
    plt.show()
    # Fraction of draws in which B outperforms A.
    print((delta > 0).mean())
| [
"numpy.random.beta",
"matplotlib.pyplot.hist",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.fill_between",
"numpy.array",
"numpy.linspace",
"matplotlib.pyplot.yticks",
"matplotlib.pyplot.tight_layout",
"numpy.cumsum",
"matplotlib.pyplot.x... | [((203, 226), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', '(1001)'], {}), '(0, 1, 1001)\n', (214, 226), True, 'import numpy as np\n'), ((548, 567), 'matplotlib.pyplot.plot', 'plt.plot', (['thetas', 'p'], {}), '(thetas, p)\n', (556, 567), True, 'from matplotlib import pyplot as plt\n'), ((572, 595), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""$\\\\theta$"""'], {}), "('$\\\\theta$')\n", (582, 595), True, 'from matplotlib import pyplot as plt\n'), ((600, 626), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""$p(\\\\theta)$"""'], {}), "('$p(\\\\theta)$')\n", (610, 626), True, 'from matplotlib import pyplot as plt\n'), ((631, 641), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (639, 641), True, 'from matplotlib import pyplot as plt\n'), ((869, 888), 'matplotlib.pyplot.plot', 'plt.plot', (['thetas', 'p'], {}), '(thetas, p)\n', (877, 888), True, 'from matplotlib import pyplot as plt\n'), ((893, 916), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""$\\\\theta$"""'], {}), "('$\\\\theta$')\n", (903, 916), True, 'from matplotlib import pyplot as plt\n'), ((921, 947), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""$p(\\\\theta)$"""'], {}), "('$p(\\\\theta)$')\n", (931, 947), True, 'from matplotlib import pyplot as plt\n'), ((952, 962), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (960, 962), True, 'from matplotlib import pyplot as plt\n'), ((1031, 1054), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', '(1001)'], {}), '(0, 1, 1001)\n', (1042, 1054), True, 'import numpy as np\n'), ((1275, 1295), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(1)', '(1)'], {}), '(2, 1, 1)\n', (1286, 1295), True, 'from matplotlib import pyplot as plt\n'), ((1430, 1453), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""$\\\\theta$"""'], {}), "('$\\\\theta$')\n", (1440, 1453), True, 'from matplotlib import pyplot as plt\n'), ((1458, 1484), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""$p(\\\\theta)$"""'], {}), "('$p(\\\\theta)$')\n", 
(1468, 1484), True, 'from matplotlib import pyplot as plt\n'), ((1489, 1505), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(0)', '(0.2)'], {}), '(0, 0.2)\n', (1497, 1505), True, 'from matplotlib import pyplot as plt\n'), ((1510, 1522), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (1520, 1522), True, 'from matplotlib import pyplot as plt\n'), ((1527, 1547), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(1)', '(2)'], {}), '(2, 1, 2)\n', (1538, 1547), True, 'from matplotlib import pyplot as plt\n'), ((1685, 1708), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""$\\\\theta$"""'], {}), "('$\\\\theta$')\n", (1695, 1708), True, 'from matplotlib import pyplot as plt\n'), ((1713, 1739), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""$p(\\\\theta)$"""'], {}), "('$p(\\\\theta)$')\n", (1723, 1739), True, 'from matplotlib import pyplot as plt\n'), ((1744, 1760), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(0)', '(0.2)'], {}), '(0, 0.2)\n', (1752, 1760), True, 'from matplotlib import pyplot as plt\n'), ((1765, 1777), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (1775, 1777), True, 'from matplotlib import pyplot as plt\n'), ((1782, 1800), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (1798, 1800), True, 'from matplotlib import pyplot as plt\n'), ((1805, 1815), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1813, 1815), True, 'from matplotlib import pyplot as plt\n'), ((1910, 1933), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', '(1001)'], {}), '(0, 1, 1001)\n', (1921, 1933), True, 'import numpy as np\n'), ((2165, 2185), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(1)', '(1)'], {}), '(2, 1, 1)\n', (2176, 2185), True, 'from matplotlib import pyplot as plt\n'), ((2306, 2329), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""$\\\\theta$"""'], {}), "('$\\\\theta$')\n", (2316, 2329), True, 'from matplotlib import pyplot as plt\n'), ((2334, 2360), 'matplotlib.pyplot.ylabel', 'plt.ylabel', 
(['"""$p(\\\\theta)$"""'], {}), "('$p(\\\\theta)$')\n", (2344, 2360), True, 'from matplotlib import pyplot as plt\n'), ((2365, 2381), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(0)', '(0.2)'], {}), '(0, 0.2)\n', (2373, 2381), True, 'from matplotlib import pyplot as plt\n'), ((2386, 2398), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (2396, 2398), True, 'from matplotlib import pyplot as plt\n'), ((2403, 2423), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(1)', '(2)'], {}), '(2, 1, 2)\n', (2414, 2423), True, 'from matplotlib import pyplot as plt\n'), ((2547, 2570), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""$\\\\theta$"""'], {}), "('$\\\\theta$')\n", (2557, 2570), True, 'from matplotlib import pyplot as plt\n'), ((2575, 2601), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""$p(\\\\theta)$"""'], {}), "('$p(\\\\theta)$')\n", (2585, 2601), True, 'from matplotlib import pyplot as plt\n'), ((2606, 2622), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(0)', '(0.2)'], {}), '(0, 0.2)\n', (2614, 2622), True, 'from matplotlib import pyplot as plt\n'), ((2627, 2639), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (2637, 2639), True, 'from matplotlib import pyplot as plt\n'), ((2644, 2662), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (2660, 2662), True, 'from matplotlib import pyplot as plt\n'), ((2667, 2677), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2675, 2677), True, 'from matplotlib import pyplot as plt\n'), ((3150, 3173), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', '(1001)'], {}), '(0, 1, 1001)\n', (3161, 3173), True, 'import numpy as np\n'), ((3436, 3456), 'matplotlib.pyplot.plot', 'plt.plot', (['thetas', 'ps'], {}), '(thetas, ps)\n', (3444, 3456), True, 'from matplotlib import pyplot as plt\n'), ((4076, 4140), 'matplotlib.pyplot.fill_between', 'plt.fill_between', (['thetas[hm_region]', 'ps[hm_region]', '(0)'], {'alpha': '(0.3)'}), '(thetas[hm_region], ps[hm_region], 0, alpha=0.3)\n', (4092, 
4140), True, 'from matplotlib import pyplot as plt\n'), ((4145, 4168), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""$\\\\theta$"""'], {}), "('$\\\\theta$')\n", (4155, 4168), True, 'from matplotlib import pyplot as plt\n'), ((4173, 4199), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""$p(\\\\theta)$"""'], {}), "('$p(\\\\theta)$')\n", (4183, 4199), True, 'from matplotlib import pyplot as plt\n'), ((4204, 4220), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(0)', '(0.3)'], {}), '(0, 0.3)\n', (4212, 4220), True, 'from matplotlib import pyplot as plt\n'), ((4225, 4243), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (4241, 4243), True, 'from matplotlib import pyplot as plt\n'), ((4248, 4258), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4256, 4258), True, 'from matplotlib import pyplot as plt\n'), ((5181, 5201), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(4)', '(1)', '(1)'], {}), '(4, 1, 1)\n', (5192, 5201), True, 'from matplotlib import pyplot as plt\n'), ((5269, 5289), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(4)', '(1)', '(2)'], {}), '(4, 1, 2)\n', (5280, 5289), True, 'from matplotlib import pyplot as plt\n'), ((5357, 5377), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(4)', '(1)', '(3)'], {}), '(4, 1, 3)\n', (5368, 5377), True, 'from matplotlib import pyplot as plt\n'), ((5444, 5464), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(4)', '(1)', '(4)'], {}), '(4, 1, 4)\n', (5455, 5464), True, 'from matplotlib import pyplot as plt\n'), ((5532, 5555), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""$\\\\theta$"""'], {}), "('$\\\\theta$')\n", (5542, 5555), True, 'from matplotlib import pyplot as plt\n'), ((5560, 5578), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (5576, 5578), True, 'from matplotlib import pyplot as plt\n'), ((5639, 5673), 'numpy.random.beta', 'np.random.beta', (['(3)', '(39)'], {'size': '(100000)'}), '(3, 39, size=100000)\n', (5653, 5673), True, 'import numpy as np\n'), ((5688, 
5722), 'numpy.random.beta', 'np.random.beta', (['(5)', '(47)'], {'size': '(100000)'}), '(5, 47, size=100000)\n', (5702, 5722), True, 'import numpy as np\n'), ((5757, 5800), 'matplotlib.pyplot.hist', 'plt.hist', (['delta'], {'range': '(-0.3, 0.3)', 'bins': '(60)'}), '(delta, range=(-0.3, 0.3), bins=60)\n', (5765, 5800), True, 'from matplotlib import pyplot as plt\n'), ((5805, 5828), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""$\\\\delta$"""'], {}), "('$\\\\delta$')\n", (5815, 5828), True, 'from matplotlib import pyplot as plt\n'), ((5833, 5856), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Frequency"""'], {}), "('Frequency')\n", (5843, 5856), True, 'from matplotlib import pyplot as plt\n'), ((5862, 5872), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5870, 5872), True, 'from matplotlib import pyplot as plt\n'), ((3025, 3038), 'numpy.array', 'np.array', (['xps'], {}), '(xps)\n', (3033, 3038), True, 'import numpy as np\n'), ((4355, 4375), 'matplotlib.pyplot.plot', 'plt.plot', (['thetas', 'ps'], {}), '(thetas, ps)\n', (4363, 4375), True, 'from matplotlib import pyplot as plt\n'), ((5037, 5101), 'matplotlib.pyplot.fill_between', 'plt.fill_between', (['thetas[hm_region]', 'ps[hm_region]', '(0)'], {'alpha': '(0.3)'}), '(thetas[hm_region], ps[hm_region], 0, alpha=0.3)\n', (5053, 5101), True, 'from matplotlib import pyplot as plt\n'), ((5110, 5126), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(0)', '(0.3)'], {}), '(0, 0.3)\n', (5118, 5126), True, 'from matplotlib import pyplot as plt\n'), ((5135, 5152), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['label'], {}), '(label)\n', (5145, 5152), True, 'from matplotlib import pyplot as plt\n'), ((5161, 5175), 'matplotlib.pyplot.yticks', 'plt.yticks', (['[]'], {}), '([])\n', (5171, 5175), True, 'from matplotlib import pyplot as plt\n'), ((3111, 3124), 'numpy.cumsum', 'np.cumsum', (['ps'], {}), '(ps)\n', (3120, 3124), True, 'import numpy as np\n')] |
#!/usr/bin/env python
"""Provides scikit interface."""
import networkx as nx
import random
from itertools import combinations
import logging
logger = logging.getLogger(__name__)
class NeighborhoodNodeRemove(object):
    """Generate perturbed copies of a graph by deleting nodes and keeping
    the largest connected component of what remains.
    """

    def __init__(self, n_nodes=1, n_neighbors=10, part_importance_estimator=None):
        self.part_importance_estimator = part_importance_estimator
        self.n_neighbors = n_neighbors
        self.n_nodes = n_nodes

    def fit(self, graphs, targets):
        """fit (no-op; kept for estimator-API compatibility)."""
        return self

    def neighbors(self, graph):
        """Return a list of perturbed graphs.

        If n_neighbors is None, exhaustively remove every combination of
        n_nodes nodes; otherwise draw n_neighbors random removals.
        """
        if self.n_neighbors is None:
            subsets = combinations(list(graph.nodes()), self.n_nodes)
            return [self._remove_node(graph, subset) for subset in subsets]
        return [self._remove(graph, self.n_nodes)
                for _ in range(self.n_neighbors)]

    def _remove_node(self, gg, node_ids):
        """Delete the given nodes from a copy (never emptying the graph) and
        return a copy of the largest remaining connected component."""
        working = gg.copy()
        for node_id in node_ids:
            if len(working) > 1 and node_id in set(working.nodes()):
                working.remove_node(node_id)
        largest = max(nx.connected_components(working), key=len)
        return nx.subgraph(working, largest).copy()

    def _remove(self, gg, n_nodes):
        """Remove n_nodes randomly chosen nodes from a copy of the graph."""
        working = gg.copy()
        candidates = list(working.nodes())
        random.shuffle(candidates)
        return self._remove_node(working, candidates[:n_nodes])
| [
"logging.getLogger",
"random.shuffle",
"networkx.connected_components",
"networkx.subgraph",
"itertools.combinations"
] | [((151, 178), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (168, 178), False, 'import logging\n'), ((1394, 1418), 'random.shuffle', 'random.shuffle', (['node_ids'], {}), '(node_ids)\n', (1408, 1418), False, 'import random\n'), ((671, 704), 'itertools.combinations', 'combinations', (['nodes', 'self.n_nodes'], {}), '(nodes, self.n_nodes)\n', (683, 704), False, 'from itertools import combinations\n'), ((1156, 1182), 'networkx.connected_components', 'nx.connected_components', (['g'], {}), '(g)\n', (1179, 1182), True, 'import networkx as nx\n'), ((1226, 1248), 'networkx.subgraph', 'nx.subgraph', (['g', 'max_cc'], {}), '(g, max_cc)\n', (1237, 1248), True, 'import networkx as nx\n')] |
import sys
from Quartz import *
import Utilities
import array
def doColorSpaceFillAndStroke(context):
    """Draw two filled-and-stroked squares: red fill with an opaque blue
    stroke, then the same fill with a half-transparent blue stroke."""
    color_space = Utilities.getTheCalibratedRGBColorSpace()
    fill_red = (0.663, 0.0, 0.031, 1.0)      # red, green, blue, alpha
    stroke_blue = (0.482, 0.62, 0.871, 1.0)  # red, green, blue, alpha

    # The component array passed to CGContextSetFillColor/SetStrokeColor must
    # have one entry per component of the current color space, plus alpha.
    CGContextSetFillColorSpace(context, color_space)
    CGContextSetFillColor(context, fill_red)
    CGContextSetStrokeColorSpace(context, color_space)
    CGContextSetStrokeColor(context, stroke_blue)
    CGContextSetLineWidth(context, 8.0)

    # Rectangle 1: opaque blue stroke.
    CGContextBeginPath(context)
    CGContextAddRect(context, CGRectMake(20.0, 20.0, 100.0, 100.0))
    CGContextDrawPath(context, kCGPathFillStroke)

    # Rectangle 2: same stroke color with alpha reduced to 0.5.
    CGContextSetStrokeColor(context, stroke_blue[:3] + (0.5,))
    CGContextBeginPath(context)
    CGContextAddRect(context, CGRectMake(140.0, 20.0, 100.0, 100.0))
    CGContextDrawPath(context, kCGPathFillStroke)
    # The color space is owned by Utilities, so nothing to release here.
# Lazily-created CGColorRef singletons: initialized on the first call to
# drawWithColorRefs and reused on every subsequent redraw.
_opaqueRedColor = None
_opaqueBlueColor = None
_transparentBlueColor = None
def drawWithColorRefs(context):
    """Fill/stroke two rectangles using cached CGColorRef objects.

    On the first call, creates module-level CGColorRefs for opaque red,
    opaque blue, and a 50%-alpha copy of the blue; later calls reuse the
    cached objects.

    Parameters:
        context -- the CGContext to draw into.
    """
    global _opaqueRedColor
    global _opaqueBlueColor
    global _transparentBlueColor
    # Initialize the CGColorRefs if necessary
    if _opaqueRedColor is None:
        # Initialize the color array to an opaque red
        # in the generic calibrated RGB color space.
        color = (0.663, 0.0, 0.031, 1.0)
        theColorSpace = Utilities.getTheCalibratedRGBColorSpace()
        # Create a CGColorRef for opaque red.
        _opaqueRedColor = CGColorCreate(theColorSpace, color)
        # Make the color array correspond to an opaque blue color.
        color = (0.482, 0.62, 0.87, 1.0)
        # Create another CGColorRef for opaque blue.
        _opaqueBlueColor = CGColorCreate(theColorSpace, color)
        # Create a new CGColorRef from the opaqueBlue CGColorRef
        # but with a different alpha value.
        _transparentBlueColor = CGColorCreateCopyWithAlpha(
            _opaqueBlueColor, 0.5)
    if _opaqueRedColor is None or _opaqueBlueColor is None or _transparentBlueColor is None:
        # BUG FIX: the original used Python 2 "print >>sys.stderr, ..."
        # syntax, which under Python 3 evaluates "print >> sys.stderr"
        # instead of printing. Write to stderr portably instead.
        sys.stderr.write("Couldn't create one of the CGColorRefs!!!\n")
        return
    # Set the fill color to the opaque red CGColor object.
    CGContextSetFillColorWithColor(context, _opaqueRedColor)
    # Set the stroke color to the opaque blue CGColor object.
    CGContextSetStrokeColorWithColor(context, _opaqueBlueColor)
    CGContextSetLineWidth(context, 8.0)
    # Draw the first rectangle.
    CGContextBeginPath(context)
    CGContextAddRect(context, CGRectMake(20.0, 20.0, 100.0, 100.0))
    CGContextDrawPath(context, kCGPathFillStroke)
    # Set the stroke color to be that of the transparent blue
    # CGColor object.
    CGContextSetStrokeColorWithColor(context, _transparentBlueColor)
    # Draw a second rectangle to the right of the first one.
    CGContextBeginPath(context)
    CGContextAddRect(context, CGRectMake(140.0, 20.0, 100.0, 100.0))
    CGContextDrawPath(context, kCGPathFillStroke)
def doIndexedColorDrawGraphics(context):
    """Fill/stroke two rectangles through a 2-entry indexed color space.

    Builds an indexed color space over the calibrated RGB space whose
    lookup table holds a shade of red (index 0) and a shade of blue
    (index 1), then draws using (index, alpha) color components.

    Parameters:
        context -- the CGContext to draw into.
    """
    theBaseRGBSpace = Utilities.getTheCalibratedRGBColorSpace()
    lookupTable = array.array('B', (0,)*6)
    opaqueRed = (0, 1) # index, alpha
    aBlue = (1, 1) # index, alpha
    # Set the first 3 values in the lookup table to a red of
    # 169/255 = 0.663, no green, and blue = 8/255 = 0.031. This makes
    # the first entry in the lookup table a shade of red.
    lookupTable[0] = 169; lookupTable[1] = 0; lookupTable[2] = 8
    # Set the second 3 values in the lookup table to a red value
    # of 123/255 = 0.482, a green value of 158/255 = 0.62, and
    # a blue value of 222/255 = 0.871. This makes the second entry
    # in the lookup table a shade of blue.
    lookupTable[3] = 123; lookupTable[4] = 158; lookupTable[5] = 222
    # Create the indexed color space with this color lookup table,
    # using the RGB color space as the base color space and a 2 element
    # color lookup table to characterize the indexed color space.
    theIndexedSpace = CGColorSpaceCreateIndexed(theBaseRGBSpace, 1, lookupTable)
    if theIndexedSpace is not None:
        CGContextSetStrokeColorSpace(context, theIndexedSpace)
        CGContextSetFillColorSpace(context, theIndexedSpace)
        # Set the stroke color to an opaque blue.
        CGContextSetStrokeColor(context, aBlue)
        # Set the fill color to an opaque red.
        CGContextSetFillColor(context, opaqueRed)
        CGContextSetLineWidth(context, 8.0)
        # Draw the first rectangle.
        CGContextBeginPath(context)
        CGContextAddRect(context, CGRectMake(20.0, 20.0, 100.0, 100.0))
        CGContextDrawPath(context, kCGPathFillStroke)
        # Continue to use the stroke colorspace already set
        # but change the stroke alpha value to a semitransparent value
        # while leaving the index value unchanged.
        aBlue = list(aBlue)
        aBlue[1] = 0.5
        CGContextSetStrokeColor(context, aBlue)
        # Draw another rectangle to the right of the first one.
        CGContextBeginPath(context)
        CGContextAddRect(context, CGRectMake(140.0, 20.0, 100.0, 100.0))
        CGContextDrawPath(context, kCGPathFillStroke)
    else:
        # BUG FIX: the original Python 2 "print >>sys.stderr, ..." syntax
        # does not print under Python 3; write to stderr directly.
        sys.stderr.write("Couldn't make the indexed color space!\n")
def drawWithGlobalAlpha(context):
    """Draw two rows of three red rectangles to contrast per-color alpha
    with the context-wide global alpha.

    Each row paints rectangles at 100%, 50%, and 25% color alpha; before
    the second loop iteration the context's global alpha is set to 0.5
    and the origin is translated down one row, so the second row is
    additionally dimmed by the global alpha.

    Parameters:
        context -- the CGContext to draw into.
    """
    rect = CGRectMake(40.0, 210.0, 100.0, 100.0)
    color = [1.0, 0.0, 0.0, 1.0] # opaque red
    # Set the fill color space to that returned by getTheCalibratedRGBColorSpace.
    CGContextSetFillColorSpace(context, Utilities.getTheCalibratedRGBColorSpace())
    CGContextSetFillColor(context, color)
    for i in range(2):
        CGContextSaveGState(context)
        # Paint the leftmost rect on this row with 100% opaque red.
        CGContextFillRect(context, rect)
        CGContextTranslateCTM(context, rect.size.width + 70.0, 0.0)
        # Set the alpha value of this rgba color to 0.5.
        color[3] = 0.5
        # Use the new color as the fill color in the graphics state.
        CGContextSetFillColor(context, color)
        # Paint the center rect on this row with 50% opaque red.
        CGContextFillRect(context, rect)
        CGContextTranslateCTM(context, rect.size.width + 70.0, 0.0)
        # Set the alpha value of this rgba color to 0.25.
        color[3] = 0.25
        # Use the new color as the fill color in the graphics state.
        CGContextSetFillColor(context, color)
        # Paint the rightmost rect on this row with 25% opaque red.
        CGContextFillRect(context, rect)
        CGContextRestoreGState(context)
        # After restoring the graphics state, the fill color is set to
        # that prior to calling CGContextSaveGState, that is, opaque
        # red. The coordinate system is also restored.
        # Now set the context global alpha value to 50% opaque.
        CGContextSetAlpha(context, 0.5)
        # Translate down for a second row of rectangles.
        CGContextTranslateCTM(context, 0.0, -(rect.size.height + 70.0))
        # Reset the alpha value of the color array to fully opaque.
        color[3] = 1.0
def drawWithColorBlendMode(context, url):
    """Demonstrate kCGBlendModeColor by colorizing a PDF page.

    Draws page 1 of the PDF at `url` three times: plain, overpainted by
    an opaque green rectangle in Normal blend mode, and overpainted by
    the same rectangle in Color blend mode (which colorizes the page).

    Parameters:
        context -- the CGContext to draw into.
        url -- a CFURL pointing at the PDF document.
    """
    # A pleasant green color.
    green = [0.584, 0.871, 0.318, 1.0]
    # Create a CGPDFDocument object from the URL.
    pdfDoc = CGPDFDocumentCreateWithURL(url)
    if pdfDoc is None:
        # BUG FIX: the original Python 2 "print >>sys.stderr, ..." syntax
        # does not print under Python 3; write to stderr directly.
        sys.stderr.write("Couldn't create CGPDFDocument from URL!\n")
        return
    # Obtain the media box for page 1 of the PDF document.
    pdfRect = CGPDFDocumentGetMediaBox(pdfDoc, 1)
    # Set the origin of the rectangle to (0,0).
    pdfRect.origin.x = pdfRect.origin.y = 0
    # Graphic 1, the left portion of the figure.
    CGContextTranslateCTM(context, 20, 10 + CGRectGetHeight(pdfRect)/2)
    # Draw the PDF document.
    CGContextDrawPDFDocument(context, pdfRect, pdfDoc, 1)
    # Set the fill color space to that returned by getTheCalibratedRGBColorSpace.
    CGContextSetFillColorSpace(context, Utilities.getTheCalibratedRGBColorSpace())
    # Set the fill color to green.
    CGContextSetFillColor(context, green)
    # Graphic 2, the top-right portion of the figure.
    CGContextTranslateCTM(context, CGRectGetWidth(pdfRect) + 10,
                                CGRectGetHeight(pdfRect)/2 + 10)
    # Draw the PDF document again.
    CGContextDrawPDFDocument(context, pdfRect, pdfDoc, 1)
    # Make a fill rectangle that is the same size as the PDF document
    # but inset each side by 80 units in x and 20 units in y.
    insetRect = CGRectInset(pdfRect, 80, 20)
    # Fill the rectangle with green. Because the fill color is opaque and
    # the blend mode is Normal, this obscures the drawing underneath.
    CGContextFillRect(context, insetRect)
    # Graphic 3, the bottom-right portion of the figure.
    CGContextTranslateCTM(context, 0, -(10 + CGRectGetHeight(pdfRect)))
    # Draw the PDF document again.
    CGContextDrawPDFDocument(context, pdfRect, pdfDoc, 1)
    # Set the blend mode to kCGBlendModeColor which will
    # colorize the destination with subsequent drawing.
    CGContextSetBlendMode(context, kCGBlendModeColor)
    # Draw the rectangle on top of the PDF document. The portion of the
    # background that is covered by the rectangle is colorized
    # with the fill color.
    CGContextFillRect(context, insetRect)
def createEllipsePath(context, center, ellipseSize):
    """Add a closed elliptical path to the context's current path.

    The ellipse is centered at `center` and has the width/height given by
    `ellipseSize`. The CTM manipulation used to build the path is wrapped
    in a graphics-state save/restore, so only the path survives the call.

    Parameters:
        context -- the CGContext whose path is extended.
        center -- a CGPoint for the ellipse center.
        ellipseSize -- a CGSize giving the ellipse's width and height.
    """
    CGContextSaveGState(context)
    # Move the origin to the ellipse center, then scale so a unit circle
    # drawn here has exactly the requested width and height.
    CGContextTranslateCTM(context, center.x, center.y)
    CGContextScaleCTM(context, ellipseSize.width/2, ellipseSize.height/2)
    CGContextBeginPath(context)
    # A full 360-degree arc of radius 1 in the scaled coordinate system
    # produces the ellipse.
    CGContextAddArc(context, 0.0, 0.0, 1.0, 0.0,
                    Utilities.DEGREES_TO_RADIANS(360.0), 0.0)
    # Close the path so it can also be stroked cleanly.
    CGContextClosePath(context)
    CGContextRestoreGState(context)
# Module-level CGColorRef caches; created lazily on the first call to
# doClippedEllipse() and reused on subsequent calls.
_opaqueBrownColor = None
_opaqueOrangeColor = None
def doClippedEllipse(context):
    """Draw two figures built from overlapping rotated ellipses.

    Left figure: an orange ellipse rotated 45 degrees painted over a
    brown one. Right figure: the brown ellipse is dash-stroked and then
    used as a clipping area before painting the rotated orange ellipse.
    CGColorRefs for brown and orange are cached at module level on the
    first call.

    Parameters:
        context -- the CGContext to draw into.
    """
    global _opaqueBrownColor, _opaqueOrangeColor
    theCenterPoint = CGPoint(120.0, 120.0)
    theEllipseSize = CGSize(100.0, 200.0)
    dash = [ 2.0 ]
    # Initialize the CGColorRefs if necessary.
    if _opaqueBrownColor is None:
        # The initial value of the color array is an
        # opaque brown in an RGB color space.
        color = [0.325, 0.208, 0.157, 1.0]
        theColorSpace = Utilities.getTheCalibratedRGBColorSpace()
        # Create a CGColorRef for opaque brown.
        _opaqueBrownColor = CGColorCreate(theColorSpace, color)
        # Make the color array correspond to an opaque orange.
        color = [0.965, 0.584, 0.059, 1.0 ]
        # Create another CGColorRef for opaque orange.
        _opaqueOrangeColor = CGColorCreate(theColorSpace, color)
    # Draw two ellipses centered about the same point, one
    # rotated 45 degrees from the other.
    CGContextSaveGState(context)
    # Ellipse 1
    createEllipsePath(context, theCenterPoint, theEllipseSize)
    CGContextSetFillColorWithColor(context, _opaqueBrownColor)
    CGContextFillPath(context)
    # Translate and rotate about the center point of the ellipse.
    CGContextTranslateCTM(context, theCenterPoint.x, theCenterPoint.y)
    # Rotate by 45 degrees.
    CGContextRotateCTM(context, Utilities.DEGREES_TO_RADIANS(45))
    # Ellipse 2
    # CGPointZero is a pre-defined Quartz point corresponding to
    # the coordinate (0,0).
    createEllipsePath(context, CGPointZero, theEllipseSize)
    CGContextSetFillColorWithColor(context, _opaqueOrangeColor)
    CGContextFillPath(context)
    CGContextRestoreGState(context)
    CGContextTranslateCTM(context, 170.0, 0.0)
    # Now use the first ellipse as a clipping area prior to
    # painting the second ellipse.
    CGContextSaveGState(context)
    # Ellipse 3
    createEllipsePath(context, theCenterPoint, theEllipseSize)
    CGContextSetStrokeColorWithColor(context, _opaqueBrownColor)
    CGContextSetLineDash(context, 0, dash, 1)
    # Stroke the path with a dash.
    CGContextStrokePath(context)
    # Ellipse 4
    createEllipsePath(context, theCenterPoint, theEllipseSize)
    # Clip to the elliptical path.
    CGContextClip(context)
    CGContextTranslateCTM(context, theCenterPoint.x, theCenterPoint.y)
    # Rotate by 45 degrees.
    CGContextRotateCTM(context, Utilities.DEGREES_TO_RADIANS(45))
    # Ellipse 5
    createEllipsePath(context, CGPointZero, theEllipseSize)
    CGContextSetFillColorWithColor(context, _opaqueOrangeColor)
    CGContextFillPath(context)
    CGContextRestoreGState(context)
| [
"array.array",
"Utilities.getTheCalibratedRGBColorSpace",
"Utilities.DEGREES_TO_RADIANS"
] | [((125, 166), 'Utilities.getTheCalibratedRGBColorSpace', 'Utilities.getTheCalibratedRGBColorSpace', ([], {}), '()\n', (164, 166), False, 'import Utilities\n'), ((3861, 3902), 'Utilities.getTheCalibratedRGBColorSpace', 'Utilities.getTheCalibratedRGBColorSpace', ([], {}), '()\n', (3900, 3902), False, 'import Utilities\n'), ((3921, 3947), 'array.array', 'array.array', (['"""B"""', '((0,) * 6)'], {}), "('B', (0,) * 6)\n", (3932, 3947), False, 'import array\n'), ((2187, 2228), 'Utilities.getTheCalibratedRGBColorSpace', 'Utilities.getTheCalibratedRGBColorSpace', ([], {}), '()\n', (2226, 2228), False, 'import Utilities\n'), ((6311, 6352), 'Utilities.getTheCalibratedRGBColorSpace', 'Utilities.getTheCalibratedRGBColorSpace', ([], {}), '()\n', (6350, 6352), False, 'import Utilities\n'), ((8722, 8763), 'Utilities.getTheCalibratedRGBColorSpace', 'Utilities.getTheCalibratedRGBColorSpace', ([], {}), '()\n', (8761, 8763), False, 'import Utilities\n'), ((10744, 10779), 'Utilities.DEGREES_TO_RADIANS', 'Utilities.DEGREES_TO_RADIANS', (['(360.0)'], {}), '(360.0)\n', (10772, 10779), False, 'import Utilities\n'), ((11434, 11475), 'Utilities.getTheCalibratedRGBColorSpace', 'Utilities.getTheCalibratedRGBColorSpace', ([], {}), '()\n', (11473, 11475), False, 'import Utilities\n'), ((12319, 12351), 'Utilities.DEGREES_TO_RADIANS', 'Utilities.DEGREES_TO_RADIANS', (['(45)'], {}), '(45)\n', (12347, 12351), False, 'import Utilities\n'), ((13359, 13391), 'Utilities.DEGREES_TO_RADIANS', 'Utilities.DEGREES_TO_RADIANS', (['(45)'], {}), '(45)\n', (13387, 13391), False, 'import Utilities\n')] |
import torch
import numpy as np
import torch.nn as nn
import torch.nn.functional as F
def uniform_dist(a, b, size):
    '''
    Draw samples from the uniform distribution Unif(a, b).

    Parameters
    ----------
    a, b : float
        Lower and upper bounds of the interval.
    size : int or tuple of ints
        Shape of the returned tensor.
    '''
    # Rescale standard Unif(0, 1) samples onto [a, b).
    return a + (b - a) * torch.rand(size)
def safe_log(tens, epsilon: float = 1e-5):
    '''
    Numerically safe elementwise logarithm: log(tens + epsilon).

    The small positive offset keeps the result finite at zero inputs.
    '''
    shifted = tens + epsilon
    return shifted.log()
def sample_dist(probs):
    '''
    Draw one categorical sample per row of a probability matrix.

    Parameters
    ----------
    probs: numpy.float array, shape = (num_samples, num_values)
        Note: the sum of each row must be = 1

    Returns
    -------
    numpy.ndarray, shape = (num_samples,)
        The sampled value index for each row.
    '''
    # Hoist the invariant candidate-value vector out of the loop and
    # build the result with a comprehension instead of repeated append.
    values = np.arange(probs.shape[1])
    return np.array([np.random.choice(values, p=row) for row in probs])
"numpy.array",
"torch.log",
"torch.rand",
"numpy.arange"
] | [((198, 214), 'torch.rand', 'torch.rand', (['size'], {}), '(size)\n', (208, 214), False, 'import torch\n'), ((346, 371), 'torch.log', 'torch.log', (['(tens + epsilon)'], {}), '(tens + epsilon)\n', (355, 371), False, 'import torch\n'), ((760, 779), 'numpy.array', 'np.array', (['generated'], {}), '(generated)\n', (768, 779), True, 'import numpy as np\n'), ((717, 738), 'numpy.arange', 'np.arange', (['num_values'], {}), '(num_values)\n', (726, 738), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
from datetime import date
from flask import render_template, redirect, url_for, flash, request, make_response
from werkzeug.urls import url_unquote
from fibra import app
from fibra.models import db, Customer, Contact, Invoice, Payment, STATES
from fibra.forms import (
CustomerForm, ContactForm, InvoiceForm, CustomerInvoiceForm,
PaymentForm, CustomerPaymentForm, InvoicePaymentForm
)
from fibra.report import GeneralReport, CustomerReport
from fibra.utils import render_json
@app.route("/")
def index():
    """Landing page: forward the visitor to the customer list."""
    target = url_for('customer_list')
    return redirect(target)
## Customers ##
@app.route('/customers/')
def customer_list():
    """List customers that have open (pending or expired) invoices,
    ordered by the nearest invoice expiration date."""
    customers = Customer.query.join(Invoice)\
            .filter(Invoice.state.in_(['PENDING', 'EXPIRED']))\
            .order_by(Invoice.expiration_date.asc())
    return render_template("customer_list.html", customers=customers)
@app.route('/customers/report/')
def customers_report():
    """Render a downloadable general report over all customers with
    open (pending or expired) invoices."""
    customers = Customer.query.join(Invoice)\
            .filter(Invoice.state.in_(['PENDING', 'EXPIRED']))\
            .order_by(Invoice.expiration_date.asc())
    report = GeneralReport(query=customers)
    return report.response()
@app.route('/customers/<int:customer_id>/')
def customer_detail(customer_id):
    """Show one customer's detail page with all of their invoices,
    newest expiration first. Returns 404 for an unknown customer."""
    customer = Customer.query.get_or_404(customer_id)
    invoices = customer.invoices.order_by(Invoice.expiration_date.desc())
    return render_template("customer_detail.html", customer=customer, invoices=invoices)
@app.route('/customers/<int:customer_id>/report/')
def customer_report(customer_id):
    """Render a per-customer report; `?detailed=` toggles the detailed
    variant. Returns 404 for an unknown customer."""
    customer = Customer.query.get_or_404(customer_id)
    # NOTE(review): with type=bool, any non-empty query value — including
    # "false" — is truthy; confirm that is the intended toggle semantics.
    detailed = request.args.get('detailed', False, type=bool)
    report = CustomerReport(customer=customer, detailed=detailed)
    return report.response()
@app.route('/customers/<int:customer_id>/edit/', methods=['GET', 'POST'])
@app.route('/customers/new/', methods=['GET', 'POST'])
def customer_edit(customer_id=None):
    """Create a new customer or edit an existing one.

    On GET, renders the edit form; a previously stored "customer_name"
    cookie pre-fills the name field. On a valid POST, persists the
    customer, flashes a message, and redirects to the detail page.
    The cookie is cleared on every response.
    """
    customer = Customer()
    msg = "El nuevo cliente se creó satisfactoriamente"
    if customer_id:
        customer = Customer.query.get_or_404(customer_id)
        msg = "El cliente se editó satisfactoriamente"
    # Pre-fill the name from the cookie only when the customer has none.
    if 'customer_name' in request.cookies and not customer.name:
        customer.name = url_unquote(request.cookies.get('customer_name'))
    form = CustomerForm(obj=customer)
    if form.validate_on_submit():
        form.populate_obj(customer)
        # Normalize a falsy id to None so the database assigns one.
        if not customer.id:
            customer.id = None
        db.session.add(customer)
        db.session.commit()
        flash(msg)
        resp = make_response(redirect(url_for('customer_detail', customer_id=customer.id)))
    else:
        if not form.id.data:
            form.id.data = None
        resp = make_response(render_template("customer_edit.html", form=form))
    # Clear the pre-fill cookie regardless of outcome.
    resp.set_cookie("customer_name", '')
    return resp
@app.route('/customers/<int:customer_id>/invoices/new/', methods=['GET', 'POST'])
@app.route('/customers/<int:customer_id>/invoices/<int:invoice_id>/edit/', methods=['GET', 'POST'])
def customer_edit_invoice(customer_id, invoice_id=None):
    """Create a new invoice for a customer, or edit an existing one.

    On a valid POST, persists the invoice (attached to the customer),
    flashes a message, and redirects to the customer's detail page.
    Returns 404 for an unknown customer or invoice.
    """
    customer = Customer.query.get_or_404(customer_id)
    invoice = Invoice(issue_date=date.today())
    msg = "El nuevo documento se agregó a <strong>%s</strong> satisfactoriamente" % customer.name
    if invoice_id:
        invoice = Invoice.query.get_or_404(invoice_id)
        msg = "El documento se editó satisfactoriamente"
    form = CustomerInvoiceForm(obj=invoice)
    if form.validate_on_submit():
        form.populate_obj(invoice)
        # Normalize a falsy id to None so the database assigns one.
        if not invoice.id:
            invoice.id = None
        invoice.customer = customer
        db.session.add(invoice)
        db.session.commit()
        flash(msg)
        return redirect(url_for('customer_detail', customer_id=customer.id))
    if not form.id.data:
        form.id.data = None
    return render_template('invoice_edit.html', form=form, customer=customer)
@app.route('/customers/<int:customer_id>/pay/', methods=['GET', 'POST'])
def customer_add_payment(customer_id):
    """Record a payment for a customer, applied across the open invoices
    selected in the form.

    The invoice choices are limited to the customer's pending/expired
    invoices. On a valid POST, persists the payment and redirects to the
    customer's detail page. Returns 404 for an unknown customer.
    """
    customer = Customer.query.get_or_404(customer_id)
    form = CustomerPaymentForm(date=date.today())
    form.invoices.query = search_invoices_query(customer_id=customer.id)
    if form.validate_on_submit():
        payment = Payment()
        invoices = form.invoices.data
        # Remove the invoices field so populate_obj only sets the
        # payment's own columns; the invoices are linked explicitly below.
        del form.invoices
        form.populate_obj(payment)
        payment.add_invoices(invoices)
        db.session.add(payment)
        db.session.commit()
        flash("El pago se agregó a <strong>%s</strong> satisfactoriamente" % customer.name)
        return redirect(url_for('customer_detail', customer_id=customer.id))
    return render_template("customer_add_payment.html", form=form, customer=customer, Invoice=Invoice)
## Contact ##
@app.route('/customers/<int:customer_id>/contacts/new/', methods=['GET', 'POST'])
@app.route('/customers/contacts/<int:contact_id>/edit/', methods=['GET', 'POST'])
def contact_edit(customer_id=None, contact_id=None):
    """Create a contact for a customer, or edit an existing contact.

    NOTE(review): `contact`/`customer` are only bound inside the two
    branches; the routes always supply exactly one of the ids, but a
    direct call with neither would raise NameError — worth a guard.
    """
    if customer_id:
        contact = Contact()
        customer = Customer.query.get_or_404(customer_id)
        msg = "El contacto se agregó a <strong>%s</strong> satisfactoriamente" % customer.name
    if contact_id:
        contact = Contact.query.get_or_404(contact_id)
        customer = contact.customer
        msg = "El contacto se editó satisfactoriamente"
    form = ContactForm(obj=contact)
    if form.validate_on_submit():
        form.populate_obj(contact)
        # Normalize a falsy id to None so the database assigns one.
        if not contact.id:
            contact.id = None
        contact.customer = customer
        db.session.add(contact)
        db.session.commit()
        flash(msg)
        return redirect(url_for("customer_detail", customer_id=customer.id))
    if not form.id.data:
        form.id.data = None
    return render_template("contact_edit.html", form=form, customer=customer)
## Invoices ##
@app.route('/customers/invoices/<int:invoice_id>/')
def invoice_detail(invoice_id):
    """Show the detail page for a single invoice (404 if unknown)."""
    invoice = Invoice.query.get_or_404(invoice_id)
    return render_template("invoice_detail.html", invoice=invoice)
@app.route('/customers/invoices/new/', methods=['GET', 'POST'])
def invoice_new():
    """Create a new invoice, choosing the customer in the form.

    On a valid POST, persists the invoice and redirects to the selected
    customer's detail page.
    """
    invoice = Invoice(issue_date=date.today())
    form = InvoiceForm(obj=invoice)
    form.customer.query = Customer.query.order_by(Customer.name)
    if form.validate_on_submit():
        form.populate_obj(invoice)
        # Normalize a falsy id to None so the database assigns one.
        if not invoice.id:
            invoice.id = None
        db.session.add(invoice)
        db.session.commit()
        flash("El documento se agregó a <strong>%s</strong> satisfactoriamente" % form.customer.data.name)
        return redirect(url_for('customer_detail', customer_id=form.customer.data.id))
    return render_template('invoice_new.html', form=form)
@app.route('/customers/invoices/<int:invoice_id>/pay/', methods=['GET', 'POST'])
def invoice_add_payment(invoice_id):
    """Record a payment against one specific invoice.

    The payment amount defaults to the invoice's remaining balance. On a
    valid POST, attaches the payment to the invoice and redirects to the
    customer's detail page. Returns 404 for an unknown invoice.
    """
    invoice = Invoice.query.get_or_404(invoice_id)
    form = InvoicePaymentForm(date=date.today(), amount=invoice.balance)
    if form.validate_on_submit():
        payment = Payment()
        form.populate_obj(payment)
        invoice.add_payment(payment)
        db.session.commit()
        flash("El pago se agregó a <strong>%s</strong> satisfactoriamente" % invoice.customer.name)
        return redirect(url_for('customer_detail', customer_id=invoice.customer.id))
    return render_template("invoice_add_payment.html", form=form, invoice=invoice)
## Payments ##
@app.route('/customers/invoices/pay/', methods=['GET', 'POST'])
def payment_new():
    """Record a payment, choosing the customer in the form.

    The customer choices are limited to customers with pending/expired
    invoices. On a valid POST, persists the payment and redirects to the
    customer list.
    """
    form = PaymentForm(date=date.today())
    form.customer.query = Customer.query.filter(Customer.id==Invoice.customer_id)\
            .filter(Invoice.state.in_(['PENDING', 'EXPIRED']))
    if form.validate_on_submit():
        payment = Payment()
        form.populate_obj(payment)
        db.session.add(payment)
        db.session.commit()
        flash("El pago se agregó a <strong>%s</strong> satisfactoriamente" % form.customer.data.name)
        return redirect(url_for('customer_list'))
    return render_template('payment_new.html', form=form)
# ajax interface
@app.route('/_search/customers/')
def ajax_search_customers():
    """AJAX endpoint: search customers by name.

    Splits the `q` query parameter into whitespace-separated terms and
    ANDs a case-insensitive substring filter per term. Returns a JSON
    list of {value, url} objects for an autocomplete widget.
    """
    def _serialize(i):
        return {
            'value': i.name,
            'url': url_for('customer_detail', customer_id=i.id)
        }
    term = str(request.args.get('q', '', type=str)).strip().split()
    if term:
        query = Customer.query
        # Each term narrows the match (AND semantics).
        for t in term:
            query = query.filter(Customer.name.ilike('%'+t+'%'))
        customers = query.values('id', 'name')
        retval = list(map(_serialize, customers))
    else:
        retval = []
    return render_json(retval)
@app.route('/_search/invoices/')
def ajax_search_invoices():
    """AJAX endpoint: list invoices as JSON.

    Optional query parameters: `customer` (an id) restricts to one
    customer; repeated `state` values filter by state, defaulting to
    pending/expired invoices.
    """
    def _serialize(i):
        return {
            'id': i.id,
            'fulldesc': i.fulldesc,
            'state': i.state,
            'balance': str(i.balance),
            'expiration_date': i.expiration_date.isoformat(),
            'customer_id': i.customer_id,
        }
    customer_id = request.args.get('customer', None, type=int)
    states = request.args.getlist('state', type=str) or ['PENDING', 'EXPIRED']
    query = search_invoices_query(customer_id, states)
    return render_json(list(map(_serialize, query)))
# helpers
def search_invoices_query(customer_id=None, states=('PENDING', 'EXPIRED')):
    """Build an invoice query ordered by ascending expiration date.

    Parameters:
        customer_id -- when given, restrict to that customer's invoices.
        states -- iterable of invoice state names to match; the single
            value "all" or "*" is a wildcard for every state in STATES.

    Returns the (unevaluated) SQLAlchemy query.

    Fix: the default for `states` was a mutable list — a shared-default
    hazard; a tuple default removes it without changing behavior. The
    argument is also normalized with list() so any iterable is accepted.
    """
    query = Invoice.query.order_by(Invoice.expiration_date.asc())
    if customer_id:
        query = query.join(Customer).filter(Customer.id == customer_id)
    states = list(states)
    # "all"/"*" expands to every known state.
    if len(states) == 1 and states[0] in ("all", "*"):
        states = list(STATES.keys())
    return query.filter(Invoice.state.in_(states))
| [
"flask.render_template",
"flask.request.args.get",
"fibra.models.Customer.query.order_by",
"fibra.models.Contact.query.get_or_404",
"fibra.forms.InvoiceForm",
"fibra.models.Payment",
"fibra.models.Customer.query.get_or_404",
"fibra.app.route",
"fibra.models.Contact",
"flask.flash",
"fibra.utils.... | [((521, 535), 'fibra.app.route', 'app.route', (['"""/"""'], {}), "('/')\n", (530, 535), False, 'from fibra import app\n'), ((615, 639), 'fibra.app.route', 'app.route', (['"""/customers/"""'], {}), "('/customers/')\n", (624, 639), False, 'from fibra import app\n'), ((913, 944), 'fibra.app.route', 'app.route', (['"""/customers/report/"""'], {}), "('/customers/report/')\n", (922, 944), False, 'from fibra import app\n'), ((1224, 1266), 'fibra.app.route', 'app.route', (['"""/customers/<int:customer_id>/"""'], {}), "('/customers/<int:customer_id>/')\n", (1233, 1266), False, 'from fibra import app\n'), ((1521, 1570), 'fibra.app.route', 'app.route', (['"""/customers/<int:customer_id>/report/"""'], {}), "('/customers/<int:customer_id>/report/')\n", (1530, 1570), False, 'from fibra import app\n'), ((1819, 1891), 'fibra.app.route', 'app.route', (['"""/customers/<int:customer_id>/edit/"""'], {'methods': "['GET', 'POST']"}), "('/customers/<int:customer_id>/edit/', methods=['GET', 'POST'])\n", (1828, 1891), False, 'from fibra import app\n'), ((1893, 1946), 'fibra.app.route', 'app.route', (['"""/customers/new/"""'], {'methods': "['GET', 'POST']"}), "('/customers/new/', methods=['GET', 'POST'])\n", (1902, 1946), False, 'from fibra import app\n'), ((2891, 2976), 'fibra.app.route', 'app.route', (['"""/customers/<int:customer_id>/invoices/new/"""'], {'methods': "['GET', 'POST']"}), "('/customers/<int:customer_id>/invoices/new/', methods=['GET', 'POST']\n )\n", (2900, 2976), False, 'from fibra import app\n'), ((2973, 3075), 'fibra.app.route', 'app.route', (['"""/customers/<int:customer_id>/invoices/<int:invoice_id>/edit/"""'], {'methods': "['GET', 'POST']"}), "('/customers/<int:customer_id>/invoices/<int:invoice_id>/edit/',\n methods=['GET', 'POST'])\n", (2982, 3075), False, 'from fibra import app\n'), ((3963, 4034), 'fibra.app.route', 'app.route', (['"""/customers/<int:customer_id>/pay/"""'], {'methods': "['GET', 'POST']"}), "('/customers/<int:customer_id>/pay/', 
methods=['GET', 'POST'])\n", (3972, 4034), False, 'from fibra import app\n'), ((4801, 4886), 'fibra.app.route', 'app.route', (['"""/customers/<int:customer_id>/contacts/new/"""'], {'methods': "['GET', 'POST']"}), "('/customers/<int:customer_id>/contacts/new/', methods=['GET', 'POST']\n )\n", (4810, 4886), False, 'from fibra import app\n'), ((4883, 4968), 'fibra.app.route', 'app.route', (['"""/customers/contacts/<int:contact_id>/edit/"""'], {'methods': "['GET', 'POST']"}), "('/customers/contacts/<int:contact_id>/edit/', methods=['GET', 'POST']\n )\n", (4892, 4968), False, 'from fibra import app\n'), ((5895, 5945), 'fibra.app.route', 'app.route', (['"""/customers/invoices/<int:invoice_id>/"""'], {}), "('/customers/invoices/<int:invoice_id>/')\n", (5904, 5945), False, 'from fibra import app\n'), ((6099, 6161), 'fibra.app.route', 'app.route', (['"""/customers/invoices/new/"""'], {'methods': "['GET', 'POST']"}), "('/customers/invoices/new/', methods=['GET', 'POST'])\n", (6108, 6161), False, 'from fibra import app\n'), ((6774, 6853), 'fibra.app.route', 'app.route', (['"""/customers/invoices/<int:invoice_id>/pay/"""'], {'methods': "['GET', 'POST']"}), "('/customers/invoices/<int:invoice_id>/pay/', methods=['GET', 'POST'])\n", (6783, 6853), False, 'from fibra import app\n'), ((7464, 7526), 'fibra.app.route', 'app.route', (['"""/customers/invoices/pay/"""'], {'methods': "['GET', 'POST']"}), "('/customers/invoices/pay/', methods=['GET', 'POST'])\n", (7473, 7526), False, 'from fibra import app\n'), ((8150, 8182), 'fibra.app.route', 'app.route', (['"""/_search/customers/"""'], {}), "('/_search/customers/')\n", (8159, 8182), False, 'from fibra import app\n'), ((8718, 8749), 'fibra.app.route', 'app.route', (['"""/_search/invoices/"""'], {}), "('/_search/invoices/')\n", (8727, 8749), False, 'from fibra import app\n'), ((851, 909), 'flask.render_template', 'render_template', (['"""customer_list.html"""'], {'customers': 'customers'}), "('customer_list.html', 
customers=customers)\n", (866, 909), False, 'from flask import render_template, redirect, url_for, flash, request, make_response\n'), ((1161, 1191), 'fibra.report.GeneralReport', 'GeneralReport', ([], {'query': 'customers'}), '(query=customers)\n', (1174, 1191), False, 'from fibra.report import GeneralReport, CustomerReport\n'), ((1316, 1354), 'fibra.models.Customer.query.get_or_404', 'Customer.query.get_or_404', (['customer_id'], {}), '(customer_id)\n', (1341, 1354), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((1440, 1517), 'flask.render_template', 'render_template', (['"""customer_detail.html"""'], {'customer': 'customer', 'invoices': 'invoices'}), "('customer_detail.html', customer=customer, invoices=invoices)\n", (1455, 1517), False, 'from flask import render_template, redirect, url_for, flash, request, make_response\n'), ((1620, 1658), 'fibra.models.Customer.query.get_or_404', 'Customer.query.get_or_404', (['customer_id'], {}), '(customer_id)\n', (1645, 1658), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((1674, 1720), 'flask.request.args.get', 'request.args.get', (['"""detailed"""', '(False)'], {'type': 'bool'}), "('detailed', False, type=bool)\n", (1690, 1720), False, 'from flask import render_template, redirect, url_for, flash, request, make_response\n'), ((1734, 1786), 'fibra.report.CustomerReport', 'CustomerReport', ([], {'customer': 'customer', 'detailed': 'detailed'}), '(customer=customer, detailed=detailed)\n', (1748, 1786), False, 'from fibra.report import GeneralReport, CustomerReport\n'), ((1999, 2009), 'fibra.models.Customer', 'Customer', ([], {}), '()\n', (2007, 2009), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((2349, 2375), 'fibra.forms.CustomerForm', 'CustomerForm', ([], {'obj': 'customer'}), '(obj=customer)\n', (2361, 2375), False, 'from fibra.forms import CustomerForm, ContactForm, InvoiceForm, CustomerInvoiceForm, 
PaymentForm, CustomerPaymentForm, InvoicePaymentForm\n'), ((3144, 3182), 'fibra.models.Customer.query.get_or_404', 'Customer.query.get_or_404', (['customer_id'], {}), '(customer_id)\n', (3169, 3182), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((3470, 3502), 'fibra.forms.CustomerInvoiceForm', 'CustomerInvoiceForm', ([], {'obj': 'invoice'}), '(obj=invoice)\n', (3489, 3502), False, 'from fibra.forms import CustomerForm, ContactForm, InvoiceForm, CustomerInvoiceForm, PaymentForm, CustomerPaymentForm, InvoicePaymentForm\n'), ((3893, 3959), 'flask.render_template', 'render_template', (['"""invoice_edit.html"""'], {'form': 'form', 'customer': 'customer'}), "('invoice_edit.html', form=form, customer=customer)\n", (3908, 3959), False, 'from flask import render_template, redirect, url_for, flash, request, make_response\n'), ((4089, 4127), 'fibra.models.Customer.query.get_or_404', 'Customer.query.get_or_404', (['customer_id'], {}), '(customer_id)\n', (4114, 4127), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((4691, 4786), 'flask.render_template', 'render_template', (['"""customer_add_payment.html"""'], {'form': 'form', 'customer': 'customer', 'Invoice': 'Invoice'}), "('customer_add_payment.html', form=form, customer=customer,\n Invoice=Invoice)\n", (4706, 4786), False, 'from flask import render_template, redirect, url_for, flash, request, make_response\n'), ((5395, 5419), 'fibra.forms.ContactForm', 'ContactForm', ([], {'obj': 'contact'}), '(obj=contact)\n', (5406, 5419), False, 'from fibra.forms import CustomerForm, ContactForm, InvoiceForm, CustomerInvoiceForm, PaymentForm, CustomerPaymentForm, InvoicePaymentForm\n'), ((5810, 5876), 'flask.render_template', 'render_template', (['"""contact_edit.html"""'], {'form': 'form', 'customer': 'customer'}), "('contact_edit.html', form=form, customer=customer)\n", (5825, 5876), False, 'from flask import render_template, redirect, url_for, flash, 
request, make_response\n'), ((5992, 6028), 'fibra.models.Invoice.query.get_or_404', 'Invoice.query.get_or_404', (['invoice_id'], {}), '(invoice_id)\n', (6016, 6028), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((6040, 6095), 'flask.render_template', 'render_template', (['"""invoice_detail.html"""'], {'invoice': 'invoice'}), "('invoice_detail.html', invoice=invoice)\n", (6055, 6095), False, 'from flask import render_template, redirect, url_for, flash, request, make_response\n'), ((6239, 6263), 'fibra.forms.InvoiceForm', 'InvoiceForm', ([], {'obj': 'invoice'}), '(obj=invoice)\n', (6250, 6263), False, 'from fibra.forms import CustomerForm, ContactForm, InvoiceForm, CustomerInvoiceForm, PaymentForm, CustomerPaymentForm, InvoicePaymentForm\n'), ((6290, 6328), 'fibra.models.Customer.query.order_by', 'Customer.query.order_by', (['Customer.name'], {}), '(Customer.name)\n', (6313, 6328), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((6724, 6770), 'flask.render_template', 'render_template', (['"""invoice_new.html"""'], {'form': 'form'}), "('invoice_new.html', form=form)\n", (6739, 6770), False, 'from flask import render_template, redirect, url_for, flash, request, make_response\n'), ((6905, 6941), 'fibra.models.Invoice.query.get_or_404', 'Invoice.query.get_or_404', (['invoice_id'], {}), '(invoice_id)\n', (6929, 6941), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((7373, 7444), 'flask.render_template', 'render_template', (['"""invoice_add_payment.html"""'], {'form': 'form', 'invoice': 'invoice'}), "('invoice_add_payment.html', form=form, invoice=invoice)\n", (7388, 7444), False, 'from flask import render_template, redirect, url_for, flash, request, make_response\n'), ((8082, 8128), 'flask.render_template', 'render_template', (['"""payment_new.html"""'], {'form': 'form'}), "('payment_new.html', form=form)\n", (8097, 8128), False, 'from flask import 
render_template, redirect, url_for, flash, request, make_response\n'), ((8695, 8714), 'fibra.utils.render_json', 'render_json', (['retval'], {}), '(retval)\n', (8706, 8714), False, 'from fibra.utils import render_json\n'), ((9081, 9125), 'flask.request.args.get', 'request.args.get', (['"""customer"""', 'None'], {'type': 'int'}), "('customer', None, type=int)\n", (9097, 9125), False, 'from flask import render_template, redirect, url_for, flash, request, make_response\n'), ((569, 593), 'flask.url_for', 'url_for', (['"""customer_list"""'], {}), "('customer_list')\n", (576, 593), False, 'from flask import render_template, redirect, url_for, flash, request, make_response\n'), ((809, 838), 'fibra.models.Invoice.expiration_date.asc', 'Invoice.expiration_date.asc', ([], {}), '()\n', (836, 838), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((1117, 1146), 'fibra.models.Invoice.expiration_date.asc', 'Invoice.expiration_date.asc', ([], {}), '()\n', (1144, 1146), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((1397, 1427), 'fibra.models.Invoice.expiration_date.desc', 'Invoice.expiration_date.desc', ([], {}), '()\n', (1425, 1427), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((2105, 2143), 'fibra.models.Customer.query.get_or_404', 'Customer.query.get_or_404', (['customer_id'], {}), '(customer_id)\n', (2130, 2143), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((2550, 2569), 'fibra.models.db.session.commit', 'db.session.commit', ([], {}), '()\n', (2567, 2569), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((2578, 2588), 'flask.flash', 'flash', (['msg'], {}), '(msg)\n', (2583, 2588), False, 'from flask import render_template, redirect, url_for, flash, request, make_response\n'), ((3365, 3401), 'fibra.models.Invoice.query.get_or_404', 'Invoice.query.get_or_404', (['invoice_id'], {}), 
'(invoice_id)\n', (3389, 3401), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((3713, 3732), 'fibra.models.db.session.commit', 'db.session.commit', ([], {}), '()\n', (3730, 3732), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((3741, 3751), 'flask.flash', 'flash', (['msg'], {}), '(msg)\n', (3746, 3751), False, 'from flask import render_template, redirect, url_for, flash, request, make_response\n'), ((4303, 4312), 'fibra.models.Payment', 'Payment', ([], {}), '()\n', (4310, 4312), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((4459, 4482), 'fibra.models.db.session.add', 'db.session.add', (['payment'], {}), '(payment)\n', (4473, 4482), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((4491, 4510), 'fibra.models.db.session.commit', 'db.session.commit', ([], {}), '()\n', (4508, 4510), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((4519, 4606), 'flask.flash', 'flash', (["('El pago se agregó a <strong>%s</strong> satisfactoriamente' % customer.name)"], {}), "('El pago se agregó a <strong>%s</strong> satisfactoriamente' %\n customer.name)\n", (4524, 4606), False, 'from flask import render_template, redirect, url_for, flash, request, make_response\n'), ((5055, 5064), 'fibra.models.Contact', 'Contact', ([], {}), '()\n', (5062, 5064), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((5084, 5122), 'fibra.models.Customer.query.get_or_404', 'Customer.query.get_or_404', (['customer_id'], {}), '(customer_id)\n', (5109, 5122), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((5255, 5291), 'fibra.models.Contact.query.get_or_404', 'Contact.query.get_or_404', (['contact_id'], {}), '(contact_id)\n', (5279, 5291), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((5630, 5649), 
'fibra.models.db.session.commit', 'db.session.commit', ([], {}), '()\n', (5647, 5649), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((5658, 5668), 'flask.flash', 'flash', (['msg'], {}), '(msg)\n', (5663, 5668), False, 'from flask import render_template, redirect, url_for, flash, request, make_response\n'), ((6499, 6518), 'fibra.models.db.session.commit', 'db.session.commit', ([], {}), '()\n', (6516, 6518), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((6527, 6629), 'flask.flash', 'flash', (["('El documento se agregó a <strong>%s</strong> satisfactoriamente' % form.\n customer.data.name)"], {}), "('El documento se agregó a <strong>%s</strong> satisfactoriamente' %\n form.customer.data.name)\n", (6532, 6629), False, 'from flask import render_template, redirect, url_for, flash, request, make_response\n'), ((7067, 7076), 'fibra.models.Payment', 'Payment', ([], {}), '()\n', (7074, 7076), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((7157, 7176), 'fibra.models.db.session.commit', 'db.session.commit', ([], {}), '()\n', (7174, 7176), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((7185, 7280), 'flask.flash', 'flash', (["('El pago se agregó a <strong>%s</strong> satisfactoriamente' % invoice.\n customer.name)"], {}), "('El pago se agregó a <strong>%s</strong> satisfactoriamente' %\n invoice.customer.name)\n", (7190, 7280), False, 'from flask import render_template, redirect, url_for, flash, request, make_response\n'), ((7719, 7760), 'fibra.models.Invoice.state.in_', 'Invoice.state.in_', (["['PENDING', 'EXPIRED']"], {}), "(['PENDING', 'EXPIRED'])\n", (7736, 7760), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((7814, 7823), 'fibra.models.Payment', 'Payment', ([], {}), '()\n', (7821, 7823), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), 
((7867, 7890), 'fibra.models.db.session.add', 'db.session.add', (['payment'], {}), '(payment)\n', (7881, 7890), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((7899, 7918), 'fibra.models.db.session.commit', 'db.session.commit', ([], {}), '()\n', (7916, 7918), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((7927, 8025), 'flask.flash', 'flash', (["('El pago se agregó a <strong>%s</strong> satisfactoriamente' % form.\n customer.data.name)"], {}), "('El pago se agregó a <strong>%s</strong> satisfactoriamente' % form.\n customer.data.name)\n", (7932, 8025), False, 'from flask import render_template, redirect, url_for, flash, request, make_response\n'), ((9139, 9178), 'flask.request.args.getlist', 'request.args.getlist', (['"""state"""'], {'type': 'str'}), "('state', type=str)\n", (9159, 9178), False, 'from flask import render_template, redirect, url_for, flash, request, make_response\n'), ((9437, 9466), 'fibra.models.Invoice.expiration_date.asc', 'Invoice.expiration_date.asc', ([], {}), '()\n', (9464, 9466), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((9674, 9699), 'fibra.models.Invoice.state.in_', 'Invoice.state.in_', (['states'], {}), '(states)\n', (9691, 9699), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((2300, 2336), 'flask.request.cookies.get', 'request.cookies.get', (['"""customer_name"""'], {}), "('customer_name')\n", (2319, 2336), False, 'from flask import render_template, redirect, url_for, flash, request, make_response\n'), ((2517, 2541), 'fibra.models.db.session.add', 'db.session.add', (['customer'], {}), '(customer)\n', (2531, 2541), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((2781, 2829), 'flask.render_template', 'render_template', (['"""customer_edit.html"""'], {'form': 'form'}), "('customer_edit.html', form=form)\n", (2796, 2829), False, 'from flask 
import render_template, redirect, url_for, flash, request, make_response\n'), ((3216, 3228), 'datetime.date.today', 'date.today', ([], {}), '()\n', (3226, 3228), False, 'from datetime import date\n'), ((3681, 3704), 'fibra.models.db.session.add', 'db.session.add', (['invoice'], {}), '(invoice)\n', (3695, 3704), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((3776, 3827), 'flask.url_for', 'url_for', (['"""customer_detail"""'], {'customer_id': 'customer.id'}), "('customer_detail', customer_id=customer.id)\n", (3783, 3827), False, 'from flask import render_template, redirect, url_for, flash, request, make_response\n'), ((4164, 4176), 'datetime.date.today', 'date.today', ([], {}), '()\n', (4174, 4176), False, 'from datetime import date\n'), ((4627, 4678), 'flask.url_for', 'url_for', (['"""customer_detail"""'], {'customer_id': 'customer.id'}), "('customer_detail', customer_id=customer.id)\n", (4634, 4678), False, 'from flask import render_template, redirect, url_for, flash, request, make_response\n'), ((5598, 5621), 'fibra.models.db.session.add', 'db.session.add', (['contact'], {}), '(contact)\n', (5612, 5621), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((5693, 5744), 'flask.url_for', 'url_for', (['"""customer_detail"""'], {'customer_id': 'customer.id'}), "('customer_detail', customer_id=customer.id)\n", (5700, 5744), False, 'from flask import render_template, redirect, url_for, flash, request, make_response\n'), ((6214, 6226), 'datetime.date.today', 'date.today', ([], {}), '()\n', (6224, 6226), False, 'from datetime import date\n'), ((6467, 6490), 'fibra.models.db.session.add', 'db.session.add', (['invoice'], {}), '(invoice)\n', (6481, 6490), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((6650, 6711), 'flask.url_for', 'url_for', (['"""customer_detail"""'], {'customer_id': 'form.customer.data.id'}), "('customer_detail', 
customer_id=form.customer.data.id)\n", (6657, 6711), False, 'from flask import render_template, redirect, url_for, flash, request, make_response\n'), ((6977, 6989), 'datetime.date.today', 'date.today', ([], {}), '()\n', (6987, 6989), False, 'from datetime import date\n'), ((7301, 7360), 'flask.url_for', 'url_for', (['"""customer_detail"""'], {'customer_id': 'invoice.customer.id'}), "('customer_detail', customer_id=invoice.customer.id)\n", (7308, 7360), False, 'from flask import render_template, redirect, url_for, flash, request, make_response\n'), ((7574, 7586), 'datetime.date.today', 'date.today', ([], {}), '()\n', (7584, 7586), False, 'from datetime import date\n'), ((7614, 7671), 'fibra.models.Customer.query.filter', 'Customer.query.filter', (['(Customer.id == Invoice.customer_id)'], {}), '(Customer.id == Invoice.customer_id)\n', (7635, 7671), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((8045, 8069), 'flask.url_for', 'url_for', (['"""customer_list"""'], {}), "('customer_list')\n", (8052, 8069), False, 'from flask import render_template, redirect, url_for, flash, request, make_response\n'), ((8301, 8345), 'flask.url_for', 'url_for', (['"""customer_detail"""'], {'customer_id': 'i.id'}), "('customer_detail', customer_id=i.id)\n", (8308, 8345), False, 'from flask import render_template, redirect, url_for, flash, request, make_response\n'), ((9635, 9648), 'fibra.models.STATES.keys', 'STATES.keys', ([], {}), '()\n', (9646, 9648), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((735, 776), 'fibra.models.Invoice.state.in_', 'Invoice.state.in_', (["['PENDING', 'EXPIRED']"], {}), "(['PENDING', 'EXPIRED'])\n", (752, 776), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((1043, 1084), 'fibra.models.Invoice.state.in_', 'Invoice.state.in_', (["['PENDING', 'EXPIRED']"], {}), "(['PENDING', 'EXPIRED'])\n", (1060, 1084), False, 'from fibra.models import db, 
Customer, Contact, Invoice, Payment, STATES\n'), ((2627, 2678), 'flask.url_for', 'url_for', (['"""customer_detail"""'], {'customer_id': 'customer.id'}), "('customer_detail', customer_id=customer.id)\n", (2634, 2678), False, 'from flask import render_template, redirect, url_for, flash, request, make_response\n'), ((8525, 8559), 'fibra.models.Customer.name.ilike', 'Customer.name.ilike', (["('%' + t + '%')"], {}), "('%' + t + '%')\n", (8544, 8559), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((677, 705), 'fibra.models.Customer.query.join', 'Customer.query.join', (['Invoice'], {}), '(Invoice)\n', (696, 705), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((985, 1013), 'fibra.models.Customer.query.join', 'Customer.query.join', (['Invoice'], {}), '(Invoice)\n', (1004, 1013), False, 'from fibra.models import db, Customer, Contact, Invoice, Payment, STATES\n'), ((8372, 8407), 'flask.request.args.get', 'request.args.get', (['"""q"""', '""""""'], {'type': 'str'}), "('q', '', type=str)\n", (8388, 8407), False, 'from flask import render_template, redirect, url_for, flash, request, make_response\n')] |
"""Get status of CouchDB."""
import json
from status_base import save, setup_environment, schedule_log, safe_run, mongo_collection, mongo_database, get_parameters
setup_environment()
def status():
    """Run check on CouchDB.

    Queries each configured database via the local CouchDB HTTP API
    (through ``safe_run``/curl), collects the parsed info documents, and
    stores the result with a success flag via ``save``.
    """
    schedule_log("Starting CouchDB Monitor")
    databases = get_parameters()
    store_data = []
    # Pre-initialise so the `if error:` / `save(...)` calls below work even
    # when the database list is empty; the original code raised NameError
    # in that case because both names were only bound inside the loop.
    output = ''
    error = ''
    for database_name in databases:
        schedule_log(database_name)
        command_text = 'curl http://127.0.0.1:5984/%s' % database_name
        schedule_log('Running: %s' % command_text)
        output, error = safe_run(command_text)
        schedule_log('Parsing JSON')
        try:
            data = json.loads(output)
        except (TypeError, ValueError) as ex:
            # json.loads raises ValueError (JSONDecodeError) on malformed
            # JSON and TypeError when `output` is not str/bytes.
            schedule_log('Reporting as failed.')
            schedule_log('%s' % ex)
            schedule_log(output)
            # BUG FIX: was `error = '%s'`, which stored the literal
            # placeholder string instead of a meaningful error message.
            error = 'Failed to parse CouchDB response: %s' % ex
        else:
            schedule_log('Loaded json, saving.')
            store_data.append(data)
    if error:
        save(False, store_data, mongo_database(), mongo_collection(), error)
    else:
        save(True, store_data, mongo_database(), mongo_collection(), output)
    schedule_log('Finished')
| [
"json.loads",
"status_base.safe_run",
"status_base.mongo_database",
"status_base.schedule_log",
"status_base.get_parameters",
"status_base.mongo_collection",
"status_base.setup_environment"
] | [((165, 184), 'status_base.setup_environment', 'setup_environment', ([], {}), '()\n', (182, 184), False, 'from status_base import save, setup_environment, schedule_log, safe_run, mongo_collection, mongo_database, get_parameters\n'), ((237, 277), 'status_base.schedule_log', 'schedule_log', (['"""Starting CouchDB Monitor"""'], {}), "('Starting CouchDB Monitor')\n", (249, 277), False, 'from status_base import save, setup_environment, schedule_log, safe_run, mongo_collection, mongo_database, get_parameters\n'), ((295, 311), 'status_base.get_parameters', 'get_parameters', ([], {}), '()\n', (309, 311), False, 'from status_base import save, setup_environment, schedule_log, safe_run, mongo_collection, mongo_database, get_parameters\n'), ((1113, 1137), 'status_base.schedule_log', 'schedule_log', (['"""Finished"""'], {}), "('Finished')\n", (1125, 1137), False, 'from status_base import save, setup_environment, schedule_log, safe_run, mongo_collection, mongo_database, get_parameters\n'), ((378, 405), 'status_base.schedule_log', 'schedule_log', (['database_name'], {}), '(database_name)\n', (390, 405), False, 'from status_base import save, setup_environment, schedule_log, safe_run, mongo_collection, mongo_database, get_parameters\n'), ((486, 528), 'status_base.schedule_log', 'schedule_log', (["('Running: %s' % command_text)"], {}), "('Running: %s' % command_text)\n", (498, 528), False, 'from status_base import save, setup_environment, schedule_log, safe_run, mongo_collection, mongo_database, get_parameters\n'), ((554, 576), 'status_base.safe_run', 'safe_run', (['command_text'], {}), '(command_text)\n', (562, 576), False, 'from status_base import save, setup_environment, schedule_log, safe_run, mongo_collection, mongo_database, get_parameters\n'), ((586, 614), 'status_base.schedule_log', 'schedule_log', (['"""Parsing JSON"""'], {}), "('Parsing JSON')\n", (598, 614), False, 'from status_base import save, setup_environment, schedule_log, safe_run, mongo_collection, 
mongo_database, get_parameters\n'), ((648, 666), 'json.loads', 'json.loads', (['output'], {}), '(output)\n', (658, 666), False, 'import json\n'), ((680, 716), 'status_base.schedule_log', 'schedule_log', (['"""Loaded json, saving."""'], {}), "('Loaded json, saving.')\n", (692, 716), False, 'from status_base import save, setup_environment, schedule_log, safe_run, mongo_collection, mongo_database, get_parameters\n'), ((976, 992), 'status_base.mongo_database', 'mongo_database', ([], {}), '()\n', (990, 992), False, 'from status_base import save, setup_environment, schedule_log, safe_run, mongo_collection, mongo_database, get_parameters\n'), ((994, 1012), 'status_base.mongo_collection', 'mongo_collection', ([], {}), '()\n', (1010, 1012), False, 'from status_base import save, setup_environment, schedule_log, safe_run, mongo_collection, mongo_database, get_parameters\n'), ((1062, 1078), 'status_base.mongo_database', 'mongo_database', ([], {}), '()\n', (1076, 1078), False, 'from status_base import save, setup_environment, schedule_log, safe_run, mongo_collection, mongo_database, get_parameters\n'), ((1080, 1098), 'status_base.mongo_collection', 'mongo_collection', ([], {}), '()\n', (1096, 1098), False, 'from status_base import save, setup_environment, schedule_log, safe_run, mongo_collection, mongo_database, get_parameters\n'), ((798, 834), 'status_base.schedule_log', 'schedule_log', (['"""Reporting as failed."""'], {}), "('Reporting as failed.')\n", (810, 834), False, 'from status_base import save, setup_environment, schedule_log, safe_run, mongo_collection, mongo_database, get_parameters\n'), ((847, 870), 'status_base.schedule_log', 'schedule_log', (["('%s' % ex)"], {}), "('%s' % ex)\n", (859, 870), False, 'from status_base import save, setup_environment, schedule_log, safe_run, mongo_collection, mongo_database, get_parameters\n'), ((883, 903), 'status_base.schedule_log', 'schedule_log', (['output'], {}), '(output)\n', (895, 903), False, 'from status_base import save, 
setup_environment, schedule_log, safe_run, mongo_collection, mongo_database, get_parameters\n')] |
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-04-22 12:03
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
    # Auto-generated Django 1.11 migration (see file header): creates the
    # LectureDay and LecturePoll models for the `education` app.
    # Runs after the initial `education` migration (which defines Lecture)
    # and after the swappable user model is available.
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('education', '0001_initial'),
    ]
    operations = [
        # LectureDay: a dated occurrence of a Lecture, identified by a
        # client-side UUID primary key (default=uuid.uuid4).
        migrations.CreateModel(
            name='LectureDay',
            fields=[
                ('day', models.PositiveIntegerField()),
                ('start_date', models.DateTimeField()),
                ('end_date', models.DateTimeField()),
                ('code', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
                ('lecture', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='education.Lecture')),
            ],
        ),
        # LecturePoll: links a student (AUTH_USER_MODEL) to a LectureDay
        # with an `is_attended` flag and a creation timestamp.
        migrations.CreateModel(
            name='LecturePoll',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('is_attended', models.BooleanField(default=False)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('lecture_day', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='education.LectureDay')),
                ('student', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| [
"django.db.models.ForeignKey",
"django.db.models.BooleanField",
"django.db.models.AutoField",
"django.db.models.PositiveIntegerField",
"django.db.models.DateTimeField",
"django.db.migrations.swappable_dependency",
"django.db.models.UUIDField"
] | [((302, 359), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (333, 359), False, 'from django.db import migrations, models\n'), ((534, 563), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {}), '()\n', (561, 563), False, 'from django.db import migrations, models\n'), ((597, 619), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (617, 619), False, 'from django.db import migrations, models\n'), ((651, 673), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (671, 673), False, 'from django.db import migrations, models\n'), ((701, 772), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'default': 'uuid.uuid4', 'primary_key': '(True)', 'serialize': '(False)'}), '(default=uuid.uuid4, primary_key=True, serialize=False)\n', (717, 772), False, 'from django.db import migrations, models\n'), ((803, 894), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""education.Lecture"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'education.Lecture')\n", (820, 894), False, 'from django.db import migrations, models\n'), ((1026, 1119), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1042, 1119), False, 'from django.db import migrations, models\n'), ((1150, 1184), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (1169, 1184), False, 'from django.db import migrations, models\n'), ((1215, 1254), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (1235, 1254), False, 'from django.db import migrations, models\n'), 
((1289, 1383), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""education.LectureDay"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'education.LectureDay')\n", (1306, 1383), False, 'from django.db import migrations, models\n'), ((1409, 1505), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL'}), '(on_delete=django.db.models.deletion.CASCADE, to=settings.\n AUTH_USER_MODEL)\n', (1426, 1505), False, 'from django.db import migrations, models\n')] |
"""
monobit.mac - MacOS suitcases and resources
(c) 2019--2021 <NAME>
licence: https://opensource.org/licenses/MIT
"""
import logging
from ..binary import bytes_to_bits
from ..struct import bitfield, big_endian as be
from .. import struct
from ..storage import loaders, savers
from ..font import Font, Glyph, Coord
##############################################################################
# AppleSingle/AppleDouble
# see https://web.archive.org/web/20160304101440/http://kaiser-edv.de/documents/Applesingle_AppleDouble_v1.html
# Container header shared by AppleSingle and AppleDouble files:
# `number_entities` counts the _APPLE_ENTRY index records that follow.
_APPLE_HEADER = be.Struct(
    magic='uint32',
    version='uint32',
    home_fs='16s',
    number_entities='uint16',
)
# Index record locating one entity (e.g. data fork, resource fork) in the file.
_APPLE_ENTRY = be.Struct(
    entry_id='uint32',
    offset='uint32',
    length='uint32',
)
# Values of _APPLE_HEADER.magic for the two container flavours.
_APPLESINGLE_MAGIC = 0x00051600
_APPLEDOUBLE_MAGIC = 0x00051607
# Entry IDs
# Data fork       1  standard Macintosh data fork
# Resource fork   2  standard Macintosh resource fork
# Real name       3  file's name in its home file system
# Comment         4  standard Macintosh comments
# Icon, B&W       5  standard Macintosh black-and-white icon
# Icon, color     6  Macintosh color icon
# file info       7  file information: attributes and so on
# Finder info     9  standard Macintosh Finder information
# The only entry type used here: the resource fork.
_ID_RESOURCE = 2
##############################################################################
# resource fork/dfont format
# see https://developer.apple.com/library/archive/documentation/mac/pdf/MoreMacintoshToolbox.pdf
# Page 1-122 Figure 1-12 Format of a resource header in a resource fork
_RSRC_HEADER = be.Struct(
    data_offset='uint32',
    map_offset='uint32',
    data_length='uint32',
    map_length='uint32',
    # header is padded with zeros to 256 bytes
    # https://github.com/fontforge/fontforge/blob/master/fontforge/macbinary.c
    reserved='240s',
)
# Figure 1-13 Format of resource data for a single resource
_DATA_HEADER = be.Struct(
    length='uint32',
    # followed by `length` bytes of data
)
# Figure 1-14 Format of the resource map in a resource fork
_MAP_HEADER = be.Struct(
    reserved_header='16s',
    reserved_handle='4s',
    reserved_fileref='2s',
    attributes='uint16',
    type_list_offset='uint16',
    name_list_offset='uint16',
    # number of types minus 1
    last_type='uint16',
    # followed by:
    #   type list
    #   reference lists
    #   name list
)
# Figure 1-15 Format of an item in a resource type list
_TYPE_ENTRY = be.Struct(
    rsrc_type='4s',
    # number of resources minus 1
    last_rsrc='uint16',
    ref_list_offset='uint16',
)
# Figure 1-16 Format of an entry in the reference list for a resource type
_REF_ENTRY = be.Struct(
    rsrc_id='uint16',
    name_offset='uint16',
    attributes='uint8',
    # we need a 3-byte offset, will have to construct ourselves...
    # the full 24-bit offset is (data_offset_hi << 16) | data_offset
    data_offset_hi='uint8',
    data_offset='uint16',
    reserved_handle='4s',
)
# Figure 1-17 Format of an item in a resource name list
# 1-byte length followed by bytes
##############################################################################
# NFNT/FONT resource
# the Font Type Element
# https://developer.apple.com/library/archive/documentation/mac/Text/Text-251.html#MARKER-9-442
# NOTE: all multi-byte fields in these structs are big-endian (`be`).
_FONT_TYPE = be.Struct(
    # 15 Reserved. Should be set to 0.
    reserved_15=bitfield('uint16', 1),
    # 14 This bit is set to 1 if the font is not to be expanded to match the screen depth. The
    # font is for color Macintosh computers only if this bit is set to 1. This is for some
    # fonts, such as Kanji, which are too large for synthetic fonts to be effective or
    # meaningful, or bitmapped fonts that are larger than 50 points.
    dont_expand_to_match_screen_depth=bitfield('uint16', 1),
    # 13 This bit is set to 1 if the font describes a fixed-width font, and is set to 0 if the
    # font describes a proportional font. The Font Manager does not check the setting of this bit.
    fixed_width=bitfield('uint16', 1),
    # 12 Reserved. Should be set to 1.
    reserved_12=bitfield('uint16', 1),
    # 10-11 Reserved. Should be set to 0.
    reserved_10_11=bitfield('uint16', 2),
    # 9 This bit is set to 1 if the font contains colors other than black. This font is for
    # color Macintosh computers only if this bit is set to 1.
    has_colors=bitfield('uint16', 1),
    # 8 This bit is set to 1 if the font is a synthetic font, created dynamically from the
    # available font resources in response to a certain color and screen depth combination.
    # The font is for color Macintosh computers only if this bit is set to 1.
    synthetic=bitfield('uint16', 1),
    # 7 This bit is set to 1 if the font has a font color table ('fctb') resource. The font
    # is for color Macintosh computers only if this bit is set to 1.
    has_fctb=bitfield('uint16', 1),
    # 4-6 Reserved. Should be set to 0.
    reserved_4_6=bitfield('uint16', 3),
    # 2-3 These two bits define the depth of the font. Each of the four possible values indicates
    # the number of bits (and therefore, the number of colors) used to represent each pixel
    # in the glyph images.
    # Value Font depth Number of colors
    # 0 1-bit 1
    # 1 2-bit 4
    # 2 4-bit 16
    # 3 8-bit 256
    # Normally the font depth is 0 and the glyphs are specified as monochrome images. If
    # bit 7 of this field is set to 1, a resource of type 'fctb' with the same ID as the font
    # can optionally be provided to assign RGB colors to specific pixel values.
    #
    # If this font resource is a member of a font family, the settings of bits 8 and 9 of the
    # fontStyle field in this font's association table entry should be the same as the settings of
    # bits 2 and 3 in the fontType field. For more information, see "The Font Association Table"
    # on page 4-89.
    depth=bitfield('uint16', 2),
    # 1 This bit is set to 1 if the font resource contains a glyph-width table.
    has_width_table=bitfield('uint16', 1),
    # 0 This bit is set to 1 if the font resource contains an image height table.
    has_height_table=bitfield('uint16', 1),
)
# the header of the NFNT is a FontRec
# https://developer.apple.com/library/archive/documentation/mac/Text/Text-214.html
_NFNT_HEADER = be.Struct(
    # {font type}
    fontType=_FONT_TYPE,
    # {character code of first glyph}
    firstChar='uint16',
    # {character code of last glyph}
    lastChar='uint16',
    # {maximum glyph width}
    widMax='uint16',
    # {maximum glyph kern}
    kernMax='int16',
    # {negative of descent}
    nDescent='int16',
    # {width of font rectangle}
    fRectWidth='uint16',
    # {height of font rectangle}
    fRectHeight='uint16',
    # {offset to width/offset table}
    owTLoc='uint16',
    # {maximum ascent measurement}
    ascent='uint16',
    # {maximum descent measurement}
    descent='uint16',
    # {leading measurement}
    leading='uint16',
    # {row width of bit image in 16-bit wds}
    rowWords='uint16',
    # followed by:
    #   bit image table
    #   bitmap location table
    #   width offset table
    #   glyph-width table
    #   image height table
)
# location table entry
_LOC_ENTRY = be.Struct(
    offset='uint16',
)
# width/offset table entry
# Width/offset table. For every glyph in the font, this table contains a word with the glyph offset
# in the high-order byte and the glyph's width, in integer form, in the low-order byte. The value of
# the offset, when added to the maximum kerning value for the font, determines the horizontal
# distance from the glyph origin to the left edge of the bit image of the glyph, in pixels. If this
# sum is negative, the glyph origin is to the right of the glyph image's left edge, meaning the
# glyph kerns to the left. If the sum is positive, the origin is to the left of the image's left
# edge. If the sum equals zero, the glyph origin corresponds with the left edge of the bit image.
# Missing glyphs are represented by a word value of -1. The last word of this table is also -1,
# representing the end.
_WO_ENTRY = be.Struct(
    offset='uint8',
    width='uint8',
)
# glyph width table entry
_WIDTH_ENTRY = be.Struct(
    width='uint16',
)
# height table entry
# Image height table. For every glyph in the font, this table contains a word that specifies the
# image height of the glyph, in pixels. The image height is the height of the glyph image and is
# less than or equal to the font height. QuickDraw uses the image height for improved character
# plotting, because it only draws the visible part of the glyph. The high-order byte of the word is
# the offset from the top of the font rectangle of the first non-blank (or nonwhite) row in the
# glyph, and the low-order byte is the number of rows that must be drawn. The Font Manager creates
# this table.
_HEIGHT_ENTRY = be.Struct(
    offset='uint8',
    height='uint8',
)
##############################################################################
# FOND resource
# https://developer.apple.com/library/archive/documentation/mac/Text/Text-269.html#MARKER-2-525
# font family flags word (ffFlags); bit numbers below refer to the 16-bit word
_FFLAGS = be.Struct(
    # bit 15: This bit is set to 1 if the font family describes fixed-width fonts, and is cleared
    #         to 0 if the font describes proportional fonts.
    fixed_width=bitfield('uint16', 1),
    # bit 14: This bit is set to 1 if the family fractional-width table is not used, and is cleared
    #         to 0 if the table is used.
    frac_width_unused=bitfield('uint16', 1),
    # bit 13: This bit is set to 1 if the font family should use integer extra width for stylistic
    #         variations. If not set, the font family should compute the fixed-point extra width
    #         from the family style-mapping table, but only if the FractEnable global variable
    #         has a value of TRUE.
    use_int_extra_width=bitfield('uint16', 1),
    # bit 12: This bit is set to 1 if the font family ignores the value of the FractEnable global
    #         variable when deciding whether to use fixed-point values for stylistic variations;
    #         the value of bit 13 is then the deciding factor. The value of the FractEnable global
    #         variable is set by the SetFractEnable procedure.
    ignore_global_fract_enable=bitfield('uint16', 1),
    # bits 2-11: These bits are reserved by Apple and should be cleared to 0.
    reserved_2_11=bitfield('uint16', 10),
    # bit 1: This bit is set to 1 if the resource contains a glyph-width table.
    has_width_table=bitfield('uint16', 1),
    # bit 0: This bit is reserved by Apple and should be cleared to 0.
    reserved_0=bitfield('uint16', 1),
)
# FOND family record header, Inside Macintosh: Text p. 1-110
# NOTE(review): Inside Macintosh declares most of these as signed integers;
# they are parsed as unsigned here -- TODO confirm for fonts with negative
# metrics.
_FOND_HEADER = be.Struct(
    # {flags for family}
    ffFlags=_FFLAGS,
    # {family ID number}
    ffFamID='uint16',
    # {ASCII code of first character}
    ffFirstChar='uint16',
    # {ASCII code of last character}
    ffLastChar='uint16',
    # {maximum ascent for 1-pt font}
    ffAscent='uint16',
    # {maximum descent for 1-pt font}
    ffDescent='uint16',
    # {maximum leading for 1-pt font}
    ffLeading='uint16',
    # {maximum glyph width for 1-pt font}
    ffWidMax='uint16',
    # {offset to family glyph-width table}
    ffWTabOff='uint32',
    # {offset to kerning table}
    ffKernOff='uint32',
    # {offset to style-mapping table}
    ffStylOff='uint32',
    # {style properties info}
    ffProperty=struct.uint16 * 9,
    # {for international use}
    ffIntl=struct.uint16 * 2,
    # {version number}
    ffVersion='uint16',
)
# font association table
# definitions I.M. p4-110
# NOTE(review): signedness of these fields is undetermined here as well
_FA_HEADER = be.Struct(
    # number of entries - 1
    numAssoc='uint16',
)
# font association record - p4-111: maps a (size, style) pair to the
# resource ID of the FONT/NFNT that implements it
_FA_ENTRY = be.Struct(
    fontSize='uint16',
    fontStyle='uint16',
    fontID='uint16',
)
# bit number in the fontStyle word -> style name
_STYLE_MAP = {
    0: 'bold',
    1: 'italic',
    2: 'underline',
    3: 'outline',
    4: 'shadow',
    5: 'condensed',
    6: 'extended',
}
# offset table
# Fig 4-15, <NAME>.: Text p. 4-96
# will max_entry be -1 for an empty table?
_OFFS_HEADER = be.Struct(
    max_entry='int16',
)
# followed by array of uint32 offsets
_OFFS_ENTRY = be.Struct(
    offset='uint32',
)
# p. 4-99
# > Each width is in 16-bit fixed-point format, with the integer part
# > in the high-order 4 bits and the fractional part in the low-order 12 bits.
_FIXED_TYPE = struct.int16
# remember to divide by 2**12...
# bounding-box table
# Fig. 4.26
# same layout as the offset table: a count followed by entries
_BBX_HEADER = _OFFS_HEADER
_BBX_ENTRY = be.Struct(
    # _STYLE_MAP bitfield
    style='uint16',
    # bounding box edges in 4.12 fixed-point (see _FIXED_TYPE above)
    left=_FIXED_TYPE,
    bottom=_FIXED_TYPE,
    right=_FIXED_TYPE,
    top=_FIXED_TYPE,
)
# Family glyph width table
# definitions I.M. p.4-109 / p.4-115
# handle; elsewhere 4 bytes
_HANDLE = struct.uint32
# guess
_BOOLEAN = struct.uint8
# Point data type; 4 bytes e.g. I.M. C-29
# 4-29 "two integers: vertical, horizontal"
_POINT = be.Struct(
    vert='int16',
    horiz='int16',
)
# Family glyph-width table record (WidthTable), I.M. p. 4-109
_WIDTH_TABLE = be.Struct(
    # per-character 4.12 fixed-point widths
    tabData=_FIXED_TYPE*256,
    tabFont=_HANDLE,
    # extra line spacing
    sExtra='int32',
    # extra line spacing due to style
    style='int32',
    # font family ID
    fID='int16',
    # font size request
    fSize='int16',
    # style (face) request
    face='int16',
    # device requested
    device='int16',
    # scale factors requested
    inNumer=_POINT,
    inDenom=_POINT,
    # actual font family ID for table
    aFID='int16',
    # family record used to build up table
    fHand=_HANDLE,
    # used fixed-point family widths
    usedFam=_BOOLEAN,
    # actual face produced
    aFace='uint8',
    # vertical scale output value
    vOutput='int16',
    # horizontal scale output value
    hOutput='int16',
    # vertical scale output value
    vFactor='int16',
    # horizontal scale output value
    hFactor='int16',
    # actual size of font used
    aSize='int16',
    # total size of table
    tabSize='int16',
)
# Style-mapping table
# I.M.:Text p. 4-99
# > The font name suffix subtable and the glyph-encoding subtable that are part of the style-mapping
# > table immediately follow it in the resource data. The font name suffix subtable contains the
# > base font name and the suffixes that can be added to the font family’s name to produce a real
# > PostScript name (one that is recognized by the PostScript LaserWriter printer driver). The
# > style-mapping table uses the suffix table to build a font name for a PostScript printer. The
# > glyph-encoding table allows character codes to be mapped to PostScript glyph names.
_STYLE_TABLE = be.Struct(
    # bit field holding rendering hints - see I.M. p 4-101
    fontClass='int16',
    # offset from the start of this table to the glyph-encoding subtable component
    offset='int32',
    reserved='int32',
    # indexes into the font suffix name table that follows this table
    # "This is an array of 48 integer index values"
    # note C summary has 47 but Pascal summary has 0..47 inclusive
    indexes=struct.int8 * 48,
)
# https://www6.uniovi.es/cscene/CS5/CS5-04.html
# > In Pascal, on the other hand, the first character of the string is the length of the
# > string, and the string is stored in the 255 characters that follow
# > On the Mac, there is a predefined type for Pascal strings, namely, Str255.
_STR255 = be.Struct(
    length='uint8',
    string=struct.char * 255, #*length
)
# font name suffix subtable header; the Str255 entries follow and are read
# with string_from_bytes() since they are variable-length on disk
_NAME_TABLE = be.Struct(
    stringCount='int16',
    #baseFontName=_STR255,
)
def string_from_bytes(data, offset):
    """Read a Pascal string (length byte + payload) at *offset*.

    Returns the raw string bytes and the offset just past the string.
    """
    size = be.uint8.from_bytes(data, offset)
    start = offset + 1
    end = start + size
    return data[start:end], end
# glyph encoding subtable
_ENC_TABLE = be.Struct(
    stringCount='int16',
)
# one glyph-encoding entry: character code plus a Pascal-string glyph name
_ENC_ENTRY = be.Struct(
    char='uint8',
    name=struct.char * 255,
)
# Kerning table
_KERN_TABLE = be.Struct(
    # number of entries - 1
    numKerns='int16',
)
# kerning subtable header: one per style, followed by its kerning pairs
_KERN_ENTRY = be.Struct(
    kernStyle='uint16',
    kernLength='uint16',
)
_KERN_PAIR = be.Struct(
    kernFirst='uint8',
    kernSecond='uint8',
    # kerning value in 1pt fixed format
    kernWidth=_FIXED_TYPE,
)
# script code -> monobit encoding name
# based on:
# [1] Apple Technotes (As of 2002)/te/te_02.html
# [2] https://developer.apple.com/library/archive/documentation/mac/Text/Text-367.html#HEADING367-0
_MAC_ENCODING = {
    0: 'mac-roman',
    1: 'mac-japanese',
    2: 'mac-trad-chinese',
    3: 'mac-korean',
    4: 'mac-arabic',
    5: 'mac-hebrew',
    6: 'mac-greek',
    7: 'mac-cyrillic', # [1] russian
    # 8: [2] right-to-left symbols
    9: 'mac-devanagari',
    10: 'mac-gurmukhi',
    11: 'mac-gujarati',
    12: 'mac-oriya',
    13: 'mac-bengali',
    14: 'mac-tamil',
    15: 'mac-telugu',
    16: 'mac-kannada',
    17: 'mac-malayalam',
    18: 'mac-sinhalese',
    19: 'mac-burmese',
    20: 'mac-khmer',
    21: 'mac-thai',
    22: 'mac-laotian',
    23: 'mac-georgian',
    24: 'mac-armenian',
    25: 'mac-simp-chinese', # [1] maldivian
    26: 'mac-tibetan',
    27: 'mac-mongolian',
    28: 'mac-ethiopic', # [2] == geez
    29: 'mac-centraleurope', # [1] non-cyrillic slavic
    30: 'mac-vietnamese',
    31: 'mac-sindhi', # [2] == ext-arabic
    #32: [1] [2] 'uninterpreted symbols'
}
# font names for system fonts in FONT resources
# (FONT resource id // 128 gives the font number keyed here)
_FONT_NAMES = {
    0: 'Chicago', # system font
    1: 'application font',
    2: 'New York',
    3: 'Geneva',
    4: 'Monaco',
    5: 'Venice',
    6: 'London',
    7: 'Athens',
    8: 'San Francisco',
    9: 'Toronto',
    11: 'Cairo',
    12: 'Los Angeles',
    16: 'Palatino', # found experimentally
    20: 'Times',
    21: 'Helvetica',
    22: 'Courier',
    23: 'Symbol',
    24: 'Taliesin', # later named Mobile -- NOTE(review): original comment was
                    # garbled; presumably about whether it had a FOND entry then
}
# fonts which claim mac-roman encoding but aren't
_NON_ROMAN_NAMES = {
    # https://www.unicode.org/Public/MAPPINGS/VENDORS/APPLE/SYMBOL.TXT
    # > The Mac OS Symbol encoding shares the script code smRoman
    # > (0) with the Mac OS Roman encoding. To determine if the Symbol
    # > encoding is being used, you must check if the font name is
    # > "Symbol".
    'Symbol': 'mac-symbol',
    'Cairo': '',
    'Taliesin': '',
    'Mobile': '',
}
##############################################################################
@loaders.register('dfont', 'suit', name='MacOS resource')
def load_dfont(instream, where=None):
    """
    Load font from a MacOS suitcase.
    """
    # the whole resource fork is read up front and parsed from memory
    return _parse_resource_fork(instream.read())
@loaders.register('apple',
    magic=(_APPLESINGLE_MAGIC.to_bytes(4, 'big'), _APPLEDOUBLE_MAGIC.to_bytes(4, 'big')),
    name='MacOS resource (AppleSingle/AppleDouble container)',
)
def load_apple(instream, where=None):
    """
    Load font from an AppleSingle or AppleDouble container.
    """
    # read everything, then locate and parse the embedded resource fork
    return _parse_apple(instream.read())
##############################################################################
def _parse_apple(data):
    """Parse an AppleSingle or AppleDouble file."""
    header = _APPLE_HEADER.from_bytes(data)
    # the two container formats share the header layout; only the magic differs
    if header.magic == _APPLESINGLE_MAGIC:
        container = 'AppleSingle'
    elif header.magic == _APPLEDOUBLE_MAGIC:
        container = 'AppleDouble'
    else:
        raise ValueError('Not an AppleSingle or AppleDouble file.')
    entries = _APPLE_ENTRY.array(header.number_entities).from_bytes(
        data, _APPLE_HEADER.size
    )
    # locate the first resource-fork entity and parse it
    for entry in entries:
        if entry.entry_id != _ID_RESOURCE:
            continue
        fork_data = data[entry.offset:entry.offset + entry.length]
        return [
            _font.set_properties(
                source_format=f'MacOS {_font.source_format} ({container} container)'
            )
            for _font in _parse_resource_fork(fork_data)
        ]
    raise ValueError('No resource fork found.')
def _parse_resource_fork(data):
    """Parse a MacOS resource fork.

    Scans the resource map for FONT, NFNT and FOND resources, builds a
    directory of family properties from FOND (and FONT directory) entries,
    then parses each bitmap font resource into a Font object.

    Returns a list of Font objects; resources that fail to parse are
    logged and skipped.
    """
    rsrc_header = _RSRC_HEADER.from_bytes(data)
    map_header = _MAP_HEADER.from_bytes(data, rsrc_header.map_offset)
    type_array = _TYPE_ENTRY.array(map_header.last_type + 1)
    # +2 because the length field is considered part of the type list
    type_list_offset = rsrc_header.map_offset + map_header.type_list_offset + 2
    type_list = type_array.from_bytes(data, type_list_offset)
    resources = []
    for type_entry in type_list:
        ref_array = _REF_ENTRY.array(type_entry.last_rsrc + 1)
        ref_list = ref_array.from_bytes(
            data, type_list_offset -2 + type_entry.ref_list_offset
        )
        for ref_entry in ref_list:
            # get name from name list
            # 0xffff marks a nameless resource
            if ref_entry.name_offset == 0xffff:
                name = ''
            else:
                name_offset = (
                    rsrc_header.map_offset + map_header.name_list_offset
                    + ref_entry.name_offset
                )
                # names are Pascal strings: length byte followed by payload
                name_length = data[name_offset]
                name = data[name_offset+1:name_offset+name_length+1].decode('ascii', 'replace')
            # construct the 3-byte integer
            data_offset = ref_entry.data_offset_hi * 0x10000 + ref_entry.data_offset
            offset = rsrc_header.data_offset + _DATA_HEADER.size + data_offset
            if type_entry.rsrc_type == b'sfnt':
                logging.warning('sfnt resources (vector or bitmap) not supported')
            if type_entry.rsrc_type in (b'FONT', b'NFNT', b'FOND'):
                resources.append((type_entry.rsrc_type, ref_entry.rsrc_id, offset, name))
    # construct directory
    # maps font-family numbers / resource ids to property dicts
    info = {}
    for rsrc_type, rsrc_id, offset, name in resources:
        if rsrc_type == b'FOND':
            info.update(_parse_fond(data, offset, name))
        else:
            if rsrc_type == b'FONT':
                # FONT resource ids encode (font_number, point_size)
                font_number, font_size = divmod(rsrc_id, 128)
                if not font_size:
                    # size 0 entries only carry the family name
                    info[font_number] = {
                        'family': name,
                    }
    # parse fonts
    fonts = []
    for rsrc_type, rsrc_id, offset, name in resources:
        if rsrc_type in (b'FONT', b'NFNT'):
            props = {
                'family': name if name else f'{rsrc_id}',
                'source-format': rsrc_type.decode('ascii'),
            }
            if rsrc_type == b'FONT':
                font_number, font_size = divmod(rsrc_id, 128)
                if not font_size:
                    # directory entry only
                    continue
                if font_number in _FONT_NAMES:
                    props['family'] = _FONT_NAMES[font_number]
                else:
                    props['family'] = f'Family {font_number}'
                if font_number in info:
                    props.update({
                        **info[font_number],
                        'point-size': font_size,
                    })
            if rsrc_id in info:
                props.update(info[rsrc_id])
            # some fonts claim mac-roman but use another charset (e.g. Symbol)
            if 'encoding' not in props or props.get('family', '') in _NON_ROMAN_NAMES:
                props['encoding'] = _NON_ROMAN_NAMES.get(props.get('family', ''), 'mac-roman')
            try:
                font = _parse_nfnt(data, offset, props)
            except ValueError as e:
                logging.error('Could not load font: %s', e)
            else:
                fonts.append(font)
    return fonts
def _parse_fond(data, offset, name):
    """Parse a MacOS FOND resource.

    Args:
        data: raw resource-fork bytes.
        offset: start of the FOND resource within `data`.
        name: resource name, used as the family name.

    Returns:
        dict mapping FONT/NFNT resource ids to property dicts
        (family, style, point-size, spacing, encoding).
    """
    fond_header = _FOND_HEADER.from_bytes(data, offset)
    # Font Family Tables:
    # Font Association table (mandatory)
    fa_offset = offset + _FOND_HEADER.size
    fa_header = _FA_HEADER.from_bytes(data, fa_offset)
    fa_list = _FA_ENTRY.array(fa_header.numAssoc+1).from_bytes(data, fa_offset + _FA_HEADER.size)
    # check if any optional tables are expected
    # we don't have a field for bounding-box table offset
    if fond_header.ffWTabOff or fond_header.ffKernOff or fond_header.ffStylOff:
        # Offset table (optional)
        # > Whenever any table, including the glyph-width, kerning, and
        # > style-mapping tables, is included in the resource data, an offset table is included.
        # > The offset table contains a long integer offset value for each table that follows it
        offs_offset = fa_offset + _FA_HEADER.size + _FA_ENTRY.size * (fa_header.numAssoc+1)
        offs_header = _OFFS_HEADER.from_bytes(data, offs_offset)
        # max_entry==-1 if the table is absent?
        offs_list = _OFFS_ENTRY.array(offs_header.max_entry+1).from_bytes(
            data, offs_offset + _OFFS_HEADER.size
        )
        # we already have the offsets we need so no need to use the Offset Table
        # Bounding-box table (optional)
        bbx_offset = offs_offset + _OFFS_HEADER.size + _OFFS_ENTRY.size * (offs_header.max_entry+1)
        bbx_header = _BBX_HEADER.from_bytes(data, bbx_offset)
        bbx_list = _BBX_ENTRY.array(bbx_header.max_entry+1).from_bytes(
            data, bbx_offset + _BBX_HEADER.size
        )
    # Family glyph-width table (optional)
    # use offset given in FOND header
    # this could also be determined from current position ,or from offset table
    if not fond_header.ffWTabOff:
        wtab = ()
    else:
        wtab_offset = offset + fond_header.ffWTabOff
        wtab = _WIDTH_TABLE.from_bytes(data, wtab_offset)
    # Style-mapping table (optional)
    if not fond_header.ffStylOff:
        stab = ()
        names = ()
        encs = ()
    else:
        stab_offset = offset + fond_header.ffStylOff
        stab = _STYLE_TABLE.from_bytes(data, stab_offset)
        # font name suffix subtable
        ntab_offset = stab_offset + _STYLE_TABLE.size
        ntab = _NAME_TABLE.from_bytes(data, ntab_offset)
        # count + 1 as we take the base font name as well
        names = []
        offs = ntab_offset + _NAME_TABLE.size
        for i in range(ntab.stringCount+1):
            string, offs = string_from_bytes(data, offs)
            names.append(string)
        etab_offset = offs
        etab = _ENC_TABLE.from_bytes(data, etab_offset)
        offs += _ENC_TABLE.size
        encs = []
        for i in range(etab.stringCount+1):
            string, offs = string_from_bytes(data, offs)
            encs.append(string)
    # Kerning table (optional)
    if not fond_header.ffKernOff:
        ktab = ()
    else:
        ktab_offset = offset + fond_header.ffKernOff
        ktab = _KERN_TABLE.from_bytes(data, ktab_offset)
        offs = ktab_offset + _KERN_TABLE.size
        pairs = []
        for entry in range(ktab.numKerns+1):
            ke = _KERN_ENTRY.from_bytes(data, offs)
            # This is an integer value that specifies the number of bytes in this kerning subtable
            pair_array = _KERN_PAIR.array(ke.kernLength)
            pairs.append(
                pair_array.from_bytes(data, offs + _KERN_ENTRY.size)
            )
            offs += _KERN_ENTRY.size + pair_array.size
    # Inside Macintosh: Text 6-22
    # > Fonts with IDs below 16384 ($4000) are all Roman; starting with
    # > 16384 each non-Roman script system has a range of 512 ($200) font IDs available
    encoding = _MAC_ENCODING.get(max(0, 1 + (fond_header.ffFamID - 16384) // 512))
    info = {
        # rsrc_id
        fa_entry.fontID: {
            'family': name,
            'style': ' '.join(
                # BUG FIX: original tested `fontStyle & (0 << _bit)`, which is
                # always zero, so no style flag was ever detected; the style
                # word is a bitfield with the bit numbers given in _STYLE_MAP.
                _tag for _bit, _tag in _STYLE_MAP.items() if fa_entry.fontStyle & (1 << _bit)
            ),
            'point-size': fa_entry.fontSize,
            'spacing': 'monospace' if fond_header.ffFlags.fixed_width else 'proportional',
            'encoding': encoding,
        }
        for fa_entry in fa_list
    }
    return info
def _parse_nfnt(data, offset, properties):
    """Parse a MacOS NFNT or FONT resource.

    Args:
        data: raw resource-fork bytes.
        offset: start of the font record within `data`.
        properties: properties gathered so far; updated in place with
            metrics from the font record.

    Returns:
        a Font built from the bitmap strike.

    Raises:
        ValueError: for empty resources or unsupported (anti-aliased or
            colour) font formats.
    """
    fontrec = _NFNT_HEADER.from_bytes(data, offset)
    if not (fontrec.rowWords and fontrec.widMax and fontrec.fRectWidth and fontrec.fRectHeight):
        raise ValueError('Empty FONT/NFNT resource.')
    if fontrec.fontType.depth or fontrec.fontType.has_fctb:
        raise ValueError('Anti-aliased or colour fonts not supported.')
    ###############################################################################################
    # read char tables & bitmaps
    # table offsets
    strike_offset = offset + _NFNT_HEADER.size
    loc_offset = offset + _NFNT_HEADER.size + fontrec.fRectHeight * fontrec.rowWords * 2
    # bitmap strike
    strike = data[strike_offset:loc_offset]
    # location table
    # number of chars: coded chars plus missing symbol
    n_chars = fontrec.lastChar - fontrec.firstChar + 2
    # loc table should have one extra entry to be able to determine widths
    loc_table = _LOC_ENTRY.array(n_chars+1).from_bytes(data, loc_offset)
    # width offset table
    # https://developer.apple.com/library/archive/documentation/mac/Text/Text-252.html
    # owTLoc is a *word* offset; if > 32K its high word is stored in nDescent.
    if fontrec.nDescent > 0:
        # BUG FIX: original read `fontrec.nDescent << 16 + fontrec.owTLoc * 2`,
        # which Python parses as `nDescent << (16 + owTLoc * 2)`.  The 32-bit
        # word offset is (nDescent << 16) | owTLoc, scaled by 2 bytes/word.
        wo_offset = ((fontrec.nDescent << 16) + fontrec.owTLoc) * 2
    else:
        wo_offset = fontrec.owTLoc * 2
    # owtTLoc is offset "from itself" to table; the field sits 16 bytes into the record
    wo_table = _WO_ENTRY.array(n_chars).from_bytes(data, offset + 16 + wo_offset)
    # scalable width table
    # NOTE(review): wo_table above is read at `offset + 16 + wo_offset`, but
    # width_offset is used as an absolute position below -- it may be missing
    # the same base; confirm against a font that sets has_width_table.
    width_offset = wo_offset + _WO_ENTRY.size * n_chars
    if fontrec.fontType.has_width_table:
        width_table = _WIDTH_ENTRY.array(n_chars).from_bytes(data, width_offset)
    # image height table: this can be deduced from the bitmaps
    # https://developer.apple.com/library/archive/documentation/mac/Text/Text-250.html#MARKER-9-414
    # > The Font Manager creates this table.
    if fontrec.fontType.has_height_table:
        height_offset = width_offset
        if fontrec.fontType.has_width_table:
            height_offset += _WIDTH_ENTRY.size * n_chars
        height_table = _HEIGHT_ENTRY.array(n_chars).from_bytes(data, height_offset)
    # parse bitmap strike
    bitmap_strike = bytes_to_bits(strike)
    rows = [
        bitmap_strike[_offs:_offs+fontrec.rowWords*16]
        for _offs in range(0, len(bitmap_strike), fontrec.rowWords*16)
    ]
    # extract width from width/offset table
    # (do we need to consider the width table, if defined?)
    locs = [_loc.offset for _loc in loc_table]
    glyphs = [
        Glyph([_row[_offs:_next] for _row in rows])
        for _offs, _next in zip(locs[:-1], locs[1:])
    ]
    # add glyph metrics
    # scalable-width table
    if fontrec.fontType.has_width_table:
        glyphs = tuple(
            _glyph.modify(scalable_width=_we.width)
            for _glyph, _we in zip(glyphs, width_table)
        )
    # image-height table
    if fontrec.fontType.has_height_table:
        glyphs = tuple(
            _glyph.modify(image_height=_he.height, top_offset=_he.offset)
            for _glyph, _he in zip(glyphs, height_table)
        )
    # width & offset
    glyphs = tuple(
        _glyph.modify(wo_offset=_wo.offset, wo_width=_wo.width)
        for _glyph, _wo in zip(glyphs, wo_table)
    )
    ###############################################################################################
    # convert mac glyph metrics to monobit glyph metrics
    #
    # the 'width' in the width/offset table is the pen advance
    # while the 'offset' is the (positive) offset after applying the
    # (positive or negative) 'kernMax' global offset
    #
    # since
    #   (glyph) advance == offset.x + width + tracking
    # after this transformation we should have
    #   (glyph) advance == wo.width
    # which means
    #   (total) advance == wo.width - kernMax
    # since
    #   (total) advance == (font) offset.x + glyph.advance + (font) tracking
    # and (font) offset.x = -kernMax
    glyphs = tuple(
        _glyph.modify(
            offset=(_glyph.wo_offset, 0),
            tracking=_glyph.wo_width - _glyph.width - _glyph.wo_offset
        )
        # 0xff in both fields marks an undefined glyph; leave those unchanged
        if _glyph.wo_width != 0xff and _glyph.wo_offset != 0xff else _glyph
        for _glyph in glyphs
    )
    # codepoint labels
    labelled = [
        _glyph.modify(codepoint=(_codepoint,))
        for _codepoint, _glyph in enumerate(glyphs[:-1], start=fontrec.firstChar)
    ]
    # last glyph is the "missing" glyph
    labelled.append(glyphs[-1].modify(tags=['missing']))
    # drop undefined glyphs & their labels, so long as they're empty
    glyphs = tuple(
        _glyph for _glyph in labelled
        if (_glyph.wo_width != 0xff and _glyph.wo_offset != 0xff) or (_glyph.width and _glyph.height)
    )
    # drop mac glyph metrics
    glyphs = tuple(
        _glyph.drop_properties(
            'wo_offset', 'wo_width', 'image_height',
            # not interpreted - keep?
            'top_offset', 'scalable_width'
        )
        for _glyph in glyphs
    )
    # store properties
    properties.update({
        # not overridable; also seems incorrect for system fonts
        #'spacing': 'monospace' if fontrec.fontType.fixed_width else 'proportional',
        'default-char': 'missing',
        'ascent': fontrec.ascent,
        'descent': fontrec.descent,
        'leading': fontrec.leading,
        'offset': Coord(fontrec.kernMax, -fontrec.descent),
    })
    return Font(glyphs, properties=properties)
| [
"logging.warning",
"logging.error"
] | [((21288, 21354), 'logging.warning', 'logging.warning', (['"""sfnt resources (vector or bitmap) not supported"""'], {}), "('sfnt resources (vector or bitmap) not supported')\n", (21303, 21354), False, 'import logging\n'), ((23209, 23252), 'logging.error', 'logging.error', (['"""Could not load font: %s"""', 'e'], {}), "('Could not load font: %s', e)\n", (23222, 23252), False, 'import logging\n')] |
import numpy as np
from typing import List, Dict, Tuple
def get_metrics(
    y_pred=None,
    y_true=None,
    metrics: List[str] = ("Accuracy",),
    classes: List[str] = ("Ham", "Spam"),
) -> Dict:
    """Compute the requested classification metrics.

    Args:
        y_pred: predicted labels, as an ndarray or anything with .to_numpy()
            (e.g. a pandas Series).
        y_true: true labels, same convention as y_pred.
        metrics: names of metrics to compute; any of "Accuracy",
            "Precision", "Recall", "F1", "Confusion Matrix".
        classes: class names, used to size the confusion matrix.

    Returns:
        dict mapping each requested metric name to its value.

    Raises:
        ValueError: if an unknown metric name is requested.
    """
    # note: defaults are tuples, not lists -- mutable default arguments
    # are shared between calls and are an anti-pattern.
    if not isinstance(y_pred, np.ndarray):
        y_pred = y_pred.to_numpy()
    if not isinstance(y_true, np.ndarray):
        y_true = y_true.to_numpy()
    results = {}
    for metric in metrics:
        if metric == "Accuracy":
            results[metric] = accuracy(y_pred, y_true)
        elif metric == "Precision":
            results[metric] = precision(y_pred, y_true)
        elif metric == "Recall":
            results[metric] = recall(y_pred, y_true)
        elif metric == "F1":
            results[metric] = f1_score(y_pred, y_true)
        elif metric == "Confusion Matrix":
            results[metric] = confusion_matrix(y_pred, y_true, classes)
        else:
            raise ValueError("Invalid Metric")
    return results
def accuracy(y_pred: np.ndarray, y_true: np.ndarray) -> float:
    """Fraction of predictions that match the true labels."""
    return np.equal(y_pred, y_true).mean()
def precision(y_pred: np.ndarray, y_true: np.ndarray) -> float:
    """
    Compute the precision score for binary 0/1 labels.

    Precision = true_pos / (true_pos + false_pos); a small epsilon in the
    denominator guards against division by zero when nothing is predicted
    positive.

    Args:
        y_true: The true labels.
        y_pred: The predicted labels.
    Returns:
        Precision Score.
    """
    hits = np.sum(y_true * y_pred)
    predicted_positive = np.sum(y_pred)
    return hits / (predicted_positive + 1e-8)
def recall(y_pred: np.ndarray, y_true: np.ndarray) -> float:
    """
    Compute the recall score for binary 0/1 labels.

    Recall = true_pos / (true_pos + false_neg).  A small epsilon is added
    to the denominator -- consistent with precision() -- so that an input
    with no positive true labels yields 0.0 instead of a division-by-zero
    warning and a nan.

    Args:
        y_true: The true labels.
        y_pred: The predicted labels.
    Returns:
        Recall Score.
    """
    true_pos = np.sum(y_pred * y_true)
    return true_pos / (np.sum(y_true) + 1e-8)
def f1_score(y_pred: np.ndarray, y_true: np.ndarray) -> float:
    """
    Compute the F1 score, the harmonic mean of precision and recall.

    F1 = 2 * precision * recall / (precision + recall)

    Args:
        y_true: The true labels.
        y_pred: The predicted labels.
    Returns:
        F1 Score.
    """
    prec = precision(y_pred, y_true)
    rec = recall(y_pred, y_true)
    return 2 * (prec * rec) / (prec + rec)
def confusion_matrix(y_pred: np.ndarray, y_true: np.ndarray, classes: List[str]) -> np.ndarray:
    """Tally (true, predicted) label pairs into a len(classes) x len(classes) matrix.

    Rows index the true class, columns the predicted class.
    """
    size = len(classes)
    matrix = np.zeros((size, size))
    for i, predicted in enumerate(y_pred):
        matrix[y_true[i]][predicted] += 1
    return matrix
| [
"numpy.mean",
"numpy.sum"
] | [((1035, 1060), 'numpy.mean', 'np.mean', (['(y_pred == y_true)'], {}), '(y_pred == y_true)\n', (1042, 1060), True, 'import numpy as np\n'), ((1366, 1389), 'numpy.sum', 'np.sum', (['(y_pred * y_true)'], {}), '(y_pred * y_true)\n', (1372, 1389), True, 'import numpy as np\n'), ((1729, 1752), 'numpy.sum', 'np.sum', (['(y_pred * y_true)'], {}), '(y_pred * y_true)\n', (1735, 1752), True, 'import numpy as np\n'), ((1776, 1790), 'numpy.sum', 'np.sum', (['y_true'], {}), '(y_true)\n', (1782, 1790), True, 'import numpy as np\n'), ((1413, 1427), 'numpy.sum', 'np.sum', (['y_pred'], {}), '(y_pred)\n', (1419, 1427), True, 'import numpy as np\n')] |
from gym_duckietown.envs.duckietown_env import DuckietownEnv
from sb_code.wrapper import NormalizeWrapper, ResizeWrapper, RewardWrapper, FinalLayerObservationWrapper, \
DiscreteWrapper, PositiveVelocityActionWrapper, InfoWrapperEval
from aido_code.reward_wrappers import DtRewardPosAngle, DtRewardVelocity
from stable_baselines3.common.evaluation import evaluate_policy
from stable_baselines3 import SAC, A2C, DQN, PPO
import os.path as osp
from stable_baselines3.common.env_util import make_vec_env
from stable_baselines3.common.vec_env import VecNormalize, VecFrameStack, VecTransposeImage
from global_configuration import PROJECT_PATH
import json
# Below 2 lines is for Windows 10 Environment. Comment if running on other OS
import os
# allow duplicate OpenMP runtimes (a common torch/OpenCV clash on Windows)
os.environ['KMP_DUPLICATE_LIB_OK']='True'
import gc
# directory holding the trained run: config.json, checkpoints under
# phong_best/, and (optionally) vec_normalization.pkl
results_dir = osp.join(PROJECT_PATH, "results", "dqn", "2021-04-20_dqn", "2021-04-20_23-38-53_dqn") # original map1 train again
# environment seed used for this evaluation run
SEED = 3
def setup_env(custom_params):
    """Build the Duckietown evaluation environment described by `custom_params`.

    Expects keys: 'map', 'discrete', 'USING_VAE', 'VAE_LATENT_DIM',
    'FRAME_STACK', 'USING_NORMALIZATION'.

    NOTE: the wrapper order below is significant -- resize first, then the
    optional discretisation / VAE encoding, then vectorise, frame-stack,
    transpose, and finally load the saved VecNormalize statistics.
    """
    env = DuckietownEnv(
        map_name=custom_params['map'],
        domain_rand=False,
        draw_bbox=False,
        max_steps=1500,
        seed=SEED
    )
    env = ResizeWrapper(env, shape=(60, 80, 3))
    if custom_params['discrete']:
        env = DiscreteWrapper(env)
    if custom_params['USING_VAE']:
        env = NormalizeWrapper(env) # No need to use normalization if image
        env = FinalLayerObservationWrapper(env, latent_dim=custom_params['VAE_LATENT_DIM'], map=custom_params['map'])
    # Step 3.b. To make Vectorized Environment to be able to use Normalize or FramStack (Optional)
    env = make_vec_env(lambda: env, n_envs=1)
    # Step 3.b Passing through Normalization and stack frame (Optional)
    env = VecFrameStack(env, n_stack=custom_params['FRAME_STACK']) # Use 1 for now because we use image
    if not custom_params['USING_VAE']:
        env = VecTransposeImage(env) # Uncomment if using 3d obs
    if custom_params['USING_NORMALIZATION']:
        # reuse the normalization statistics saved during training
        env = VecNormalize.load(osp.join(results_dir, "vec_normalization.pkl"), env)
    return env
# Evaluation driver: for every checkpoint in <results_dir>/phong_best,
# rebuild the env, load the agent, roll out one deterministic episode and
# append the episode reward and length to all_results_<SEED>.txt.
with open(osp.join(results_dir, "config.json"), 'r') as f:
    custom_params = json.load(f)
txt_file = open(f'all_results_{SEED}.txt', 'w')
for file in os.listdir(osp.join(results_dir, "phong_best")):
    env = setup_env(custom_params=custom_params)
    model_path = osp.join(results_dir, "phong_best", file)
    # Load the agent
    if custom_params['algo'] == 'sac':
        model = SAC.load(model_path)
    elif custom_params['algo'] == 'a2c':
        model = A2C.load(model_path)
    elif custom_params['algo'] == 'dqn':
        #model = DQN.load(osp.join(results_dir, "best_model", "best_model.zip"), env=env)
        model = DQN.load(model_path, env=env)
        #model = DQN.load(osp.join(results_dir, "rl_model_420000_steps.zip"), env=env)
    elif custom_params['algo'] == 'ppo':
        #model = PPO.load(osp.join(results_dir, "best_model", "best_model"), env=env, seed=custom_params['seed'])
        model = PPO.load(model_path, env=env)
    else:
        raise ValueError("Error model")
    # Load the saved statistics
    # do not update them at test time
    env.training = False
    # reward normalization is not needed at test time
    env.norm_reward = False
    obs = env.reset()
    steps = 0
    rewards = 0
    done, state = False, None
    # single deterministic episode rollout
    while True:
        # Get action
        action, state = model.predict(obs, state=state, deterministic=True)
        obs, reward, done, info = env.step(action)
        # NOTE(review): computed but unused -- presumably for debugging
        discrete_action_tobe_wrote = env.unwrapped.envs[0].env.action_list[int(action)]
        steps += 1
        rewards += reward
        if done:
            break
    txt_file.write(f"Seed {SEED}\tFile: {file}\tReward: {rewards}\tSteps: {steps}\n")
    print(f'Seed {SEED} File {file} steps {steps} and rewards {rewards}')
    print('-------')
    # free model and env before the next checkpoint to limit memory growth
    del model
    del env
    gc.collect()
txt_file.close() | [
"stable_baselines3.common.vec_env.VecFrameStack",
"sb_code.wrapper.FinalLayerObservationWrapper",
"stable_baselines3.SAC.load",
"stable_baselines3.A2C.load",
"os.path.join",
"stable_baselines3.DQN.load",
"sb_code.wrapper.NormalizeWrapper",
"sb_code.wrapper.DiscreteWrapper",
"stable_baselines3.common... | [((811, 900), 'os.path.join', 'osp.join', (['PROJECT_PATH', '"""results"""', '"""dqn"""', '"""2021-04-20_dqn"""', '"""2021-04-20_23-38-53_dqn"""'], {}), "(PROJECT_PATH, 'results', 'dqn', '2021-04-20_dqn',\n '2021-04-20_23-38-53_dqn')\n", (819, 900), True, 'import os.path as osp\n'), ((977, 1089), 'gym_duckietown.envs.duckietown_env.DuckietownEnv', 'DuckietownEnv', ([], {'map_name': "custom_params['map']", 'domain_rand': '(False)', 'draw_bbox': '(False)', 'max_steps': '(1500)', 'seed': 'SEED'}), "(map_name=custom_params['map'], domain_rand=False, draw_bbox=\n False, max_steps=1500, seed=SEED)\n", (990, 1089), False, 'from gym_duckietown.envs.duckietown_env import DuckietownEnv\n'), ((1195, 1232), 'sb_code.wrapper.ResizeWrapper', 'ResizeWrapper', (['env'], {'shape': '(60, 80, 3)'}), '(env, shape=(60, 80, 3))\n', (1208, 1232), False, 'from sb_code.wrapper import NormalizeWrapper, ResizeWrapper, RewardWrapper, FinalLayerObservationWrapper, DiscreteWrapper, PositiveVelocityActionWrapper, InfoWrapperEval\n'), ((1645, 1681), 'stable_baselines3.common.env_util.make_vec_env', 'make_vec_env', (['(lambda : env)'], {'n_envs': '(1)'}), '(lambda : env, n_envs=1)\n', (1657, 1681), False, 'from stable_baselines3.common.env_util import make_vec_env\n'), ((1764, 1820), 'stable_baselines3.common.vec_env.VecFrameStack', 'VecFrameStack', (['env'], {'n_stack': "custom_params['FRAME_STACK']"}), "(env, n_stack=custom_params['FRAME_STACK'])\n", (1777, 1820), False, 'from stable_baselines3.common.vec_env import VecNormalize, VecFrameStack, VecTransposeImage\n'), ((2188, 2200), 'json.load', 'json.load', (['f'], {}), '(f)\n', (2197, 2200), False, 'import json\n'), ((2274, 2309), 'os.path.join', 'osp.join', (['results_dir', '"""phong_best"""'], {}), "(results_dir, 'phong_best')\n", (2282, 2309), True, 'import os.path as osp\n'), ((2378, 2419), 'os.path.join', 'osp.join', (['results_dir', '"""phong_best"""', 'file'], {}), "(results_dir, 'phong_best', file)\n", 
(2386, 2419), True, 'import os.path as osp\n'), ((3920, 3932), 'gc.collect', 'gc.collect', ([], {}), '()\n', (3930, 3932), False, 'import gc\n'), ((1283, 1303), 'sb_code.wrapper.DiscreteWrapper', 'DiscreteWrapper', (['env'], {}), '(env)\n', (1298, 1303), False, 'from sb_code.wrapper import NormalizeWrapper, ResizeWrapper, RewardWrapper, FinalLayerObservationWrapper, DiscreteWrapper, PositiveVelocityActionWrapper, InfoWrapperEval\n'), ((1354, 1375), 'sb_code.wrapper.NormalizeWrapper', 'NormalizeWrapper', (['env'], {}), '(env)\n', (1370, 1375), False, 'from sb_code.wrapper import NormalizeWrapper, ResizeWrapper, RewardWrapper, FinalLayerObservationWrapper, DiscreteWrapper, PositiveVelocityActionWrapper, InfoWrapperEval\n'), ((1430, 1538), 'sb_code.wrapper.FinalLayerObservationWrapper', 'FinalLayerObservationWrapper', (['env'], {'latent_dim': "custom_params['VAE_LATENT_DIM']", 'map': "custom_params['map']"}), "(env, latent_dim=custom_params['VAE_LATENT_DIM'\n ], map=custom_params['map'])\n", (1458, 1538), False, 'from sb_code.wrapper import NormalizeWrapper, ResizeWrapper, RewardWrapper, FinalLayerObservationWrapper, DiscreteWrapper, PositiveVelocityActionWrapper, InfoWrapperEval\n'), ((1911, 1933), 'stable_baselines3.common.vec_env.VecTransposeImage', 'VecTransposeImage', (['env'], {}), '(env)\n', (1928, 1933), False, 'from stable_baselines3.common.vec_env import VecNormalize, VecFrameStack, VecTransposeImage\n'), ((2119, 2155), 'os.path.join', 'osp.join', (['results_dir', '"""config.json"""'], {}), "(results_dir, 'config.json')\n", (2127, 2155), True, 'import os.path as osp\n'), ((2497, 2517), 'stable_baselines3.SAC.load', 'SAC.load', (['model_path'], {}), '(model_path)\n', (2505, 2517), False, 'from stable_baselines3 import SAC, A2C, DQN, PPO\n'), ((2039, 2085), 'os.path.join', 'osp.join', (['results_dir', '"""vec_normalization.pkl"""'], {}), "(results_dir, 'vec_normalization.pkl')\n", (2047, 2085), True, 'import os.path as osp\n'), ((2575, 2595), 
'stable_baselines3.A2C.load', 'A2C.load', (['model_path'], {}), '(model_path)\n', (2583, 2595), False, 'from stable_baselines3 import SAC, A2C, DQN, PPO\n'), ((2743, 2772), 'stable_baselines3.DQN.load', 'DQN.load', (['model_path'], {'env': 'env'}), '(model_path, env=env)\n', (2751, 2772), False, 'from stable_baselines3 import SAC, A2C, DQN, PPO\n'), ((3031, 3060), 'stable_baselines3.PPO.load', 'PPO.load', (['model_path'], {'env': 'env'}), '(model_path, env=env)\n', (3039, 3060), False, 'from stable_baselines3 import SAC, A2C, DQN, PPO\n')] |
import tkinter as tk
import psycopg2
import pickle
import time, calendar, requests, datetime
# Connect to the warehouse database. The script keeps running even if the
# connection fails.
try:
    conn = psycopg2.connect(database="postgres", user="postgres", password="<PASSWORD>", host="10.10.100.120")
    print("connected")
except Exception:
    # BUG FIX: was a bare `except:`, which also swallows SystemExit and
    # KeyboardInterrupt; narrowed to Exception.
    # NOTE(review): on failure `conn` stays undefined, so later
    # `conn.cursor()` calls raise NameError -- confirm this is intended.
    print ("I am unable to connect to the database")
# in-memory motion bookkeeping shared with capture_motion()
motions = []
stationMotions = {}
lastMotion = {}
import cv2
import threading
import schedule
print(cv2.__version__)
def maintenance():
    """Background poller: every ~2 minutes, read the last-synced timestamp
    from the `autoid` table and fetch direct-shipment records from the
    deepblu API for the elapsed window.  The database writes that would
    persist the results are currently commented out.
    """
    print("waiting...")
    while True:
        time.sleep(119)
        cur =conn.cursor()
        autoid = str("Select value from autoid WHERE id = 1 limit 1")
        autoids = cur.execute(autoid)
        autoids = cur.fetchall()
        cur.close()
        auditval = ''
        # the query is LIMIT 1, so this loop keeps the single row's value
        for autoid in autoids:
            last_Date_from = autoid[0].strip().split(' ')
            auditval = autoid[0]
        last_time = (int(datetime.datetime.strptime(auditval.strip()+',000', "%Y-%m-%d %H:%M:%S,%f").timestamp()))
        # advance the window end by 120 seconds past the last sync
        last_time = last_time + 120
        if int(time.time()) > int(last_time):
            last_Date_to = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(int(last_time)))
            last_Date_to = last_Date_to.split(' ')
            # if the window crosses midnight, start the new day at 00:00:00
            if last_Date_from[0] != last_Date_to[0]:
                date_send = last_Date_to[0]
                date_from_time = "00:00:00"
                date_to_time = last_Date_to[1]
            else:
                date_send = last_Date_from[0]
                date_from_time = last_Date_from[1]
                date_to_time = last_Date_to[1]
            try:
                response = requests.get('https://deepbluapi.gocontec.com/autoreceive/direct-shipments?_format=json&date='+date_send+'&s_time='+date_from_time+'&e_time='+date_to_time+'',
                                        headers={'Content-Type': 'application/json',
                                                 'Authorization': 'Basic QVVUT1JFQ0VJVkU6YXV0b0AxMjM='}
                                        )
                if response.status_code == 200:
                    data_time = []
                    if (response.content.decode("utf-8") != ""):
                        result = response.json()
                        s = 0
                        for value in result:
                            s = 1
                            data_time = value["Scan Timestamp"]
                            # cur =conn.cursor()
                            # cur.execute("INSERT INTO directshipping (scantime, station, operator, product, eventtype, shipid, errorcode, errormessage, siteid) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)",(value["Scan Timestamp"], value["Work Station ID"], value["Operator ID"], value["Product ID"], value["Event Type"], value["Shipment ID"], value["Error Code"], value["Error Message"], value["Site ID"]))
                            # conn.commit()
                            # cur.close()
                        # NOTE(review): the sync marker below is only printed,
                        # never written back while the UPDATE is commented out,
                        # so the same window is re-fetched every cycle.
                        upautoidlastValue = date_send +' '+date_to_time
                        print(upautoidlastValue)
                        # cur =conn.cursor()
                        # qry = str("Update autoid SET value = '"+upautoidlastValue+"' WHERE id = 1")
                        # cur.execute(qry)
                        # conn.commit()
                        # cur.close()
            except:
                # NOTE(review): bare except also hides programming errors in
                # the block above, not just network failures
                print("Unable to connect deepblu")
def job():
    """Daily scheduled hook; currently only logs that it ran.

    NOTE(review): the original DB refresh (``select * from test_loop(1)``)
    is commented out, so this is a no-op apart from the print.
    """
    print("I'm working...")
# Register the daily job at 00:05; it is executed by the pendingrun loop.
schedule.every().day.at("00:05").do(job)
def pendingrun():
    """Drive the `schedule` library: run due jobs, polling once per second."""
    while True:
        schedule.run_pending()
        time.sleep(1)
# Daemon threads so both workers die with the main process.
threading.Thread(target=maintenance, daemon=True).start()
threading.Thread(target=pendingrun, daemon=True).start()
def capture_motion(motion):
    """Record a motion event for `motion` (an area/station key), throttled
    to at most one event per 5 seconds per area.

    NOTE(review): reads/writes the module globals ``motions``,
    ``stationMotions`` and ``lastMotion`` (not visible in this chunk);
    ``lastMotion`` is initialised here but never read afterwards.
    The DB insert is commented out, so only the in-memory timestamp
    is updated.
    """
    ts = int(time.time())
    if len(motions) > 0:
        if motion not in stationMotions:
            stationMotions[motion] = 0
        if motion not in lastMotion:
            lastMotion[motion] = 0
        # Only fire when the last event for this area is older than 5 s.
        if stationMotions[motion] < (ts-5):
            # cur =conn.cursor()
            # #print("INSERT INTO motions (area, timeadded, warehouse, station_type) VALUES (%s, %s, %s, %s)",(str(motion), ts, 1, 1 ))
            # cur.execute("INSERT INTO motions (area, timeadded, warehouse, station_type) VALUES (%s, %s, %s, %s)",(str(motion), ts, 1, 1 ))
            # conn.commit()
            # cur.close()
            #print()
            stationMotions[motion] = ts
def get_correct_path(relative_path):
    """Resolve *relative_path* against the application directory.

    The absolute working directory has any ``/dist`` segment stripped so
    paths keep working when the app runs from a PyInstaller ``dist`` folder.
    """
    base_dir = os.path.abspath(os.curdir)
    base_dir = base_dir.replace('/dist', "")
    return os.path.join(base_dir, relative_path)
# define a video capture object
from vidgear.gears import WriteGear
# NOTE(review): cv2 is used here but only imported two lines below; this
# works only because an earlier section of this script already imported
# cv2.  Camera credentials are hard-coded in the RTSP URL.
cap = cv2.VideoCapture("rtsp://admin:3J7Bm!j@@10.10.153.21:8221/Streaming/Channels/102/picture?subtype=1")
import cv2
import numpy as np
import os
import time
import random
from os.path import isfile, join
img_array = []  # frames buffered for the writer loop below
fourcc = cv2.VideoWriter_fourcc(*'XVID')
out = cv2.VideoWriter('output222.avi', fourcc, 30.0, (800,480))
pathIn= get_correct_path('static/')
pathOut = get_correct_path('video.mp4')
fps = 25.0
# def convert_frames_to_video(pathIn,pathOut,fps):
#     frame_array = []
#     files = [f for f in os.listdir(pathIn) if isfile(join(pathIn, f))]
#     #for sorting the file names properly
#     #files.sort(key = lambda x: int(x[5:-4]))
#     for i in range(len(files)):
#         filename=pathIn + files[i]
#         #reading each files
#         img = cv2.imread(filename)
#         print(filename)
#         height, width, layers = img.shape
#         size = (width,height)
#         print(size)
#         #inserting the frames into an image array
#         frame_array.append(img)
#     out = cv2.VideoWriter(pathOut,cv2.VideoWriter_fourcc(*'mp4v'), fps, size)
#     for i in range(len(frame_array)):
#         # writing to a image array
#         out.write(frame_array[i])
#     out.release()
# convert_frames_to_video(pathIn, pathOut, fps)
# Re-open the writer for the mp4 output (replaces the avi writer above).
out = cv2.VideoWriter()
out.open('output.mp4',fourcc,fps,(720,720),True)
while cap.isOpened():
    ret,image = cap.read()
    if image is None:
        break
    height, width = image.shape[:2]
    # Mask everything outside the hand-tuned polygon (region of interest).
    mask = np.zeros((height, width), dtype=np.uint8)
    points = np.array([[[305,80],[-100,493],[1123,513],[897,80],[700,80],[613,80]]])
    cv2.fillPoly(mask, points, (255))
    res = cv2.bitwise_and(image,image,mask = mask)
    rect = cv2.boundingRect(points) # returns (x,y,w,h) of the rect
    cropped = res[rect[1]: rect[1] + rect[3], rect[0]: rect[0] + rect[2]]
    height2, width2 = res.shape[:2]
    img_array.append(res)
    # NOTE(review): img_array is cleared at the end of every iteration,
    # so this inner loop only ever writes the single frame just appended.
    for i in range(len(img_array)):
        if img_array[i] is None:
            break
        out.write(img_array[i])
    gmt = time.gmtime()
    # NOTE(review): `calendar` is used here but its import is not visible
    # in this chunk -- presumably imported earlier in the file.
    ts = calendar.timegm(gmt)
    # Timestamp + random suffix keeps snapshot filenames unique.
    fillenameImage = str(str(ts)+'-'+str(random.randint(100000,999999)))
    cv2.imwrite(get_correct_path("static/%s.png") % fillenameImage, image)
    img = cv2.imread(get_correct_path("static/%s.png") % fillenameImage)
    height, width, layers = (720,720,0)
    size = (width,height)
    out.write(img)
    img_array = []
    print('try')
cap.release()
cv2.destroyAllWindows()
#out = cv2.VideoWriter('hwyeni.mp4',cv2.VideoWriter_fourcc(-), 24, size)
# NOTE(review): img_array is empty here, so this trailing writer loop
# writes nothing before releasing.
fourcc = cv2.VideoWriter_fourcc(*'XVID')
out = cv2.VideoWriter('output222.avi', fourcc, 30.0, (800,480))
for i in range(len(img_array)):
    if img_array[i] is None:
        break
    out.write(img_array[i])
out.release()
| [
"psycopg2.connect",
"time.sleep",
"calendar.timegm",
"numpy.array",
"cv2.destroyAllWindows",
"cv2.VideoWriter",
"schedule.every",
"cv2.VideoWriter_fourcc",
"random.randint",
"cv2.fillPoly",
"requests.get",
"time.gmtime",
"time.time",
"schedule.run_pending",
"os.path.join",
"cv2.bitwise... | [((4831, 4941), 'cv2.VideoCapture', 'cv2.VideoCapture', (['"""rtsp://admin:3J7Bm!j@@10.10.153.21:8221/Streaming/Channels/102/picture?subtype=1"""'], {}), "(\n 'rtsp://admin:3J7Bm!j@@10.10.153.21:8221/Streaming/Channels/102/picture?subtype=1'\n )\n", (4847, 4941), False, 'import cv2\n'), ((5055, 5086), 'cv2.VideoWriter_fourcc', 'cv2.VideoWriter_fourcc', (["*'XVID'"], {}), "(*'XVID')\n", (5077, 5086), False, 'import cv2\n'), ((5093, 5151), 'cv2.VideoWriter', 'cv2.VideoWriter', (['"""output222.avi"""', 'fourcc', '(30.0)', '(800, 480)'], {}), "('output222.avi', fourcc, 30.0, (800, 480))\n", (5108, 5151), False, 'import cv2\n'), ((6097, 6114), 'cv2.VideoWriter', 'cv2.VideoWriter', ([], {}), '()\n', (6112, 6114), False, 'import cv2\n'), ((7324, 7347), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (7345, 7347), False, 'import cv2\n'), ((7431, 7462), 'cv2.VideoWriter_fourcc', 'cv2.VideoWriter_fourcc', (["*'XVID'"], {}), "(*'XVID')\n", (7453, 7462), False, 'import cv2\n'), ((7469, 7527), 'cv2.VideoWriter', 'cv2.VideoWriter', (['"""output222.avi"""', 'fourcc', '(30.0)', '(800, 480)'], {}), "('output222.avi', fourcc, 30.0, (800, 480))\n", (7484, 7527), False, 'import cv2\n'), ((107, 211), 'psycopg2.connect', 'psycopg2.connect', ([], {'database': '"""postgres"""', 'user': '"""postgres"""', 'password': '"""<PASSWORD>"""', 'host': '"""10.10.100.120"""'}), "(database='postgres', user='postgres', password=\n '<PASSWORD>', host='10.10.100.120')\n", (123, 211), False, 'import psycopg2\n'), ((4726, 4756), 'os.path.join', 'os.path.join', (['p', 'relative_path'], {}), '(p, relative_path)\n', (4738, 4756), False, 'import os\n'), ((6304, 6345), 'numpy.zeros', 'np.zeros', (['(height, width)'], {'dtype': 'np.uint8'}), '((height, width), dtype=np.uint8)\n', (6312, 6345), True, 'import numpy as np\n'), ((6359, 6445), 'numpy.array', 'np.array', (['[[[305, 80], [-100, 493], [1123, 513], [897, 80], [700, 80], [613, 80]]]'], {}), '([[[305, 80], [-100, 
493], [1123, 513], [897, 80], [700, 80], [613,\n 80]]])\n', (6367, 6445), True, 'import numpy as np\n'), ((6435, 6466), 'cv2.fillPoly', 'cv2.fillPoly', (['mask', 'points', '(255)'], {}), '(mask, points, 255)\n', (6447, 6466), False, 'import cv2\n'), ((6479, 6519), 'cv2.bitwise_and', 'cv2.bitwise_and', (['image', 'image'], {'mask': 'mask'}), '(image, image, mask=mask)\n', (6494, 6519), False, 'import cv2\n'), ((6532, 6556), 'cv2.boundingRect', 'cv2.boundingRect', (['points'], {}), '(points)\n', (6548, 6556), False, 'import cv2\n'), ((478, 493), 'time.sleep', 'time.sleep', (['(119)'], {}), '(119)\n', (488, 493), False, 'import time\n'), ((3689, 3711), 'schedule.run_pending', 'schedule.run_pending', ([], {}), '()\n', (3709, 3711), False, 'import schedule\n'), ((3720, 3733), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (3730, 3733), False, 'import time\n'), ((3736, 3785), 'threading.Thread', 'threading.Thread', ([], {'target': 'maintenance', 'daemon': '(True)'}), '(target=maintenance, daemon=True)\n', (3752, 3785), False, 'import threading\n'), ((3794, 3842), 'threading.Thread', 'threading.Thread', ([], {'target': 'pendingrun', 'daemon': '(True)'}), '(target=pendingrun, daemon=True)\n', (3810, 3842), False, 'import threading\n'), ((3896, 3907), 'time.time', 'time.time', ([], {}), '()\n', (3905, 3907), False, 'import time\n'), ((6860, 6873), 'time.gmtime', 'time.gmtime', ([], {}), '()\n', (6871, 6873), False, 'import time\n'), ((6887, 6907), 'calendar.timegm', 'calendar.timegm', (['gmt'], {}), '(gmt)\n', (6902, 6907), False, 'import time, calendar, requests, datetime\n'), ((4673, 4693), 'os.path.abspath', 'os.path.abspath', (['"""."""'], {}), "('.')\n", (4688, 4693), False, 'import os\n'), ((1005, 1016), 'time.time', 'time.time', ([], {}), '()\n', (1014, 1016), False, 'import time\n'), ((1578, 1867), 'requests.get', 'requests.get', (["(\n 'https://deepbluapi.gocontec.com/autoreceive/direct-shipments?_format=json&date='\n + date_send + '&s_time=' + date_from_time 
+ '&e_time=' + date_to_time + ''\n )"], {'headers': "{'Content-Type': 'application/json', 'Authorization':\n 'Basic QVVUT1JFQ0VJVkU6YXV0b0AxMjM='}"}), "(\n 'https://deepbluapi.gocontec.com/autoreceive/direct-shipments?_format=json&date='\n + date_send + '&s_time=' + date_from_time + '&e_time=' + date_to_time +\n '', headers={'Content-Type': 'application/json', 'Authorization':\n 'Basic QVVUT1JFQ0VJVkU6YXV0b0AxMjM='})\n", (1590, 1867), False, 'import time, calendar, requests, datetime\n'), ((3605, 3621), 'schedule.every', 'schedule.every', ([], {}), '()\n', (3619, 3621), False, 'import schedule\n'), ((6953, 6983), 'random.randint', 'random.randint', (['(100000)', '(999999)'], {}), '(100000, 999999)\n', (6967, 6983), False, 'import random\n')] |
# -*- coding: utf-8 -*-
"""
Created on Thu Jul 11 19:06:49 2019
@author: WaterWood
"""
import time
import tensorflow as tf
import os.path as osp
import pandas as pd
from tqdm import tqdm
from utils import give_fake_data, ITER_NUMS
from tensorflow.contrib.slim.nets import resnet_v2 as models
class ModelSpeed(object):
    """Benchmark inference speed of a TF-slim ResNet-v2 variant.

    Builds the graph named by ``model_name`` from
    ``tensorflow.contrib.slim.nets`` and restores its checkpoint from
    ``./models/tf/<model_name>.ckpt``.
    """
    def __init__(self, model_name):
        # Fixed 224x224x3 input; batch dimension left dynamic.
        self.input_tensor = tf.placeholder(tf.float32, shape=(None,224,224,3), name='input_image')
        self.sess = tf.Session()
        arg_scope = models.resnet_arg_scope()
        with tf.contrib.slim.arg_scope(arg_scope):
            # 1001 classes = ImageNet 1000 + background (slim convention).
            self.net, end_points = getattr(models, model_name)(self.input_tensor, 1001, is_training=False)
        saver = tf.train.Saver()
        saver.restore(self.sess, osp.join('./models/tf/', model_name + '.ckpt'))
    def test_time(self, data):
        """Time ITER_NUMS forward passes over `data` and append the mean
        per-batch latency and FPS to the global ``model_speed_logs``.

        NOTE(review): reads the globals ``model_speed_logs``,
        ``model_name`` and ``bs`` defined in the ``__main__`` block, so
        this only works when the module is run as a script.
        """
        sum_time = 0
        sum_num = 0
        for idx in range(ITER_NUMS):
            t_start = time.time()
            self.sess.run(self.net, feed_dict={self.input_tensor: data})
            t_end = time.time()
            # First 5 iterations are warm-up and excluded from timing.
            if idx >= 5:
                sum_time += t_end - t_start
                sum_num += 1
        # experiment logs
        bs_time = sum_time / sum_num
        fps = (1 / bs_time) * data.shape[0]
        model_speed_logs.loc[model_speed_logs.shape[0], :] = [model_name, bs, bs_time, fps]
if __name__ == '__main__':
    # Models and batch sizes to benchmark.
    model_names = ['resnet_v2_50']
    batch_size = [1, 2, 4, 8]
    # One row per (model, batch size) measurement.
    model_speed_logs = pd.DataFrame(columns = ['model_name', 'bs', 'bs_time', 'fps'])
    #different models
    for model_name in model_names:
        print('-'*15, model_name, '-'*15)
        model_speed = ModelSpeed(model_name)
        time.sleep(1)
        # different batch size
        for bs in tqdm(batch_size):
            fake_input_data_cl, fake_input_data_cf = give_fake_data(bs)
            # Only the channels-last tensor is used here.
            model_speed.test_time(fake_input_data_cl)
        model_speed.sess.close()
    model_speed_logs.to_csv('./result/tf_model_speed_experiments.csv', index = False)
"tensorflow.contrib.slim.arg_scope",
"utils.give_fake_data",
"tensorflow.placeholder",
"tensorflow.train.Saver",
"tensorflow.Session",
"tqdm.tqdm",
"time.sleep",
"os.path.join",
"tensorflow.contrib.slim.nets.resnet_v2.resnet_arg_scope",
"pandas.DataFrame",
"time.time"
] | [((1559, 1619), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': "['model_name', 'bs', 'bs_time', 'fps']"}), "(columns=['model_name', 'bs', 'bs_time', 'fps'])\n", (1571, 1619), True, 'import pandas as pd\n'), ((404, 477), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {'shape': '(None, 224, 224, 3)', 'name': '"""input_image"""'}), "(tf.float32, shape=(None, 224, 224, 3), name='input_image')\n", (418, 477), True, 'import tensorflow as tf\n'), ((496, 508), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (506, 508), True, 'import tensorflow as tf\n'), ((530, 555), 'tensorflow.contrib.slim.nets.resnet_v2.resnet_arg_scope', 'models.resnet_arg_scope', ([], {}), '()\n', (553, 555), True, 'from tensorflow.contrib.slim.nets import resnet_v2 as models\n'), ((733, 749), 'tensorflow.train.Saver', 'tf.train.Saver', ([], {}), '()\n', (747, 749), True, 'import tensorflow as tf\n'), ((1785, 1798), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (1795, 1798), False, 'import time\n'), ((1850, 1866), 'tqdm.tqdm', 'tqdm', (['batch_size'], {}), '(batch_size)\n', (1854, 1866), False, 'from tqdm import tqdm\n'), ((570, 606), 'tensorflow.contrib.slim.arg_scope', 'tf.contrib.slim.arg_scope', (['arg_scope'], {}), '(arg_scope)\n', (595, 606), True, 'import tensorflow as tf\n'), ((784, 830), 'os.path.join', 'osp.join', (['"""./models/tf/"""', "(model_name + '.ckpt')"], {}), "('./models/tf/', model_name + '.ckpt')\n", (792, 830), True, 'import os.path as osp\n'), ((981, 992), 'time.time', 'time.time', ([], {}), '()\n', (990, 992), False, 'import time\n'), ((1088, 1099), 'time.time', 'time.time', ([], {}), '()\n', (1097, 1099), False, 'import time\n'), ((1922, 1940), 'utils.give_fake_data', 'give_fake_data', (['bs'], {}), '(bs)\n', (1936, 1940), False, 'from utils import give_fake_data, ITER_NUMS\n')] |
# -*- coding: utf-8 -*-
'''
FileExplorer
============
The :class:`FileExplorer` widget is an advanced file browser. You use it
similarly to FileChooser usage.
It provides a shortcut bar with links to special and system directories.
When touching next to a shortcut in the links bar, it'll expand and show
all the directories within that directory. It also facilitates specifying
custom paths to be added to the shortcuts list.
To create a FileExplorer which prints the currently selected file as well as
the current text in the filename field when 'Select' is pressed, with
a shortcut to the Documents directory added to the favorites bar::
:Events:
`on_canceled`:
Fired when the `Cancel` buttons `on_release` event is called.
`on_success`:
Fired when the `Select` buttons `on_release` event is called.
    `on_submit`:
        Fired when a file has been selected with a double-tap.
.. image:: _static/filebrowser.png
:align: right
'''
__all__ = ('FileExplorer', )
__version__ = '0.1'
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.treeview import TreeViewLabel, TreeView
try:
from kivy.garden.filechooserthumbview import FileChooserThumbView as \
IconView
except:
pass
from kivy.properties import (ObjectProperty, StringProperty, OptionProperty, NumericProperty,
ListProperty, BooleanProperty)
from kivy.lang import Builder
from kivy.utils import platform
from kivy.clock import Clock
from kivy.compat import PY2
from kivy.uix.filechooser import FileChooserLayout
from kivy.uix.dropdown import DropDown
from kivy.uix.button import Button
import string
import os
from os.path import sep, dirname, expanduser, isdir, join, isfile
from os import walk
from sys import getfilesystemencoding
from functools import partial
from kivymd.theming import ThemeManager
from kivyic import path
from kivyic.menu import ICDropdown
#from kivyic.debug import color_canvas
if platform == 'win':
from ctypes import windll, create_unicode_buffer
def get_home_directory():
    """Return the current user's home directory.

    On Windows, falls back to the parent directory when the expanded home
    has no ``Desktop`` subfolder; elsewhere, the path is decoded from the
    filesystem encoding when running under Python 2.
    """
    home = expanduser('~')
    if platform == 'win':
        if not isdir(join(home, 'Desktop')):
            home = dirname(home)
    elif PY2:
        home = home.decode(getfilesystemencoding())
    return home
def get_drives():
    """Enumerate mounted drives/volumes as ``(path, display_name)`` tuples.

    Windows: walks the logical-drive bitmask and reads volume labels via
    ctypes.  Linux: root, home, and anything under /mnt and /media.
    macOS/iOS: home plus everything under /Volume.
    """
    drives = []
    if platform == 'win':
        bitmask = windll.kernel32.GetLogicalDrives()
        GetVolumeInformationW = windll.kernel32.GetVolumeInformationW
        for index, letter in enumerate(string.ascii_uppercase):
            # Bit i of the mask set => drive letter i exists.
            if bitmask & (1 << index):
                label = create_unicode_buffer(64)
                drive = letter + u':'
                # The volume label is written into `label` by the call.
                GetVolumeInformationW(drive + sep, label, 64, None,
                                      None, None, None, 0)
                drives.append((drive, label.value))
    elif platform == 'linux':
        drives.append((sep, sep))
        drives.append((expanduser(u'~'), '~/'))
        for place in (sep + u'mnt', sep + u'media'):
            if isdir(place):
                for directory in next(walk(place))[1]:
                    drives.append((place + sep + directory, directory))
    elif platform == 'macosx' or platform == 'ios':
        drives.append((expanduser(u'~'), '~/'))
        vol = sep + u'Volume'
        if isdir(vol):
            for drive in next(walk(vol))[1]:
                drives.append((vol + sep + drive, drive))
    return drives
# Load the companion kv layout shipped alongside this module.
Builder.load_file(path + '/fileexplorer.kv')
class TreeLabel(TreeViewLabel):
    """Tree node that carries the filesystem path it represents and can
    dispatch an ``on_change_path`` event."""
    path = StringProperty('')
    '''Full path to the location this node points to.
    :class:`~kivy.properties.StringProperty`, defaults to ''
    '''
    def __init__(self, **kwargs):
        super(TreeLabel, self).__init__(**kwargs)
        # Register the custom event so Kivy will dispatch on_change_path.
        self.register_event_type('on_change_path')
    def on_change_path(self, *args):
        # Default handler; listeners bind to this event externally.
        pass
class LinkTree(TreeView):
    """Shortcut sidebar: Favorites, Libraries and Computer (drives) nodes."""
    _favs = ObjectProperty(None)       # 'Favorites' root node
    _computer_node = None              # 'Computer' root node
    _libs = ObjectProperty()           # 'Libraries' root node
    def fill_tree(self, fav_list):
        """Build the three top-level sections; `fav_list` is a list of
        (path, name) tuples for user favorites."""
        user_path = get_home_directory()
        self._favs = self.add_node(TreeLabel(text='Favorites', is_open=True,
                                             no_selection=True))
        self.reload_favs(fav_list)
        self._libs = self.add_node(TreeLabel(text='Libraries', is_open=True,
                                             no_selection=True))
        places = ('Documents', 'Music', 'Pictures', 'Videos')
        for place in places:
            # Only list library folders that actually exist.
            if isdir(join(user_path, place)):
                tl = TreeLabel(text=place, path=join(user_path, place))
                #tl.bind(on_change_path=app.root.update_path)
                self.add_node(tl, self._libs)
        self._computer_node = self.add_node(TreeLabel(text='Computer', is_open=True, no_selection=True))
        self._computer_node.bind(on_touch_down=self._drives_touch)
        self.reload_drives()
    def _drives_touch(self, obj, touch):
        # Refresh the drive list whenever the 'Computer' node is touched.
        if obj.collide_point(*touch.pos):
            self.reload_drives()
    def reload_drives(self):
        """Diff the current drive nodes against get_drives(): remove gone
        drives and add new ones, keyed by a text+path signature."""
        nodes = [(node, node.text + node.path) for node in \
                 self._computer_node.nodes if isinstance(node, TreeLabel)]
        sigs = [s[1] for s in nodes]
        nodes_new = []
        sig_new = []
        for path, name in get_drives():
            if platform == 'win':
                # e.g. "System (C:)" when the volume has a label.
                text = u'{}({})'.format((name + ' ') if name else '', path)
            else:
                text = name
            nodes_new.append((text, path))
            sig_new.append(text + path + sep)
        for node, sig in nodes:
            if sig not in sig_new:
                self.remove_node(node)
        for text, path in nodes_new:
            if text + path + sep not in sigs:
                tl = TreeLabel(text=text, path=path + sep)
                self.add_node(tl, self._computer_node)
    def reload_favs(self, fav_list):
        """Rebuild the Favorites section: Desktop/Downloads plus every
        existing directory from `fav_list`."""
        user_path = get_home_directory()
        favs = self._favs
        remove = []
        # Collect first, then remove: don't mutate while iterating.
        for node in self.iterate_all_nodes(favs):
            if node != favs:
                remove.append(node)
        for node in remove:
            self.remove_node(node)
        places = ('Desktop', 'Downloads')
        for place in places:
            if isdir(join(user_path, place)):
                tl = TreeLabel(text=place, path=join(user_path, place))
                self.add_node(tl, favs)
        for path, name in fav_list:
            if isdir(path):
                tl = TreeLabel(text=name, path=path)
                self.add_node(tl, favs)
    def trigger_populate(self, node):
        """Lazily add child nodes for `node`'s subdirectories on expand.

        NOTE(review): `App` is only imported inside the __main__ block
        below, so this method raises NameError if the module is imported
        rather than run as a script -- confirm intended usage.
        """
        app = App.get_running_app()
        if not node.path or node.nodes:
            return
        parent = node.path
        _next = next(walk(parent))
        if _next:
            for path in _next[1]:
                tl = TreeLabel(text=path, path=parent + sep + path)
                tl.bind(on_change_path=app.root.update_path)
                self.add_node(tl, node)
class FileLayout(FileChooserLayout):
    """List-style file view layout; entry template 'FileView' lives in
    the companion kv file."""
    VIEWNAME = 'fileview'
    _ENTRY_TEMPLATE = 'FileView'
    def __init__(self, **kwargs):
        super(FileLayout, self).__init__(**kwargs)
        # Jump back to the top whenever the directory listing is rebuilt.
        self.fbind('on_entries_cleared', self.scroll_to_top)
    def scroll_to_top(self, *args):
        self.ids.scrollview.scroll_y = 1.0
    def is_dir(self, directory, filename):
        # True when `filename` inside `directory` is a directory.
        return isdir(join(directory, filename))
    def is_file(self, directory, filename):
        # True when `filename` inside `directory` is a regular file.
        return isfile(join(directory, filename))
    # Remaining FileChooserLayout event hooks are intentionally no-ops.
    def on_entry_added(self, node, parent=None):
        pass
    def on_entries_cleared(self):
        pass
    def on_subentry_to_entry(self, subentry, entry):
        pass
    def on_remove_subentry(self, subentry, entry):
        pass
    def on_submit(self, selected, touch=None):
        pass
class FileExplorer(BoxLayout):
    """Advanced file browser widget: shortcut sidebar + file view +
    filename/filter fields.  Mirrors the FileChooserController API via
    bound properties (see _post_init)."""
    # Widget references populated from the kv layout.
    file_layout_controller = ObjectProperty()
    file_layout = ObjectProperty()
    filter_button = ObjectProperty()
    file_selection_container = ObjectProperty()
    file_browser_container = ObjectProperty()
    dropdown = ObjectProperty()
    # FIX - button width should remain constant, however it changes with menu selection
    #_filter_button_width = NumericProperty()
    __events__ = ('on_canceled', 'on_success', 'on_submit')
    select_state = OptionProperty('normal', options=('normal', 'down'))
    '''State of the 'select' button, must be one of 'normal' or 'down'.
    The state is 'down' only when the button is currently touched/clicked,
    otherwise 'normal'. This button functions as the typical Ok/Select/Save
    button.
    :data:`select_state` is an :class:`~kivy.properties.OptionProperty`.
    .. version added:: 0.1
    '''
    cancel_state = OptionProperty('normal', options=('normal', 'down'))
    '''State of the 'cancel' button, must be one of 'normal' or 'down'.
    The state is 'down' only when the button is currently touched/clicked,
    otherwise 'normal'. This button functions as the typical cancel button.
    :data:`cancel_state` is an :class:`~kivy.properties.OptionProperty`.
    .. version added:: 0.1
    '''
    select_string = StringProperty('Ok')
    '''Label of the 'select' button.
    :data:`select_string` is an :class:`~kivy.properties.StringProperty`,
    defaults to 'Ok'.
    .. version added:: 0.1
    '''
    cancel_string = StringProperty('Cancel')
    '''Label of the 'cancel' button.
    :data:`cancel_string` is an :class:`~kivy.properties.StringProperty`,
    defaults to 'Cancel'.
    .. version added:: 0.1
    '''
    filename = StringProperty('')
    '''The current text in the filename field. Read only. When multiselect is
    True, the list of selected filenames is shortened. If shortened, filename
    will contain an ellipsis.
    :data:`filename` is an :class:`~kivy.properties.StringProperty`,
    defaults to ''.
    .. version added:: 0.1
    '''
    selection = ListProperty([])
    '''Read-only :class:`~kivy.properties.ListProperty`.
    Contains the list of files that are currently selected in the current tab.
    See :kivy_fchooser:`kivy.uix.filechooser.FileChooserController.selection`.
    .. version added:: 0.1
    '''
    path = StringProperty(u'/')
    '''
    :class:`~kivy.properties.StringProperty`, defaults to the current working
    directory as a unicode string. It specifies the path on the filesystem that
    browser should refer to.
    See :kivy_fchooser:`kivy.uix.filechooser.FileChooserController.path`.
    .. version added:: 0.1
    '''
    filters = ListProperty([])
    ''':class:`~kivy.properties.ListProperty`, defaults to [], equal to '\*'.
    Specifies the filters to be applied to the files in the directory.
    See :kivy_fchooser:`kivy.uix.filechooser.FileChooserController.filters`.
    Filering keywords that the user types into the filter field as a comma
    separated list will be reflected here.
    .. version added:: 0.1
    '''
    filter_dirs = BooleanProperty(False)
    '''
    :class:`~kivy.properties.BooleanProperty`, defaults to False.
    Indicates whether filters should also apply to directories.
    See
    :kivy_fchooser:`kivy.uix.filechooser.FileChooserController.filter_dirs`.
    .. version added:: 0.1
    '''
    show_hidden = BooleanProperty(False)
    '''
    :class:`~kivy.properties.BooleanProperty`, defaults to False.
    Determines whether hidden files and folders should be shown.
    See
    :kivy_fchooser:`kivy.uix.filechooser.FileChooserController.show_hidden`.
    .. version added:: 0.1
    '''
    multiselect = BooleanProperty(False)
    '''
    :class:`~kivy.properties.BooleanProperty`, defaults to False.
    Determines whether the user is able to select multiple files or not.
    See
    :kivy_fchooser:`kivy.uix.filechooser.FileChooserController.multiselect`.
    .. version added:: 0.1
    '''
    dirselect = BooleanProperty(False)
    '''
    :class:`~kivy.properties.BooleanProperty`, defaults to False.
    Determines whether directories are valid selections or not.
    See
    :kivy_fchooser:`kivy.uix.filechooser.FileChooserController.dirselect`.
    .. version added:: 0.1
    '''
    rootpath = StringProperty(None, allownone=True)
    '''
    Root path to use instead of the system root path. If set, it will not show
    a ".." directory to go up to the root path. For example, if you set
    rootpath to /users/foo, the user will be unable to go to /users or to any
    other directory not starting with /users/foo.
    :class:`~kivy.properties.StringProperty`, defaults to None.
    See :kivy_fchooser:`kivy.uix.filechooser.FileChooserController.rootpath`.
    .. version added:: 0.1
    '''
    favorites = ListProperty([])
    '''A list of the paths added to the favorites link bar. Each element
    is a tuple where the first element is a string containing the full path
    to the location, while the second element is a string with the name of
    path to be displayed.
    :data:`favorites` is an :class:`~kivy.properties.ListProperty`,
    defaults to '[]'.
    .. version added:: 0.1
    '''
    # Entries for the file-type filter dropdown; each text tuple is
    # (display label, glob pattern).  NOTE(review): several placeholder
    # ('Example', 'item') entries remain from development.
    file_type_filters = [
        {'viewclass': 'ICFilterMenuItem',
         'text': ('All Files', '*.*')},
        {'viewclass': 'ICFilterMenuItem',
         'text': ('PDF Files', '*.pdf')},
        {'viewclass': 'ICFilterMenuItem',
         'text': ('Text Files', '*.txt')},
        {'viewclass': 'ICFilterMenuItem',
         'text': ('Example', 'item',)},
        {'viewclass': 'ICFilterMenuItem',
         'text': ('Example', 'item')},
        {'viewclass': 'ICFilterMenuItem',
         'text': ('Example', 'item')},
        {'viewclass': 'ICFilterMenuItem',
         'text': ('Example', 'item')}]
    # Default event handlers; callers bind their own listeners.
    def on_success(self):
        pass
    def on_canceled(self):
        pass
    def on_submit(self):
        pass
    def __init__(self, **kwargs):
        super(FileExplorer, self).__init__(**kwargs)
        self.file_layout_controller.bind(on_submit=self.update_file)
        # Defer property binding until the kv ids are fully available.
        Clock.schedule_once(self._post_init)
    def _post_init(self, *largs):
        """Mirror the controller's chooser properties onto this widget and
        hook up path-change events from the shortcut tree."""
        self.ids.path_ti.text = self.file_layout_controller.path
        self.file_layout_controller.bind(selection=partial(self._attr_callback, 'selection'),
                                         path=partial(self._attr_callback, 'path'),
                                         filters=partial(self._attr_callback, 'filters'),
                                         filter_dirs=partial(self._attr_callback, 'filter_dirs'),
                                         show_hidden=partial(self._attr_callback, 'show_hidden'),
                                         multiselect=partial(self._attr_callback, 'multiselect'),
                                         dirselect=partial(self._attr_callback, 'dirselect'),
                                         rootpath=partial(self._attr_callback, 'rootpath'))
        for node in self.ids.link_tree.iterate_all_nodes():
            if isinstance(node, TreeLabel):
                node.bind(on_change_path=self.update_path)
    def _attr_callback(self, attr, obj, value):
        # Copy the changed controller attribute onto this widget.
        setattr(self, attr, getattr(obj, attr))
        if len(self.file_selection_container.children) > 0:
            self.file_browser_container.clear_widgets()
    def update_path(self, instance, new_path):
        """Navigate the chooser to `new_path` and reflect it in the path field."""
        new_path = os.path.abspath(new_path)
        self.file_layout_controller.path = new_path
        self.ids.path_ti.text = new_path
    def update_file(self, instance, paths, *args):
        # Show the first submitted path in the selected-file field.
        self.ids.selected_file.text = os.path.normpath(paths[0])
    def update_filter(self, value):
        # `value` is a (label, pattern) tuple from the filter dropdown.
        self.filters = [value[1]]
if __name__ == '__main__':
    # App is imported here (script mode only); LinkTree.trigger_populate
    # also depends on this name being at module scope.
    from kivy.app import App
    class TestApp(App):
        """Minimal demo app showing a FileExplorer with a Documents favorite."""
        theme_cls = ThemeManager()
        def build(self):
            user_path = os.path.join(get_home_directory(), 'Documents')
            fe = FileExplorer(select_string='Select',
                              favorites=[(user_path, 'Documents')])
            fe.bind(on_success=self._fbrowser_success,
                    on_canceled=self._fbrowser_canceled,
                    on_submit=self._fbrowser_submit)
            return fe
        def _fbrowser_canceled(self, instance):
            print('cancelled, Close self.')
        def _fbrowser_success(self, instance):
            print('path', instance.path)
        def _fbrowser_submit(self, instance):
            print(instance.selection)
    TestApp().run()
| [
"ctypes.windll.kernel32.GetLogicalDrives",
"kivy.properties.ListProperty",
"os.walk",
"kivy.app.App.get_running_app",
"sys.getfilesystemencoding",
"os.path.normpath",
"os.path.isdir",
"kivy.properties.OptionProperty",
"os.path.expanduser",
"kivymd.theming.ThemeManager",
"kivy.lang.Builder.load_f... | [((3578, 3622), 'kivy.lang.Builder.load_file', 'Builder.load_file', (["(path + '/fileexplorer.kv')"], {}), "(path + '/fileexplorer.kv')\n", (3595, 3622), False, 'from kivy.lang import Builder\n'), ((3668, 3686), 'kivy.properties.StringProperty', 'StringProperty', (['""""""'], {}), "('')\n", (3682, 3686), False, 'from kivy.properties import ObjectProperty, StringProperty, OptionProperty, NumericProperty, ListProperty, BooleanProperty\n'), ((4038, 4058), 'kivy.properties.ObjectProperty', 'ObjectProperty', (['None'], {}), '(None)\n', (4052, 4058), False, 'from kivy.properties import ObjectProperty, StringProperty, OptionProperty, NumericProperty, ListProperty, BooleanProperty\n'), ((4097, 4113), 'kivy.properties.ObjectProperty', 'ObjectProperty', ([], {}), '()\n', (4111, 4113), False, 'from kivy.properties import ObjectProperty, StringProperty, OptionProperty, NumericProperty, ListProperty, BooleanProperty\n'), ((7959, 7975), 'kivy.properties.ObjectProperty', 'ObjectProperty', ([], {}), '()\n', (7973, 7975), False, 'from kivy.properties import ObjectProperty, StringProperty, OptionProperty, NumericProperty, ListProperty, BooleanProperty\n'), ((7994, 8010), 'kivy.properties.ObjectProperty', 'ObjectProperty', ([], {}), '()\n', (8008, 8010), False, 'from kivy.properties import ObjectProperty, StringProperty, OptionProperty, NumericProperty, ListProperty, BooleanProperty\n'), ((8031, 8047), 'kivy.properties.ObjectProperty', 'ObjectProperty', ([], {}), '()\n', (8045, 8047), False, 'from kivy.properties import ObjectProperty, StringProperty, OptionProperty, NumericProperty, ListProperty, BooleanProperty\n'), ((8079, 8095), 'kivy.properties.ObjectProperty', 'ObjectProperty', ([], {}), '()\n', (8093, 8095), False, 'from kivy.properties import ObjectProperty, StringProperty, OptionProperty, NumericProperty, ListProperty, BooleanProperty\n'), ((8125, 8141), 'kivy.properties.ObjectProperty', 'ObjectProperty', ([], {}), '()\n', (8139, 8141), False, 
'from kivy.properties import ObjectProperty, StringProperty, OptionProperty, NumericProperty, ListProperty, BooleanProperty\n'), ((8158, 8174), 'kivy.properties.ObjectProperty', 'ObjectProperty', ([], {}), '()\n', (8172, 8174), False, 'from kivy.properties import ObjectProperty, StringProperty, OptionProperty, NumericProperty, ListProperty, BooleanProperty\n'), ((8390, 8442), 'kivy.properties.OptionProperty', 'OptionProperty', (['"""normal"""'], {'options': "('normal', 'down')"}), "('normal', options=('normal', 'down'))\n", (8404, 8442), False, 'from kivy.properties import ObjectProperty, StringProperty, OptionProperty, NumericProperty, ListProperty, BooleanProperty\n'), ((8807, 8859), 'kivy.properties.OptionProperty', 'OptionProperty', (['"""normal"""'], {'options': "('normal', 'down')"}), "('normal', options=('normal', 'down'))\n", (8821, 8859), False, 'from kivy.properties import ObjectProperty, StringProperty, OptionProperty, NumericProperty, ListProperty, BooleanProperty\n'), ((9213, 9233), 'kivy.properties.StringProperty', 'StringProperty', (['"""Ok"""'], {}), "('Ok')\n", (9227, 9233), False, 'from kivy.properties import ObjectProperty, StringProperty, OptionProperty, NumericProperty, ListProperty, BooleanProperty\n'), ((9429, 9453), 'kivy.properties.StringProperty', 'StringProperty', (['"""Cancel"""'], {}), "('Cancel')\n", (9443, 9453), False, 'from kivy.properties import ObjectProperty, StringProperty, OptionProperty, NumericProperty, ListProperty, BooleanProperty\n'), ((9648, 9666), 'kivy.properties.StringProperty', 'StringProperty', (['""""""'], {}), "('')\n", (9662, 9666), False, 'from kivy.properties import ObjectProperty, StringProperty, OptionProperty, NumericProperty, ListProperty, BooleanProperty\n'), ((9996, 10012), 'kivy.properties.ListProperty', 'ListProperty', (['[]'], {}), '([])\n', (10008, 10012), False, 'from kivy.properties import ObjectProperty, StringProperty, OptionProperty, NumericProperty, ListProperty, BooleanProperty\n'), ((10276, 
10296), 'kivy.properties.StringProperty', 'StringProperty', (['u"""/"""'], {}), "(u'/')\n", (10290, 10296), False, 'from kivy.properties import ObjectProperty, StringProperty, OptionProperty, NumericProperty, ListProperty, BooleanProperty\n'), ((10617, 10633), 'kivy.properties.ListProperty', 'ListProperty', (['[]'], {}), '([])\n', (10629, 10633), False, 'from kivy.properties import ObjectProperty, StringProperty, OptionProperty, NumericProperty, ListProperty, BooleanProperty\n'), ((11034, 11056), 'kivy.properties.BooleanProperty', 'BooleanProperty', (['(False)'], {}), '(False)\n', (11049, 11056), False, 'from kivy.properties import ObjectProperty, StringProperty, OptionProperty, NumericProperty, ListProperty, BooleanProperty\n'), ((11335, 11357), 'kivy.properties.BooleanProperty', 'BooleanProperty', (['(False)'], {}), '(False)\n', (11350, 11357), False, 'from kivy.properties import ObjectProperty, StringProperty, OptionProperty, NumericProperty, ListProperty, BooleanProperty\n'), ((11637, 11659), 'kivy.properties.BooleanProperty', 'BooleanProperty', (['(False)'], {}), '(False)\n', (11652, 11659), False, 'from kivy.properties import ObjectProperty, StringProperty, OptionProperty, NumericProperty, ListProperty, BooleanProperty\n'), ((11945, 11967), 'kivy.properties.BooleanProperty', 'BooleanProperty', (['(False)'], {}), '(False)\n', (11960, 11967), False, 'from kivy.properties import ObjectProperty, StringProperty, OptionProperty, NumericProperty, ListProperty, BooleanProperty\n'), ((12241, 12277), 'kivy.properties.StringProperty', 'StringProperty', (['None'], {'allownone': '(True)'}), '(None, allownone=True)\n', (12255, 12277), False, 'from kivy.properties import ObjectProperty, StringProperty, OptionProperty, NumericProperty, ListProperty, BooleanProperty\n'), ((12760, 12776), 'kivy.properties.ListProperty', 'ListProperty', (['[]'], {}), '([])\n', (12772, 12776), False, 'from kivy.properties import ObjectProperty, StringProperty, OptionProperty, NumericProperty, 
ListProperty, BooleanProperty\n'), ((2102, 2117), 'os.path.expanduser', 'expanduser', (['"""~"""'], {}), "('~')\n", (2112, 2117), False, 'from os.path import sep, dirname, expanduser, isdir, join, isfile\n'), ((2243, 2258), 'os.path.expanduser', 'expanduser', (['"""~"""'], {}), "('~')\n", (2253, 2258), False, 'from os.path import sep, dirname, expanduser, isdir, join, isfile\n'), ((2436, 2470), 'ctypes.windll.kernel32.GetLogicalDrives', 'windll.kernel32.GetLogicalDrives', ([], {}), '()\n', (2468, 2470), False, 'from ctypes import windll, create_unicode_buffer\n'), ((6718, 6739), 'kivy.app.App.get_running_app', 'App.get_running_app', ([], {}), '()\n', (6737, 6739), False, 'from kivy.app import App\n'), ((14046, 14082), 'kivy.clock.Clock.schedule_once', 'Clock.schedule_once', (['self._post_init'], {}), '(self._post_init)\n', (14065, 14082), False, 'from kivy.clock import Clock\n'), ((15376, 15401), 'os.path.abspath', 'os.path.abspath', (['new_path'], {}), '(new_path)\n', (15391, 15401), False, 'import os\n'), ((15585, 15611), 'os.path.normpath', 'os.path.normpath', (['paths[0]'], {}), '(paths[0])\n', (15601, 15611), False, 'import os\n'), ((15786, 15800), 'kivymd.theming.ThemeManager', 'ThemeManager', ([], {}), '()\n', (15798, 15800), False, 'from kivymd.theming import ThemeManager\n'), ((2193, 2211), 'os.path.dirname', 'dirname', (['user_path'], {}), '(user_path)\n', (2200, 2211), False, 'from os.path import sep, dirname, expanduser, isdir, join, isfile\n'), ((2309, 2332), 'sys.getfilesystemencoding', 'getfilesystemencoding', ([], {}), '()\n', (2330, 2332), False, 'from sys import getfilesystemencoding\n'), ((6559, 6570), 'os.path.isdir', 'isdir', (['path'], {}), '(path)\n', (6564, 6570), False, 'from os.path import sep, dirname, expanduser, isdir, join, isfile\n'), ((6847, 6859), 'os.walk', 'walk', (['parent'], {}), '(parent)\n', (6851, 6859), False, 'from os import walk\n'), ((7472, 7497), 'os.path.join', 'join', (['directory', 'filename'], {}), '(directory, 
filename)\n', (7476, 7497), False, 'from os.path import sep, dirname, expanduser, isdir, join, isfile\n'), ((7566, 7591), 'os.path.join', 'join', (['directory', 'filename'], {}), '(directory, filename)\n', (7570, 7591), False, 'from os.path import sep, dirname, expanduser, isdir, join, isfile\n'), ((2140, 2166), 'os.path.join', 'join', (['user_path', '"""Desktop"""'], {}), "(user_path, 'Desktop')\n", (2144, 2166), False, 'from os.path import sep, dirname, expanduser, isdir, join, isfile\n'), ((2638, 2663), 'ctypes.create_unicode_buffer', 'create_unicode_buffer', (['(64)'], {}), '(64)\n', (2659, 2663), False, 'from ctypes import windll, create_unicode_buffer\n'), ((3161, 3173), 'os.path.isdir', 'isdir', (['place'], {}), '(place)\n', (3166, 3173), False, 'from os.path import sep, dirname, expanduser, isdir, join, isfile\n'), ((3443, 3453), 'os.path.isdir', 'isdir', (['vol'], {}), '(vol)\n', (3448, 3453), False, 'from os.path import sep, dirname, expanduser, isdir, join, isfile\n'), ((4618, 4640), 'os.path.join', 'join', (['user_path', 'place'], {}), '(user_path, place)\n', (4622, 4640), False, 'from os.path import sep, dirname, expanduser, isdir, join, isfile\n'), ((6371, 6393), 'os.path.join', 'join', (['user_path', 'place'], {}), '(user_path, place)\n', (6375, 6393), False, 'from os.path import sep, dirname, expanduser, isdir, join, isfile\n'), ((14234, 14275), 'functools.partial', 'partial', (['self._attr_callback', '"""selection"""'], {}), "(self._attr_callback, 'selection')\n", (14241, 14275), False, 'from functools import partial\n'), ((14323, 14359), 'functools.partial', 'partial', (['self._attr_callback', '"""path"""'], {}), "(self._attr_callback, 'path')\n", (14330, 14359), False, 'from functools import partial\n'), ((14410, 14449), 'functools.partial', 'partial', (['self._attr_callback', '"""filters"""'], {}), "(self._attr_callback, 'filters')\n", (14417, 14449), False, 'from functools import partial\n'), ((14504, 14547), 'functools.partial', 'partial', 
(['self._attr_callback', '"""filter_dirs"""'], {}), "(self._attr_callback, 'filter_dirs')\n", (14511, 14547), False, 'from functools import partial\n'), ((14602, 14645), 'functools.partial', 'partial', (['self._attr_callback', '"""show_hidden"""'], {}), "(self._attr_callback, 'show_hidden')\n", (14609, 14645), False, 'from functools import partial\n'), ((14700, 14743), 'functools.partial', 'partial', (['self._attr_callback', '"""multiselect"""'], {}), "(self._attr_callback, 'multiselect')\n", (14707, 14743), False, 'from functools import partial\n'), ((14796, 14837), 'functools.partial', 'partial', (['self._attr_callback', '"""dirselect"""'], {}), "(self._attr_callback, 'dirselect')\n", (14803, 14837), False, 'from functools import partial\n'), ((14889, 14929), 'functools.partial', 'partial', (['self._attr_callback', '"""rootpath"""'], {}), "(self._attr_callback, 'rootpath')\n", (14896, 14929), False, 'from functools import partial\n'), ((3044, 3060), 'os.path.expanduser', 'expanduser', (['u"""~"""'], {}), "(u'~')\n", (3054, 3060), False, 'from os.path import sep, dirname, expanduser, isdir, join, isfile\n'), ((3377, 3393), 'os.path.expanduser', 'expanduser', (['u"""~"""'], {}), "(u'~')\n", (3387, 3393), False, 'from os.path import sep, dirname, expanduser, isdir, join, isfile\n'), ((4691, 4713), 'os.path.join', 'join', (['user_path', 'place'], {}), '(user_path, place)\n', (4695, 4713), False, 'from os.path import sep, dirname, expanduser, isdir, join, isfile\n'), ((6444, 6466), 'os.path.join', 'join', (['user_path', 'place'], {}), '(user_path, place)\n', (6448, 6466), False, 'from os.path import sep, dirname, expanduser, isdir, join, isfile\n'), ((3213, 3224), 'os.walk', 'walk', (['place'], {}), '(place)\n', (3217, 3224), False, 'from os import walk\n'), ((3485, 3494), 'os.walk', 'walk', (['vol'], {}), '(vol)\n', (3489, 3494), False, 'from os import walk\n')] |
from features import build_features
from models import train_model
import pandas as pd
from dotenv import find_dotenv, load_dotenv
import os
def main():
    """Train a sentence classifier on the annotated corpus.

    Loads the tab-separated final data set, collapses the three
    annotators' votes into a single label, engineers extra features,
    preprocesses the sentence text, splits the data and trains a
    word2vec-based model.
    """
    # The corpus has no header row; column names are supplied explicitly.
    column_names = ['domain', 'doc_id', 'line', 'section', 'sentence',
                    'annotator_1', 'annotator_2', 'annotator_3']
    df = pd.read_csv(os.environ.get('FINAL_DATASET_PATH'),
                     sep='\t',
                     lineterminator='\n',
                     header=None,
                     names=column_names)

    # Derive one label per row from the inter-annotator columns, then add
    # engineered features.
    df = build_features.set_final_label(df)
    df = build_features.extra_features(df)

    # Clean every sentence: stop-word removal only (stemming and
    # lemmatization were tried and are deliberately disabled here).
    df['sentence'] = df['sentence'].apply(
        lambda x: build_features.process_text(
            x,
            stopwords_file=os.environ.get('STOP_WORDS_PATH'),
            stemming=False,
            lemmetization=False))

    X_train, X_test, y_train, y_test = build_features.split_data(df)

    # Train the word2vec-based classifier. Other models explored earlier:
    # SVM/SGD (unigram, bigram, grid search), naive Bayes, top-tfidf.
    train_model.word2vec(X_train[:, 0], y_train, X_test[:, 0], y_test)
if __name__ == '__main__':
    # Project root, two levels above this file; not used in this stub but
    # handy when resolving data files relative to the repository.
    project_dir = os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)

    # Walk up from the current directory until a .env file is found, then
    # export its entries as environment variables for main().
    dotenv_path = find_dotenv()
    load_dotenv(dotenv_path)

    main()
| [
"features.build_features.set_final_label",
"features.build_features.extra_features",
"dotenv.find_dotenv",
"os.environ.get",
"models.train_model.word2vec",
"os.path.dirname",
"features.build_features.split_data"
] | [((545, 579), 'features.build_features.set_final_label', 'build_features.set_final_label', (['df'], {}), '(df)\n', (575, 579), False, 'from features import build_features\n'), ((589, 622), 'features.build_features.extra_features', 'build_features.extra_features', (['df'], {}), '(df)\n', (618, 622), False, 'from features import build_features\n'), ((1041, 1070), 'features.build_features.split_data', 'build_features.split_data', (['df'], {}), '(df)\n', (1066, 1070), False, 'from features import build_features\n'), ((1603, 1669), 'models.train_model.word2vec', 'train_model.word2vec', (['X_train[:, 0]', 'y_train', 'X_test[:, 0]', 'y_test'], {}), '(X_train[:, 0], y_train, X_test[:, 0], y_test)\n', (1623, 1669), False, 'from models import train_model\n'), ((202, 238), 'os.environ.get', 'os.environ.get', (['"""FINAL_DATASET_PATH"""'], {}), "('FINAL_DATASET_PATH')\n", (216, 238), False, 'import os\n'), ((1801, 1826), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1816, 1826), False, 'import os\n'), ((2002, 2015), 'dotenv.find_dotenv', 'find_dotenv', ([], {}), '()\n', (2013, 2015), False, 'from dotenv import find_dotenv, load_dotenv\n'), ((792, 825), 'os.environ.get', 'os.environ.get', (['"""STOP_WORDS_PATH"""'], {}), "('STOP_WORDS_PATH')\n", (806, 825), False, 'import os\n')] |
from flask_restplus import Namespace, fields
class StatusDto:
    """Namespace and serialization models for status resources."""

    api = Namespace('status', description='status related operations')

    # Full representation returned by the API.
    schema = api.model('status', {
        'id': fields.Integer(dump_only=True),
        'status_name': fields.String(required=True, description='status name'),
    })

    # Payload accepted when creating a status.
    entry = api.model('status_entry', {
        'status_name': fields.String(required=True, description='status name'),
    })
class TypeDto:
    """Namespace and serialization models for type resources."""

    api = Namespace('type', description='type related operations')

    # Full representation returned by the API.
    schema = api.model('type', {
        'id': fields.Integer(dump_only=True),
        'type_name': fields.String(required=True, description='type name'),
    })

    # Payload accepted when creating a type.
    entry = api.model('type_entry', {
        'type_name': fields.String(required=True, description='type name'),
    })
class AreaDto:
    """Namespace and serialization models for area resources."""

    api = Namespace('area', description='area related operations')

    # Full representation returned by the API.
    schema = api.model('area', {
        'id': fields.Integer(dump_only=True),
        'area_name': fields.String(required=True, description='area_name'),
    })

    # Payload accepted when creating an area.
    entry = api.model('area_entry', {
        'area_name': fields.String(required=True, description='area_name'),
    })
class UserDto:
    """Namespace and serialization models for user resources."""

    api = Namespace('user', description='user related operations')

    # Full representation returned by the API.
    schema = api.model('user', {
        'email': fields.String(required=True, description='user email address'),
        'username': fields.String(required=True, description='user username'),
        'public_id': fields.String(description='user Identifier'),
        'type' : fields.Nested(TypeDto().schema, required=True),
        'area' : fields.Nested(AreaDto().schema, required=True)
    })

    # Payload accepted when creating a user.
    # FIX: registered as 'user_entry' (was 'user', which clashed with the
    # response model above — a second api.model() call with the same name
    # overwrites the first registration in the namespace, so only one of
    # the two models appeared in the generated Swagger docs). The attribute
    # name 'user' is unchanged, so callers are unaffected.
    user = api.model('user_entry', {
        'type_id' : fields.Integer(required=True, description='type id'),
        'area_id' : fields.Integer(required=True, description='area id'),
        'email': fields.String(required=True, description='user email address'),
        'username': fields.String(required=True, description='user username'),
        'password': fields.String(required=True, description='user password')
    })
class AuthDto:
    """Namespace and request model for authentication operations."""

    api = Namespace('auth', description='authentication related operations')

    # Credentials expected by the login endpoint.
    user_auth = api.model('auth_details', {
        'username': fields.String(required=True, description='The username'),
        'password': fields.String(required=True, description='The user password '),
    })
class GmlDto:
    """Namespace and serialization models for gml resources."""

    api = Namespace('gml', description='gml related operations')

    # Full representation returned by the API, including verification state.
    schema = api.model('gml', {
        'id': fields.Integer(dump_only=True),
        #'contributor' : fields.Nested(UserDto().schema, required=True),
        'contributor': fields.String(required=True, description='contributor username'),
        'building_name': fields.String(required=True, description='building name'),
        'city_gml_file': fields.String(required=True, description='city gml file name'),
        'uploaded_on': fields.DateTime(required=True, description='uploaded time'),
        'verified_on': fields.DateTime(required=True, description='verified time'),
        'validator': fields.String(required=True, description='validator username'),
        #'validator' : fields.Nested(UserDto().schema, required=True),
        'reason': fields.String(required=True, description='reason'),
        'area' : fields.Nested(AreaDto().schema, required=True),
        'status' : fields.Nested(StatusDto().schema, required=True)
    })

    # Payload accepted when a contributor uploads a gml file.
    entry = api.model('gml_entry', {
        'contributor': fields.String(required=True, description='contributor username'),
        'building_name': fields.String(required=True, description='building name'),
        'city_gml_file': fields.String(required=True, description='city gml file name'),
        'area_id': fields.Integer(required=True, description='area id')
    })

    # Payload accepted when a validator verifies (or rejects) an upload;
    # 'reason' is optional since approval may not need one.
    verify = api.model('gml_verify', {
        'id': fields.Integer(required=True, description='id'),
        'validator': fields.String(required=True, description='validator username'),
        'status_id': fields.Integer(required=True, description='status id'),
        'reason': fields.String(required=False, description='reason')
    })
class CityGmlDto:
    """Namespace and serialization models for city gml resources."""

    api = Namespace('city_gml', description='city gml related operations')

    # Full representation returned by the API, including verification state.
    schema = api.model('city_gml', {
        'id': fields.Integer(dump_only=True),
        'contributor': fields.String(required=True, description='contributor username'),
        'city_gml_file': fields.String(required=True, description='city gml file name'),
        'uploaded_on': fields.DateTime(required=True, description='uploaded time'),
        'verified_on': fields.DateTime(required=True, description='verified time'),
        'validator': fields.String(required=True, description='validator username'),
        'reason': fields.String(required=True, description='reason'),
        'area' : fields.Nested(AreaDto().schema, required=True),
        'status' : fields.Nested(StatusDto().schema, required=True)
    })

    # Payload accepted when a contributor uploads a city gml file.
    entry = api.model('city_gml_entry', {
        'contributor': fields.String(required=True, description='contributor username'),
        'city_gml_file': fields.String(required=True, description='city gml file name'),
        'area_id': fields.Integer(required=True, description='area id')
    })

    # Payload accepted when a validator verifies (or rejects) an upload;
    # 'reason' is optional since approval may not need one.
    verify = api.model('city_gml_verify', {
        'id': fields.Integer(required=True, description='id'),
        'validator': fields.String(required=True, description='validator username'),
        'status_id': fields.Integer(required=True, description='status id'),
        'reason': fields.String(required=False, description='reason')
    })
class BuildingsDto:
    """Namespace and serialization models for building resources."""

    api = Namespace('buildings', description='buildings related operations')

    # Full representation returned by the API; each building is linked to
    # the city gml file it was extracted from.
    schema = api.model('buildings', {
        'id': fields.Integer(dump_only=True),
        'gml_id': fields.String(required=True, description='gml_id'),
        'city_gml' : fields.Nested(CityGmlDto().schema, required=True),
    })

    # Payload accepted when registering buildings: a list of gml ids plus
    # the id of the city gml file they belong to.
    entry = api.model('buildings_entry', {
        'list_gml_id': fields.List(
            fields.String(required=True, description='gml id')),
        'city_gml_id': fields.Integer(required=True, description='city gml id'),
    })
class MonitoringAttributeDto:
    """Namespace and serialization models for monitoring attribute resources."""

    # NOTE(review): MonitoringAttributeDto, MonitoringPhotoDto and
    # MonitoringVideoDto each construct a Namespace named 'monitoring' —
    # confirm only one of them is registered with the Api, or that the
    # duplication is intentional.
    api = Namespace('monitoring', description='monitoring attribute related operations')

    # Full representation returned by the API, including verification state.
    schema = api.model('monitoring_attribute', {
        'id': fields.Integer(dump_only=True),
        'timestamp': fields.DateTime(required=True, description='timestamp'),
        'name': fields.String(required=True, description='name'),
        'function': fields.String(required=True, description='function'),
        'height': fields.Float(required=True, description='height'),
        'buildings' : fields.Nested(BuildingsDto().schema, required=True),
        'contributor': fields.String(required=True, description='contributor username'),
        'verified_on': fields.DateTime(required=True, description='verified time'),
        'validator': fields.String(required=True, description='validator username'),
        'reason': fields.String(required=True, description='reason'),
        'status' : fields.Nested(StatusDto().schema, required=True)
    })

    # Payload accepted when a contributor submits attribute data for a building.
    entry = api.model('monitoring_attribute_entry', {
        'name': fields.String(required=True, description='name'),
        'function': fields.String(required=True, description='function'),
        'height': fields.Float(required=True, description='height'),
        'user_id' : fields.String(required=True, description='user public id'),
        'gml_id' : fields.String(required=True, description='building gml id')
    })

    # Payload accepted when a validator verifies (or rejects) a submission;
    # 'reason' is optional since approval may not need one.
    verify = api.model('monitoring_attribute_verify', {
        'id': fields.Integer(required=True, description='id'),
        'validator': fields.String(required=True, description='validator username'),
        'status_id': fields.Integer(required=True, description='status id'),
        'reason': fields.String(required=False, description='reason')
    })
class MonitoringPhotoDto:
    """Namespace and serialization models for monitoring photo resources."""

    # NOTE(review): this Namespace shares the name 'monitoring' with the
    # attribute and video Dto classes — confirm the intended registration.
    api = Namespace('monitoring', description='monitoring photo related operations')

    # Full representation returned by the API, including verification state.
    schema = api.model('monitoring_photo', {
        'id': fields.Integer(dump_only=True),
        'timestamp': fields.DateTime(required=True, description='timestamp'),
        'photo_file': fields.String(required=True, description='photo_file'),
        'buildings' : fields.Nested(BuildingsDto().schema, required=True),
        'contributor': fields.String(required=True, description='contributor username'),
        'verified_on': fields.DateTime(required=True, description='verified time'),
        'validator': fields.String(required=True, description='validator username'),
        'reason': fields.String(required=True, description='reason'),
        'status' : fields.Nested(StatusDto().schema, required=True)
    })

    # Payload accepted when a contributor submits a photo for a building.
    entry = api.model('monitoring_photo_entry', {
        'photo_file': fields.String(required=True, description='photo_file'),
        'user_id' : fields.String(required=True, description='user public id'),
        'gml_id' : fields.String(required=True, description='building gml id')
    })

    # Payload accepted when a validator verifies (or rejects) a submission;
    # 'reason' is optional since approval may not need one.
    verify = api.model('monitoring_photo_verify', {
        'id': fields.Integer(required=True, description='id'),
        'validator': fields.String(required=True, description='validator username'),
        'status_id': fields.Integer(required=True, description='status id'),
        'reason': fields.String(required=False, description='reason')
    })
class MonitoringVideoDto:
    """Namespace and serialization models for monitoring video resources."""

    # NOTE(review): this Namespace shares the name 'monitoring' with the
    # attribute and photo Dto classes — confirm the intended registration.
    api = Namespace('monitoring', description='monitoring video related operations')

    # Full representation returned by the API, including verification state.
    schema = api.model('monitoring_video', {
        'id': fields.Integer(dump_only=True),
        'timestamp': fields.DateTime(required=True, description='timestamp'),
        'video_file': fields.String(required=True, description='video_file'),
        'buildings' : fields.Nested(BuildingsDto().schema, required=True),
        'contributor': fields.String(required=True, description='contributor username'),
        'verified_on': fields.DateTime(required=True, description='verified time'),
        'validator': fields.String(required=True, description='validator username'),
        'reason': fields.String(required=True, description='reason'),
        'status' : fields.Nested(StatusDto().schema, required=True)
    })

    # Payload accepted when a contributor submits a video for a building.
    entry = api.model('monitoring_video_entry', {
        'video_file': fields.String(required=True, description='video_file'),
        'user_id' : fields.String(required=True, description='user public id'),
        'gml_id' : fields.String(required=True, description='building gml id')
    })

    # Payload accepted when a validator verifies (or rejects) a submission;
    # 'reason' is optional since approval may not need one.
    verify = api.model('monitoring_video_verify', {
        'id': fields.Integer(required=True, description='id'),
        'validator': fields.String(required=True, description='validator username'),
        'status_id': fields.Integer(required=True, description='status id'),
        'reason': fields.String(required=False, description='reason')
    })
class CityObjectDto:
api = Namespace('cityobject', description='city object related operations') | [
"flask_restplus.Namespace",
"flask_restplus.fields.Float",
"flask_restplus.fields.Integer",
"flask_restplus.fields.String",
"flask_restplus.fields.DateTime"
] | [((75, 135), 'flask_restplus.Namespace', 'Namespace', (['"""status"""'], {'description': '"""status related operations"""'}), "('status', description='status related operations')\n", (84, 135), False, 'from flask_restplus import Namespace, fields\n'), ((457, 513), 'flask_restplus.Namespace', 'Namespace', (['"""type"""'], {'description': '"""type related operations"""'}), "('type', description='type related operations')\n", (466, 513), False, 'from flask_restplus import Namespace, fields\n'), ((821, 877), 'flask_restplus.Namespace', 'Namespace', (['"""area"""'], {'description': '"""area related operations"""'}), "('area', description='area related operations')\n", (830, 877), False, 'from flask_restplus import Namespace, fields\n'), ((1186, 1242), 'flask_restplus.Namespace', 'Namespace', (['"""user"""'], {'description': '"""user related operations"""'}), "('user', description='user related operations')\n", (1195, 1242), False, 'from flask_restplus import Namespace, fields\n'), ((2078, 2144), 'flask_restplus.Namespace', 'Namespace', (['"""auth"""'], {'description': '"""authentication related operations"""'}), "('auth', description='authentication related operations')\n", (2087, 2144), False, 'from flask_restplus import Namespace, fields\n'), ((2384, 2438), 'flask_restplus.Namespace', 'Namespace', (['"""gml"""'], {'description': '"""gml related operations"""'}), "('gml', description='gml related operations')\n", (2393, 2438), False, 'from flask_restplus import Namespace, fields\n'), ((4135, 4199), 'flask_restplus.Namespace', 'Namespace', (['"""city_gml"""'], {'description': '"""city gml related operations"""'}), "('city_gml', description='city gml related operations')\n", (4144, 4199), False, 'from flask_restplus import Namespace, fields\n'), ((5601, 5667), 'flask_restplus.Namespace', 'Namespace', (['"""buildings"""'], {'description': '"""buildings related operations"""'}), "('buildings', description='buildings related operations')\n", (5610, 5667), False, 'from 
flask_restplus import Namespace, fields\n'), ((6161, 6239), 'flask_restplus.Namespace', 'Namespace', (['"""monitoring"""'], {'description': '"""monitoring attribute related operations"""'}), "('monitoring', description='monitoring attribute related operations')\n", (6170, 6239), False, 'from flask_restplus import Namespace, fields\n'), ((7920, 7994), 'flask_restplus.Namespace', 'Namespace', (['"""monitoring"""'], {'description': '"""monitoring photo related operations"""'}), "('monitoring', description='monitoring photo related operations')\n", (7929, 7994), False, 'from flask_restplus import Namespace, fields\n'), ((9400, 9474), 'flask_restplus.Namespace', 'Namespace', (['"""monitoring"""'], {'description': '"""monitoring video related operations"""'}), "('monitoring', description='monitoring video related operations')\n", (9409, 9474), False, 'from flask_restplus import Namespace, fields\n'), ((10874, 10943), 'flask_restplus.Namespace', 'Namespace', (['"""cityobject"""'], {'description': '"""city object related operations"""'}), "('cityobject', description='city object related operations')\n", (10883, 10943), False, 'from flask_restplus import Namespace, fields\n'), ((185, 215), 'flask_restplus.fields.Integer', 'fields.Integer', ([], {'dump_only': '(True)'}), '(dump_only=True)\n', (199, 215), False, 'from flask_restplus import Namespace, fields\n'), ((240, 295), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""status name"""'}), "(required=True, description='status name')\n", (253, 295), False, 'from flask_restplus import Namespace, fields\n'), ((366, 421), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""status name"""'}), "(required=True, description='status name')\n", (379, 421), False, 'from flask_restplus import Namespace, fields\n'), ((561, 591), 'flask_restplus.fields.Integer', 'fields.Integer', ([], {'dump_only': '(True)'}), '(dump_only=True)\n', (575, 591), 
False, 'from flask_restplus import Namespace, fields\n'), ((614, 667), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""type name"""'}), "(required=True, description='type name')\n", (627, 667), False, 'from flask_restplus import Namespace, fields\n'), ((734, 787), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""type name"""'}), "(required=True, description='type name')\n", (747, 787), False, 'from flask_restplus import Namespace, fields\n'), ((925, 955), 'flask_restplus.fields.Integer', 'fields.Integer', ([], {'dump_only': '(True)'}), '(dump_only=True)\n', (939, 955), False, 'from flask_restplus import Namespace, fields\n'), ((978, 1031), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""area_name"""'}), "(required=True, description='area_name')\n", (991, 1031), False, 'from flask_restplus import Namespace, fields\n'), ((1098, 1151), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""area_name"""'}), "(required=True, description='area_name')\n", (1111, 1151), False, 'from flask_restplus import Namespace, fields\n'), ((1293, 1355), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""user email address"""'}), "(required=True, description='user email address')\n", (1306, 1355), False, 'from flask_restplus import Namespace, fields\n'), ((1377, 1434), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""user username"""'}), "(required=True, description='user username')\n", (1390, 1434), False, 'from flask_restplus import Namespace, fields\n'), ((1457, 1501), 'flask_restplus.fields.String', 'fields.String', ([], {'description': '"""user Identifier"""'}), "(description='user Identifier')\n", (1470, 1501), False, 'from flask_restplus import Namespace, fields\n'), ((1684, 1736), 
'flask_restplus.fields.Integer', 'fields.Integer', ([], {'required': '(True)', 'description': '"""type id"""'}), "(required=True, description='type id')\n", (1698, 1736), False, 'from flask_restplus import Namespace, fields\n'), ((1752, 1804), 'flask_restplus.fields.Integer', 'fields.Integer', ([], {'required': '(True)', 'description': '"""area id"""'}), "(required=True, description='area id')\n", (1766, 1804), False, 'from flask_restplus import Namespace, fields\n'), ((1823, 1885), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""user email address"""'}), "(required=True, description='user email address')\n", (1836, 1885), False, 'from flask_restplus import Namespace, fields\n'), ((1907, 1964), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""user username"""'}), "(required=True, description='user username')\n", (1920, 1964), False, 'from flask_restplus import Namespace, fields\n'), ((1986, 2043), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""user password"""'}), "(required=True, description='user password')\n", (1999, 2043), False, 'from flask_restplus import Namespace, fields\n'), ((2209, 2265), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""The username"""'}), "(required=True, description='The username')\n", (2222, 2265), False, 'from flask_restplus import Namespace, fields\n'), ((2287, 2349), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""The user password """'}), "(required=True, description='The user password ')\n", (2300, 2349), False, 'from flask_restplus import Namespace, fields\n'), ((2485, 2515), 'flask_restplus.fields.Integer', 'fields.Integer', ([], {'dump_only': '(True)'}), '(dump_only=True)\n', (2499, 2515), False, 'from flask_restplus import Namespace, fields\n'), ((2613, 2677), 'flask_restplus.fields.String', 
'fields.String', ([], {'required': '(True)', 'description': '"""contributor username"""'}), "(required=True, description='contributor username')\n", (2626, 2677), False, 'from flask_restplus import Namespace, fields\n'), ((2704, 2761), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""building name"""'}), "(required=True, description='building name')\n", (2717, 2761), False, 'from flask_restplus import Namespace, fields\n'), ((2788, 2850), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""city gml file name"""'}), "(required=True, description='city gml file name')\n", (2801, 2850), False, 'from flask_restplus import Namespace, fields\n'), ((2875, 2934), 'flask_restplus.fields.DateTime', 'fields.DateTime', ([], {'required': '(True)', 'description': '"""uploaded time"""'}), "(required=True, description='uploaded time')\n", (2890, 2934), False, 'from flask_restplus import Namespace, fields\n'), ((2959, 3018), 'flask_restplus.fields.DateTime', 'fields.DateTime', ([], {'required': '(True)', 'description': '"""verified time"""'}), "(required=True, description='verified time')\n", (2974, 3018), False, 'from flask_restplus import Namespace, fields\n'), ((3041, 3103), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""validator username"""'}), "(required=True, description='validator username')\n", (3054, 3103), False, 'from flask_restplus import Namespace, fields\n'), ((3194, 3244), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""reason"""'}), "(required=True, description='reason')\n", (3207, 3244), False, 'from flask_restplus import Namespace, fields\n'), ((3446, 3510), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""contributor username"""'}), "(required=True, description='contributor username')\n", (3459, 3510), False, 'from flask_restplus 
import Namespace, fields\n'), ((3537, 3594), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""building name"""'}), "(required=True, description='building name')\n", (3550, 3594), False, 'from flask_restplus import Namespace, fields\n'), ((3621, 3683), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""city gml file name"""'}), "(required=True, description='city gml file name')\n", (3634, 3683), False, 'from flask_restplus import Namespace, fields\n'), ((3704, 3756), 'flask_restplus.fields.Integer', 'fields.Integer', ([], {'required': '(True)', 'description': '"""area id"""'}), "(required=True, description='area id')\n", (3718, 3756), False, 'from flask_restplus import Namespace, fields\n'), ((3817, 3864), 'flask_restplus.fields.Integer', 'fields.Integer', ([], {'required': '(True)', 'description': '"""id"""'}), "(required=True, description='id')\n", (3831, 3864), False, 'from flask_restplus import Namespace, fields\n'), ((3887, 3949), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""validator username"""'}), "(required=True, description='validator username')\n", (3900, 3949), False, 'from flask_restplus import Namespace, fields\n'), ((3972, 4026), 'flask_restplus.fields.Integer', 'fields.Integer', ([], {'required': '(True)', 'description': '"""status id"""'}), "(required=True, description='status id')\n", (3986, 4026), False, 'from flask_restplus import Namespace, fields\n'), ((4046, 4097), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(False)', 'description': '"""reason"""'}), "(required=False, description='reason')\n", (4059, 4097), False, 'from flask_restplus import Namespace, fields\n'), ((4251, 4281), 'flask_restplus.fields.Integer', 'fields.Integer', ([], {'dump_only': '(True)'}), '(dump_only=True)\n', (4265, 4281), False, 'from flask_restplus import Namespace, fields\n'), ((4306, 4370), 
'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""contributor username"""'}), "(required=True, description='contributor username')\n", (4319, 4370), False, 'from flask_restplus import Namespace, fields\n'), ((4397, 4459), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""city gml file name"""'}), "(required=True, description='city gml file name')\n", (4410, 4459), False, 'from flask_restplus import Namespace, fields\n'), ((4484, 4543), 'flask_restplus.fields.DateTime', 'fields.DateTime', ([], {'required': '(True)', 'description': '"""uploaded time"""'}), "(required=True, description='uploaded time')\n", (4499, 4543), False, 'from flask_restplus import Namespace, fields\n'), ((4568, 4627), 'flask_restplus.fields.DateTime', 'fields.DateTime', ([], {'required': '(True)', 'description': '"""verified time"""'}), "(required=True, description='verified time')\n", (4583, 4627), False, 'from flask_restplus import Namespace, fields\n'), ((4650, 4712), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""validator username"""'}), "(required=True, description='validator username')\n", (4663, 4712), False, 'from flask_restplus import Namespace, fields\n'), ((4732, 4782), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""reason"""'}), "(required=True, description='reason')\n", (4745, 4782), False, 'from flask_restplus import Namespace, fields\n'), ((4989, 5053), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""contributor username"""'}), "(required=True, description='contributor username')\n", (5002, 5053), False, 'from flask_restplus import Namespace, fields\n'), ((5080, 5142), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""city gml file name"""'}), "(required=True, description='city gml file name')\n", 
(5093, 5142), False, 'from flask_restplus import Namespace, fields\n'), ((5163, 5215), 'flask_restplus.fields.Integer', 'fields.Integer', ([], {'required': '(True)', 'description': '"""area id"""'}), "(required=True, description='area id')\n", (5177, 5215), False, 'from flask_restplus import Namespace, fields\n'), ((5281, 5328), 'flask_restplus.fields.Integer', 'fields.Integer', ([], {'required': '(True)', 'description': '"""id"""'}), "(required=True, description='id')\n", (5295, 5328), False, 'from flask_restplus import Namespace, fields\n'), ((5351, 5413), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""validator username"""'}), "(required=True, description='validator username')\n", (5364, 5413), False, 'from flask_restplus import Namespace, fields\n'), ((5436, 5490), 'flask_restplus.fields.Integer', 'fields.Integer', ([], {'required': '(True)', 'description': '"""status id"""'}), "(required=True, description='status id')\n", (5450, 5490), False, 'from flask_restplus import Namespace, fields\n'), ((5510, 5561), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(False)', 'description': '"""reason"""'}), "(required=False, description='reason')\n", (5523, 5561), False, 'from flask_restplus import Namespace, fields\n'), ((5720, 5750), 'flask_restplus.fields.Integer', 'fields.Integer', ([], {'dump_only': '(True)'}), '(dump_only=True)\n', (5734, 5750), False, 'from flask_restplus import Namespace, fields\n'), ((5770, 5820), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""gml_id"""'}), "(required=True, description='gml_id')\n", (5783, 5820), False, 'from flask_restplus import Namespace, fields\n'), ((6055, 6111), 'flask_restplus.fields.Integer', 'fields.Integer', ([], {'required': '(True)', 'description': '"""city gml id"""'}), "(required=True, description='city gml id')\n", (6069, 6111), False, 'from flask_restplus import Namespace, fields\n'), ((6303, 
6333), 'flask_restplus.fields.Integer', 'fields.Integer', ([], {'dump_only': '(True)'}), '(dump_only=True)\n', (6317, 6333), False, 'from flask_restplus import Namespace, fields\n'), ((6356, 6411), 'flask_restplus.fields.DateTime', 'fields.DateTime', ([], {'required': '(True)', 'description': '"""timestamp"""'}), "(required=True, description='timestamp')\n", (6371, 6411), False, 'from flask_restplus import Namespace, fields\n'), ((6429, 6477), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""name"""'}), "(required=True, description='name')\n", (6442, 6477), False, 'from flask_restplus import Namespace, fields\n'), ((6499, 6551), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""function"""'}), "(required=True, description='function')\n", (6512, 6551), False, 'from flask_restplus import Namespace, fields\n'), ((6571, 6620), 'flask_restplus.fields.Float', 'fields.Float', ([], {'required': '(True)', 'description': '"""height"""'}), "(required=True, description='height')\n", (6583, 6620), False, 'from flask_restplus import Namespace, fields\n'), ((6714, 6778), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""contributor username"""'}), "(required=True, description='contributor username')\n", (6727, 6778), False, 'from flask_restplus import Namespace, fields\n'), ((6803, 6862), 'flask_restplus.fields.DateTime', 'fields.DateTime', ([], {'required': '(True)', 'description': '"""verified time"""'}), "(required=True, description='verified time')\n", (6818, 6862), False, 'from flask_restplus import Namespace, fields\n'), ((6885, 6947), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""validator username"""'}), "(required=True, description='validator username')\n", (6898, 6947), False, 'from flask_restplus import Namespace, fields\n'), ((6967, 7017), 'flask_restplus.fields.String', 
'fields.String', ([], {'required': '(True)', 'description': '"""reason"""'}), "(required=True, description='reason')\n", (6980, 7017), False, 'from flask_restplus import Namespace, fields\n'), ((7166, 7214), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""name"""'}), "(required=True, description='name')\n", (7179, 7214), False, 'from flask_restplus import Namespace, fields\n'), ((7236, 7288), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""function"""'}), "(required=True, description='function')\n", (7249, 7288), False, 'from flask_restplus import Namespace, fields\n'), ((7308, 7357), 'flask_restplus.fields.Float', 'fields.Float', ([], {'required': '(True)', 'description': '"""height"""'}), "(required=True, description='height')\n", (7320, 7357), False, 'from flask_restplus import Namespace, fields\n'), ((7379, 7437), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""user public id"""'}), "(required=True, description='user public id')\n", (7392, 7437), False, 'from flask_restplus import Namespace, fields\n'), ((7458, 7517), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""building gml id"""'}), "(required=True, description='building gml id')\n", (7471, 7517), False, 'from flask_restplus import Namespace, fields\n'), ((7595, 7642), 'flask_restplus.fields.Integer', 'fields.Integer', ([], {'required': '(True)', 'description': '"""id"""'}), "(required=True, description='id')\n", (7609, 7642), False, 'from flask_restplus import Namespace, fields\n'), ((7665, 7727), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""validator username"""'}), "(required=True, description='validator username')\n", (7678, 7727), False, 'from flask_restplus import Namespace, fields\n'), ((7750, 7804), 'flask_restplus.fields.Integer', 'fields.Integer', ([], 
{'required': '(True)', 'description': '"""status id"""'}), "(required=True, description='status id')\n", (7764, 7804), False, 'from flask_restplus import Namespace, fields\n'), ((7824, 7875), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(False)', 'description': '"""reason"""'}), "(required=False, description='reason')\n", (7837, 7875), False, 'from flask_restplus import Namespace, fields\n'), ((8054, 8084), 'flask_restplus.fields.Integer', 'fields.Integer', ([], {'dump_only': '(True)'}), '(dump_only=True)\n', (8068, 8084), False, 'from flask_restplus import Namespace, fields\n'), ((8107, 8162), 'flask_restplus.fields.DateTime', 'fields.DateTime', ([], {'required': '(True)', 'description': '"""timestamp"""'}), "(required=True, description='timestamp')\n", (8122, 8162), False, 'from flask_restplus import Namespace, fields\n'), ((8186, 8240), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""photo_file"""'}), "(required=True, description='photo_file')\n", (8199, 8240), False, 'from flask_restplus import Namespace, fields\n'), ((8334, 8398), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""contributor username"""'}), "(required=True, description='contributor username')\n", (8347, 8398), False, 'from flask_restplus import Namespace, fields\n'), ((8423, 8482), 'flask_restplus.fields.DateTime', 'fields.DateTime', ([], {'required': '(True)', 'description': '"""verified time"""'}), "(required=True, description='verified time')\n", (8438, 8482), False, 'from flask_restplus import Namespace, fields\n'), ((8505, 8567), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""validator username"""'}), "(required=True, description='validator username')\n", (8518, 8567), False, 'from flask_restplus import Namespace, fields\n'), ((8587, 8637), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 
'description': '"""reason"""'}), "(required=True, description='reason')\n", (8600, 8637), False, 'from flask_restplus import Namespace, fields\n'), ((8786, 8840), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""photo_file"""'}), "(required=True, description='photo_file')\n", (8799, 8840), False, 'from flask_restplus import Namespace, fields\n'), ((8862, 8920), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""user public id"""'}), "(required=True, description='user public id')\n", (8875, 8920), False, 'from flask_restplus import Namespace, fields\n'), ((8941, 9000), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""building gml id"""'}), "(required=True, description='building gml id')\n", (8954, 9000), False, 'from flask_restplus import Namespace, fields\n'), ((9074, 9121), 'flask_restplus.fields.Integer', 'fields.Integer', ([], {'required': '(True)', 'description': '"""id"""'}), "(required=True, description='id')\n", (9088, 9121), False, 'from flask_restplus import Namespace, fields\n'), ((9144, 9206), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""validator username"""'}), "(required=True, description='validator username')\n", (9157, 9206), False, 'from flask_restplus import Namespace, fields\n'), ((9229, 9283), 'flask_restplus.fields.Integer', 'fields.Integer', ([], {'required': '(True)', 'description': '"""status id"""'}), "(required=True, description='status id')\n", (9243, 9283), False, 'from flask_restplus import Namespace, fields\n'), ((9303, 9354), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(False)', 'description': '"""reason"""'}), "(required=False, description='reason')\n", (9316, 9354), False, 'from flask_restplus import Namespace, fields\n'), ((9534, 9564), 'flask_restplus.fields.Integer', 'fields.Integer', ([], {'dump_only': '(True)'}), 
'(dump_only=True)\n', (9548, 9564), False, 'from flask_restplus import Namespace, fields\n'), ((9587, 9642), 'flask_restplus.fields.DateTime', 'fields.DateTime', ([], {'required': '(True)', 'description': '"""timestamp"""'}), "(required=True, description='timestamp')\n", (9602, 9642), False, 'from flask_restplus import Namespace, fields\n'), ((9666, 9720), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""video_file"""'}), "(required=True, description='video_file')\n", (9679, 9720), False, 'from flask_restplus import Namespace, fields\n'), ((9814, 9878), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""contributor username"""'}), "(required=True, description='contributor username')\n", (9827, 9878), False, 'from flask_restplus import Namespace, fields\n'), ((9903, 9962), 'flask_restplus.fields.DateTime', 'fields.DateTime', ([], {'required': '(True)', 'description': '"""verified time"""'}), "(required=True, description='verified time')\n", (9918, 9962), False, 'from flask_restplus import Namespace, fields\n'), ((9985, 10047), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""validator username"""'}), "(required=True, description='validator username')\n", (9998, 10047), False, 'from flask_restplus import Namespace, fields\n'), ((10067, 10117), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""reason"""'}), "(required=True, description='reason')\n", (10080, 10117), False, 'from flask_restplus import Namespace, fields\n'), ((10266, 10320), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""video_file"""'}), "(required=True, description='video_file')\n", (10279, 10320), False, 'from flask_restplus import Namespace, fields\n'), ((10342, 10400), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""user 
public id"""'}), "(required=True, description='user public id')\n", (10355, 10400), False, 'from flask_restplus import Namespace, fields\n'), ((10421, 10480), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""building gml id"""'}), "(required=True, description='building gml id')\n", (10434, 10480), False, 'from flask_restplus import Namespace, fields\n'), ((10554, 10601), 'flask_restplus.fields.Integer', 'fields.Integer', ([], {'required': '(True)', 'description': '"""id"""'}), "(required=True, description='id')\n", (10568, 10601), False, 'from flask_restplus import Namespace, fields\n'), ((10624, 10686), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""validator username"""'}), "(required=True, description='validator username')\n", (10637, 10686), False, 'from flask_restplus import Namespace, fields\n'), ((10709, 10763), 'flask_restplus.fields.Integer', 'fields.Integer', ([], {'required': '(True)', 'description': '"""status id"""'}), "(required=True, description='status id')\n", (10723, 10763), False, 'from flask_restplus import Namespace, fields\n'), ((10783, 10834), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(False)', 'description': '"""reason"""'}), "(required=False, description='reason')\n", (10796, 10834), False, 'from flask_restplus import Namespace, fields\n'), ((5979, 6029), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(True)', 'description': '"""gml id"""'}), "(required=True, description='gml id')\n", (5992, 6029), False, 'from flask_restplus import Namespace, fields\n')] |
# Importing all required libraries for the code to function
import tkinter
from matplotlib.backends.backend_tkagg import (FigureCanvasTkAgg)
from matplotlib import pyplot as plt, animation
from mpl_toolkits import mplot3d
from stl import mesh
import numpy as np
import serial
from serial.tools import list_ports
import time
import csv
import matplotlib
import seaborn as sns
matplotlib.use("TkAgg")
# Setting the matplotlib style with the seaborn module
sns.set_style("whitegrid")
with sns.axes_style("whitegrid"):
fig = plt.subplots()
for param in ['figure.facecolor', 'axes.facecolor', 'savefig.facecolor']:
plt.rcParams[param] = '141417' # bluish dark grey
for param in ['text.color', 'axes.labelcolor', 'xtick.color', 'ytick.color']:
plt.rcParams[param] = '0.9' # very light grey
# Creating the matplotlib figure with subplots and axes
fig = plt.figure()
fig.set_tight_layout(True)
ax1 = fig.add_subplot(1,2,2,projection="3d")
ax2 = fig.add_subplot(3,2,1)
ax3 = fig.add_subplot(3,2,3)
ax4 = fig.add_subplot(3,2,5)
# Defining the USB serial port for the Arduino
COM = "/dev/cu.usbmodem14101"
# Importing the stl file for the 3D graph
data = mesh.Mesh.from_file('RocketFast.stl')
# Fieldnames to be written on the CSV file
fieldnames = ["Time", "Yaw", "Pitch", "Roll", "Pressure", "Altitude", "R_Altitude", "B_Temp", "AccelX", "AccelY",
"AccelZ", "GyroX", "GyroY", "GyroZ", "A_Temp"]
# Creating or opening the data.csv file and writing the fieldnames
# If the file existed it will be truncated before being used again.
with open('data.csv', 'w', newline='') as csv_file:
csv_file.truncate()
csv_writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
csv_writer.writeheader()
# List every serial port the OS can see, so the user can check that the
# device path in COM is actually present.
ports = list_ports.comports()
for port in ports:
    print(port)
# Keep retrying the Arduino connection until it succeeds.
# A bare ``except:`` here would also swallow KeyboardInterrupt, making this
# retry loop impossible to break with Ctrl-C — so only Exception is caught.
while True:
    try:
        serialCom = serial.Serial(port=COM, baudrate=115200, timeout=0.1)
        break
    except Exception:
        print('Could not connect!')
        print('Retrying...')
        time.sleep(0.1)
# History buffers for the incoming telemetry, one (1, 1) array per channel.
# np.zeros is used instead of np.empty: np.empty returns *uninitialized*
# memory, so the first plotted sample would otherwise be garbage values.
time_x = np.zeros([1, 1])
ori_x = np.zeros([1, 1])
ori_y = np.zeros([1, 1])
ori_z = np.zeros([1, 1])
accel_x = np.zeros([1, 1])
accel_y = np.zeros([1, 1])
accel_z = np.zeros([1, 1])
gyro_x = np.zeros([1, 1])
gyro_y = np.zeros([1, 1])
gyro_z = np.zeros([1, 1])
alt = np.zeros([1, 1])
r_alt = np.zeros([1, 1])
a_temp = np.zeros([1, 1])
b_temp = np.zeros([1, 1])
# Main processing class: drains the Arduino serial stream, logs each sample
# to data.csv and refreshes the four matplotlib axes.
class Processing():

    def animate(self, i):
        """FuncAnimation callback.

        Reads every pending line from the serial port, appends the newest
        sample to the module-level history arrays, rotates the STL model to
        the new attitude and redraws the orientation / acceleration /
        angular-rate plots.
        """
        # np.append and slicing return NEW arrays, so the module-level
        # buffers must be rebound here; the original discarded those return
        # values and the history never actually grew.
        global time_x, ori_x, ori_y, ori_z, alt, r_alt, a_temp, b_temp
        global accel_x, accel_y, accel_z, gyro_x, gyro_y, gyro_z
        ctr = 0
        # Gets data from Arduino
        try:
            while serialCom.inWaiting() > 0:
                # Read and decode one serial line
                s_bytes = serialCom.readline()
                decoded_bytes = s_bytes.decode("utf-8").strip('\r\n')
                # Place serial data in a list of floats
                ori = [float(x) for x in decoded_bytes.split()]
                # The first line may have been read mid-transmission, so the
                # data could be incomplete: skip it.
                if ctr == 0:
                    ctr = ctr + 1
                else:
                    values = [float(x) for x in decoded_bytes.split()]
                    print(values)
                    # Append the sample to the CSV log; the with-block
                    # closes the file, no explicit close() is needed.
                    with open('data.csv', 'a') as csv_file:
                        csv_writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
                        info = {
                            "Time": values[0],
                            "Yaw": values[1],
                            "Pitch": values[2],
                            "Roll": values[3],
                            "Pressure": values[4],
                            "Altitude": values[5],
                            "R_Altitude": values[6],
                            "B_Temp": values[7],
                            "AccelX": values[8],
                            "AccelY": values[9],
                            "AccelZ": values[10],
                            "GyroX": values[11],
                            "GyroY": values[12],
                            "GyroZ": values[13],
                            "A_Temp": values[14],
                        }
                        csv_writer.writerow(info)
        except Exception:
            # Exit program if communication is lost.
            # In real life you would want logic that reattempts after a delay.
            print('Communication lost...')
            print('Exiting Program...')
            exit()
        try:
            # Append the newest sample to the history arrays, rebinding the
            # globals with np.append's return value (fixes the discarded-
            # result bug of the original).
            time_x = np.append(time_x, float(ori[0]) / 1000)
            ori_x = np.append(ori_x, float(ori[1]))
            ori_y = np.append(ori_y, float(ori[2]))
            ori_z = np.append(ori_z, float(ori[3]))
            alt = np.append(alt, float(ori[5]))
            r_alt = np.append(r_alt, float(ori[6]))
            b_temp = np.append(b_temp, float(ori[7]))
            accel_x = np.append(accel_x, float(ori[8]))
            accel_y = np.append(accel_y, float(ori[9]))
            accel_z = np.append(accel_z, float(ori[10]))
            gyro_x = np.append(gyro_x, float(ori[11]))
            gyro_y = np.append(gyro_y, float(ori[12]))
            gyro_z = np.append(gyro_z, float(ori[13]))
            a_temp = np.append(a_temp, float(ori[14]))
        except Exception:
            # No (complete) sample was read this frame ('ori' missing or too
            # short): keep the previous picture.
            return 1
        # Only recompute the 3D view when the attitude actually changed, as
        # re-rendering the STL mesh takes several cycles.
        # (The original compared every axis against ori[1] — a copy/paste
        # slip that made the check effectively yaw-only.)
        if ori_x[-1] != ori_x[-2] or ori_y[-1] != ori_y[-2] or ori_z[-1] != ori_z[-2]:
            ax1.clear()
            # Rotate the mesh by the attitude delta since the last frame.
            data.rotate([1, 0, 0], np.radians(ori_y[-2] - ori_y[-1]))
            data.rotate([0, 1, 0], np.radians(ori_x[-1] - ori_x[-2]))
            data.rotate([0, 0, 1], np.radians(ori_z[-1] - ori_z[-2]))
            # Graph the STL mesh onto the 3D axes.
            collection = mplot3d.art3d.Poly3DCollection(data.vectors)
            collection.set_facecolor('#17205B')
            ax1.add_collection3d(collection)
            scale = data.points.flatten("A")
            ax1.auto_scale_xyz(scale, scale, scale)
        # Keep only the most recent 50 samples (sliding window). Slicing and
        # rebinding replaces the original's np.resize calls, which both
        # discarded their result and would have kept the OLDEST samples.
        if time_x.size > 50:
            time_x = time_x[-50:]
            ori_x = ori_x[-50:]
            ori_y = ori_y[-50:]
            ori_z = ori_z[-50:]
            alt = alt[-50:]
            r_alt = r_alt[-50:]
            accel_x = accel_x[-50:]
            accel_y = accel_y[-50:]
            accel_z = accel_z[-50:]
            gyro_x = gyro_x[-50:]
            gyro_y = gyro_y[-50:]
            gyro_z = gyro_z[-50:]
            a_temp = a_temp[-50:]
            b_temp = b_temp[-50:]
        # Orientation plot
        ax2.clear()
        ax2.plot(time_x, ori_x, label="X-axis")
        ax2.plot(time_x, ori_y, label="Y-axis")
        ax2.plot(time_x, ori_z, label="Z-axis")
        ax2.set_ylabel("Orientation (deg)")
        ax2.set_xticklabels([])
        ax2.grid(b=True)
        # Acceleration plot
        ax3.clear()
        ax3.plot(time_x, accel_x, label="X")
        ax3.plot(time_x, accel_y, label="Y")
        ax3.plot(time_x, accel_z, label="Z")
        ax3.set_ylabel("Acceleration (m/s^2)")
        ax3.set_xticklabels([])
        ax3.grid(b=True)
        # Angular-rate plot
        ax4.clear()
        ax4.plot(time_x, gyro_x, label="X")
        ax4.plot(time_x, gyro_y, label="Y")
        ax4.plot(time_x, gyro_z, label="Z")
        ax4.set_xlabel("Time")
        ax4.set_ylabel("Angular Rates (deg/s)")
        ax4.grid(b=True)
        # One shared legend above the first graph.
        ax2.legend(bbox_to_anchor=[0.5, 1.2], loc='upper center', ncol=3, mode="tight", borderaxespad=0)
        return 1
# Creates the tkinter window and calls the main procesing class.
class Window():
    def __init__(self):
        """Build the Tk root window, embed the matplotlib figure in it and
        start the animation loop driven by Processing.animate."""
        master = tkinter.Tk()
        master.geometry("1500x735")
        master.wm_title("Graphical User Interface")
        figure_canvas = FigureCanvasTkAgg(fig, master=master)
        figure_canvas.draw()
        widget = figure_canvas.get_tk_widget()
        widget.pack(side=tkinter.TOP, fill=tkinter.BOTH, expand=1)
        handler = Processing()
        # Hold a reference so the animation stays alive during mainloop.
        self.anim = animation.FuncAnimation(fig, handler.animate)
        master.mainloop()
# Run the file while preventing accidental invokes on import.
if __name__ == "__main__":
    win = Window()
| [
"csv.DictWriter",
"mpl_toolkits.mplot3d.art3d.Poly3DCollection",
"serial.tools.list_ports.comports",
"matplotlib.backends.backend_tkagg.FigureCanvasTkAgg",
"matplotlib.use",
"matplotlib.animation.FuncAnimation",
"time.sleep",
"seaborn.set_style",
"matplotlib.pyplot.figure",
"tkinter.Tk",
"numpy.... | [((375, 398), 'matplotlib.use', 'matplotlib.use', (['"""TkAgg"""'], {}), "('TkAgg')\n", (389, 398), False, 'import matplotlib\n'), ((455, 481), 'seaborn.set_style', 'sns.set_style', (['"""whitegrid"""'], {}), "('whitegrid')\n", (468, 481), True, 'import seaborn as sns\n'), ((864, 876), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (874, 876), True, 'from matplotlib import pyplot as plt, animation\n'), ((1164, 1201), 'stl.mesh.Mesh.from_file', 'mesh.Mesh.from_file', (['"""RocketFast.stl"""'], {}), "('RocketFast.stl')\n", (1183, 1201), False, 'from stl import mesh\n'), ((1761, 1782), 'serial.tools.list_ports.comports', 'list_ports.comports', ([], {}), '()\n', (1780, 1782), False, 'from serial.tools import list_ports\n'), ((2173, 2189), 'numpy.empty', 'np.empty', (['[1, 1]'], {}), '([1, 1])\n', (2181, 2189), True, 'import numpy as np\n'), ((2198, 2214), 'numpy.empty', 'np.empty', (['[1, 1]'], {}), '([1, 1])\n', (2206, 2214), True, 'import numpy as np\n'), ((2223, 2239), 'numpy.empty', 'np.empty', (['[1, 1]'], {}), '([1, 1])\n', (2231, 2239), True, 'import numpy as np\n'), ((2248, 2264), 'numpy.empty', 'np.empty', (['[1, 1]'], {}), '([1, 1])\n', (2256, 2264), True, 'import numpy as np\n'), ((2275, 2291), 'numpy.empty', 'np.empty', (['[1, 1]'], {}), '([1, 1])\n', (2283, 2291), True, 'import numpy as np\n'), ((2302, 2318), 'numpy.empty', 'np.empty', (['[1, 1]'], {}), '([1, 1])\n', (2310, 2318), True, 'import numpy as np\n'), ((2329, 2345), 'numpy.empty', 'np.empty', (['[1, 1]'], {}), '([1, 1])\n', (2337, 2345), True, 'import numpy as np\n'), ((2355, 2371), 'numpy.empty', 'np.empty', (['[1, 1]'], {}), '([1, 1])\n', (2363, 2371), True, 'import numpy as np\n'), ((2381, 2397), 'numpy.empty', 'np.empty', (['[1, 1]'], {}), '([1, 1])\n', (2389, 2397), True, 'import numpy as np\n'), ((2407, 2423), 'numpy.empty', 'np.empty', (['[1, 1]'], {}), '([1, 1])\n', (2415, 2423), True, 'import numpy as np\n'), ((2430, 2446), 'numpy.empty', 'np.empty', (['[1, 
1]'], {}), '([1, 1])\n', (2438, 2446), True, 'import numpy as np\n'), ((2455, 2471), 'numpy.empty', 'np.empty', (['[1, 1]'], {}), '([1, 1])\n', (2463, 2471), True, 'import numpy as np\n'), ((2481, 2497), 'numpy.empty', 'np.empty', (['[1, 1]'], {}), '([1, 1])\n', (2489, 2497), True, 'import numpy as np\n'), ((2507, 2523), 'numpy.empty', 'np.empty', (['[1, 1]'], {}), '([1, 1])\n', (2515, 2523), True, 'import numpy as np\n'), ((487, 514), 'seaborn.axes_style', 'sns.axes_style', (['"""whitegrid"""'], {}), "('whitegrid')\n", (501, 514), True, 'import seaborn as sns\n'), ((526, 540), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (538, 540), True, 'from matplotlib import pyplot as plt, animation\n'), ((1650, 1697), 'csv.DictWriter', 'csv.DictWriter', (['csv_file'], {'fieldnames': 'fieldnames'}), '(csv_file, fieldnames=fieldnames)\n', (1664, 1697), False, 'import csv\n'), ((1951, 2004), 'serial.Serial', 'serial.Serial', ([], {'port': 'COM', 'baudrate': '(115200)', 'timeout': '(0.1)'}), '(port=COM, baudrate=115200, timeout=0.1)\n', (1964, 2004), False, 'import serial\n'), ((8295, 8307), 'tkinter.Tk', 'tkinter.Tk', ([], {}), '()\n', (8305, 8307), False, 'import tkinter\n'), ((8410, 8445), 'matplotlib.backends.backend_tkagg.FigureCanvasTkAgg', 'FigureCanvasTkAgg', (['fig'], {'master': 'root'}), '(fig, master=root)\n', (8427, 8445), False, 'from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg\n'), ((8599, 8644), 'matplotlib.animation.FuncAnimation', 'animation.FuncAnimation', (['fig', 'process.animate'], {}), '(fig, process.animate)\n', (8622, 8644), False, 'from matplotlib import pyplot as plt, animation\n'), ((2104, 2119), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (2114, 2119), False, 'import time\n'), ((6183, 6227), 'mpl_toolkits.mplot3d.art3d.Poly3DCollection', 'mplot3d.art3d.Poly3DCollection', (['data.vectors'], {}), '(data.vectors)\n', (6213, 6227), False, 'from mpl_toolkits import mplot3d\n'), ((6546, 6572), 'numpy.resize', 
'np.resize', (['time_x', '(1, 50)'], {}), '(time_x, (1, 50))\n', (6555, 6572), True, 'import numpy as np\n'), ((6585, 6610), 'numpy.resize', 'np.resize', (['ori_x', '(1, 50)'], {}), '(ori_x, (1, 50))\n', (6594, 6610), True, 'import numpy as np\n'), ((6623, 6648), 'numpy.resize', 'np.resize', (['ori_y', '(1, 50)'], {}), '(ori_y, (1, 50))\n', (6632, 6648), True, 'import numpy as np\n'), ((6661, 6686), 'numpy.resize', 'np.resize', (['ori_z', '(1, 50)'], {}), '(ori_z, (1, 50))\n', (6670, 6686), True, 'import numpy as np\n'), ((6699, 6722), 'numpy.resize', 'np.resize', (['alt', '(1, 50)'], {}), '(alt, (1, 50))\n', (6708, 6722), True, 'import numpy as np\n'), ((6735, 6760), 'numpy.resize', 'np.resize', (['r_alt', '(1, 50)'], {}), '(r_alt, (1, 50))\n', (6744, 6760), True, 'import numpy as np\n'), ((6773, 6800), 'numpy.resize', 'np.resize', (['accel_x', '(1, 50)'], {}), '(accel_x, (1, 50))\n', (6782, 6800), True, 'import numpy as np\n'), ((6813, 6840), 'numpy.resize', 'np.resize', (['accel_y', '(1, 50)'], {}), '(accel_y, (1, 50))\n', (6822, 6840), True, 'import numpy as np\n'), ((6853, 6880), 'numpy.resize', 'np.resize', (['accel_z', '(1, 50)'], {}), '(accel_z, (1, 50))\n', (6862, 6880), True, 'import numpy as np\n'), ((6893, 6919), 'numpy.resize', 'np.resize', (['gyro_x', '(1, 50)'], {}), '(gyro_x, (1, 50))\n', (6902, 6919), True, 'import numpy as np\n'), ((6932, 6958), 'numpy.resize', 'np.resize', (['gyro_y', '(1, 50)'], {}), '(gyro_y, (1, 50))\n', (6941, 6958), True, 'import numpy as np\n'), ((6971, 6997), 'numpy.resize', 'np.resize', (['gyro_z', '(1, 50)'], {}), '(gyro_z, (1, 50))\n', (6980, 6997), True, 'import numpy as np\n'), ((7010, 7036), 'numpy.resize', 'np.resize', (['a_temp', '(1, 50)'], {}), '(a_temp, (1, 50))\n', (7019, 7036), True, 'import numpy as np\n'), ((7049, 7075), 'numpy.resize', 'np.resize', (['b_temp', '(1, 50)'], {}), '(b_temp, (1, 50))\n', (7058, 7075), True, 'import numpy as np\n'), ((3500, 3547), 'csv.DictWriter', 'csv.DictWriter', 
(['csv_file'], {'fieldnames': 'fieldnames'}), '(csv_file, fieldnames=fieldnames)\n', (3514, 3547), False, 'import csv\n')] |
# -*- coding: utf-8 -*-
# Build the epsilon (permittivity) map for "lens 2": an aspheric lens, a
# sphere and a thin plate laid out along the x axis, then write the
# simulation config and the epsilon array to disk.
import argparse
import os
from .helpers.aspherical_lens import check_point as check_point_asph
from .helpers.spherical_lens import check_point as check_point_sph
from .helpers.plate import check_point as check_point_plate
from .helpers.dimensions import mm2px
from .helpers.config import generate_config
from .helpers.output import output_array_to_file, output_png, output_sig_eps_png
# Command-line parameters; all three are converted below, so in practice
# they are required even though argparse does not mark them as such.
parser = argparse.ArgumentParser()
parser.add_argument("--lam_px", help="Number of pixels per wavelength")
parser.add_argument("--delta", help="Distance between sphere and aspheric lens")
parser.add_argument("--num", help="Number of iterations")
args = parser.parse_args()
lam_px = int(args.lam_px)
num = int(args.num)
delta = float(args.delta)
# Simulation grid size, in pixels.
size_x = 8000
size_y = 6000
lam = 0.6  # mm
# Refractive indices of the sphere, the aspheric lens and (below) the plate.
n_sph = 3.4
n_asph = 1.5
# Aspheric lens geometry (pixels, converted from mm), centred vertically.
d_asph = mm2px(15, lam, lam_px)
x_pos_asph = mm2px(2 * lam, lam, lam_px)
y_pos_asph = round(size_y / 2)
diameter_asph = mm2px(25.2, lam, lam_px)
lens_sph_f = mm2px(15, lam, lam_px)  # NOTE(review): unused below — confirm.
# Spherical lens geometry; placed `delta` pixels behind the aspheric lens.
radius_sph = mm2px(5, lam, lam_px)
d_sph = mm2px(5 - lam / 2, lam, lam_px)
x_pos_sph = x_pos_asph + d_asph + delta
y_pos_sph = round(size_y / 2)
# NOTE(review): the banner reports d_sph as `d_sph + d_sph - 5 * lam_px`,
# not the actual d_sph used below — confirm which one is intended.
print('''
Creating lens 2 with params:
delta={delta}
radius_sph={radius_sph}
d_sph={d_sph}
x_pos_sph={x_pos_sph}
y_pos_sph={y_pos_sph}
'''.format(
delta=delta,
radius_sph=radius_sph,
d_sph=d_sph + d_sph - 5 * lam_px,
x_pos_sph=x_pos_sph,
y_pos_sph=y_pos_sph,
))
# Thin plate directly behind the sphere, same refractive index as the sphere.
d_plate = lam_px  # NOTE(review): assigned but unused below — confirm.
x_pos_plate = x_pos_sph + d_sph
y_pos_plate = y_pos_sph
size_x_plate = mm2px(0.25, lam, lam_px)
size_y_plate = mm2px(20, lam, lam_px)
n_plate = n_sph
# Flat row-major epsilon map, filled pixel by pixel below.
eps = []
print('Creating config')
base_path = './lenses_v2_delta={delta}_lam=100'.format(delta=int(delta))
if not os.path.exists(base_path):
    os.makedirs(base_path)
generate_config(
    base_path=base_path,
    size_x=size_x,
    size_y=size_y,
    type='TM',
    num=num,
    x_sphere=x_pos_sph,
    lam_px=lam_px,
)
print('Start proccessing epsilon')
# For every pixel, test membership in each optical element (aspheric lens
# first, then sphere, then plate) and record that element's refractive index;
# background pixels get 1 (vacuum).
for y in range(size_y):
    print('Proccessing [{}/{}]'.format(y, size_y - 1), end="\r")
    for x in range(size_x):
        if check_point_asph(x - x_pos_asph, y - y_pos_asph, d_asph, diameter_asph, lam, lam_px):
            eps.append(n_asph)
        elif check_point_sph(x - x_pos_sph, y - y_pos_sph, radius_sph, d_sph, 10000000):
            eps.append(n_sph)
        elif check_point_plate(x - x_pos_plate, y - y_pos_plate, size_x_plate, size_y_plate):
            eps.append(n_plate)
        else:
            eps.append(1)
# Render a PNG preview of the epsilon map before dumping the raw array.
output_sig_eps_png(eps, [], "./world", 4, size_x, size_y)
print('Finish proccessing epsilon')
print('Start writing epsilon')
output_array_to_file(eps, base_path + '/epsilon.txt', size_x)
print('Finish writing epsilon')
| [
"os.makedirs",
"os.path.exists",
"argparse.ArgumentParser"
] | [((421, 446), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (444, 446), False, 'import argparse\n'), ((1737, 1762), 'os.path.exists', 'os.path.exists', (['base_path'], {}), '(base_path)\n', (1751, 1762), False, 'import os\n'), ((1768, 1790), 'os.makedirs', 'os.makedirs', (['base_path'], {}), '(base_path)\n', (1779, 1790), False, 'import os\n')] |
import os
import json
import subprocess

import mobile_de
import autoscout24_ch
import anibis_ch

if __name__ == '__main__':
    # Run one scraper; on a corrupt JSON output, delete it and restart.
    try:
        # Alternative scrapers, enabled by hand as needed:
        #autoscout24_ch.scrape_makes()
        #autoscout24_ch.scrape_models()
        #mobile_de.scrape_makes()
        #mobile_de.scrape_models()
        anibis_ch.scrape_makes()
        # check "models": []
    except KeyboardInterrupt:
        exit(0)
    except json.decoder.JSONDecodeError:
        # The output file is corrupt/partial: drop it and restart the run.
        # NOTE(review): presumably the scraper writes 'makes.json' — confirm.
        if os.path.exists('makes.json'):
            os.remove('makes.json')
        # Bug fix: pass argv as a list — a plain command string without
        # shell=True is only accepted on Windows; the list form is portable.
        subprocess.run(['python', 'test.py'])
| [
"subprocess.run",
"anibis_ch.scrape_makes",
"os.remove"
] | [((293, 317), 'anibis_ch.scrape_makes', 'anibis_ch.scrape_makes', ([], {}), '()\n', (315, 317), False, 'import anibis_ch\n'), ((442, 465), 'os.remove', 'os.remove', (['"""makes.json"""'], {}), "('makes.json')\n", (451, 465), False, 'import os\n'), ((474, 506), 'subprocess.run', 'subprocess.run', (['"""python test.py"""'], {}), "('python test.py')\n", (488, 506), False, 'import subprocess\n')] |
import random
import cocotb
from cocotb.decorators import coroutine
from cocotb.result import TestFailure, ReturnValue
from cocotb.triggers import RisingEdge, Edge
from cocotblib.misc import log2Up, BoolRandomizer, assertEquals, waitClockedCond, randSignal
class Apb3:
    """Cocotb driver for an APB3 master interface.

    Grabs the ``<name>_PADDR`` .. ``<name>_PRDATA`` signal handles from the
    DUT and provides coroutines implementing the APB3 setup/access phases.
    ``signal <= value`` is cocotb's (deferred) signal-assignment operator,
    not a comparison.
    """
    def __init__(self, dut, name, clk = None):
        # `clk` drives every RisingEdge/waitClockedCond below; transactions
        # require it to be set even though it defaults to None.
        self.clk = clk
        self.PADDR = dut.__getattr__(name + "_PADDR")
        self.PSEL = dut.__getattr__(name + "_PSEL")
        self.PENABLE = dut.__getattr__(name + "_PENABLE")
        self.PREADY = dut.__getattr__(name + "_PREADY")
        self.PWRITE = dut.__getattr__(name + "_PWRITE")
        self.PWDATA = dut.__getattr__(name + "_PWDATA")
        self.PRDATA = dut.__getattr__(name + "_PRDATA")
    def idle(self):
        # Deselect the slave (drive PSEL low).
        self.PSEL <= 0
    @coroutine
    def delay(self, cycle):
        """Wait `cycle` rising edges of the bus clock."""
        for i in range(cycle):
            yield RisingEdge(self.clk)
    @coroutine
    def write(self, address, data, sel = 1):
        """APB3 write: setup phase, then access phase until PREADY."""
        # Setup phase: address/data driven, PENABLE low.
        self.PADDR <= address
        self.PSEL <= sel
        self.PENABLE <= False
        self.PWRITE <= True
        self.PWDATA <= data
        yield RisingEdge(self.clk)
        # Access phase: raise PENABLE and wait for the slave's PREADY.
        self.PENABLE <= True
        yield waitClockedCond(self.clk, lambda : self.PREADY == True)
        # Randomize the released lines to catch DUTs sampling them late.
        randSignal(self.PADDR)
        self.PSEL <= 0
        randSignal(self.PENABLE)
        randSignal(self.PWRITE)
        randSignal(self.PWDATA)
    @coroutine
    def writeMasked(self, address, data, mask, sel = 1):
        """Read-modify-write: only bits set in `mask` take `data`'s value."""
        readThread = self.read(address,sel)
        yield readThread
        yield self.write(address,(readThread.retval & ~mask) | (data & mask),sel)
    @coroutine
    def read(self, address, sel=1):
        """APB3 read; the PRDATA value is returned via ReturnValue."""
        self.PADDR <= address
        self.PSEL <= sel
        self.PENABLE <= False
        self.PWRITE <= False
        randSignal(self.PWDATA)
        yield RisingEdge(self.clk)
        self.PENABLE <= True
        yield waitClockedCond(self.clk, lambda: self.PREADY == True)
        randSignal(self.PADDR)
        self.PSEL <= 0
        randSignal(self.PENABLE)
        randSignal(self.PWRITE)
        # cocotb's pre-await way of returning a value from a coroutine.
        raise ReturnValue(int(self.PRDATA))
    @coroutine
    def readAssert(self, address, data, sel=1):
        """Read `address` and fail the test unless the value equals `data`."""
        readThread = self.read(address,sel)
        yield readThread
        assertEquals(int(readThread.retval), data," APB readAssert failure")
    @coroutine
    def readAssertMasked(self, address, data, mask, sel=1):
        """Like readAssert, but only the bits selected by `mask` are compared."""
        readThread = self.read(address,sel)
        yield readThread
        assertEquals(int(readThread.retval) & mask, data," APB readAssert failure")
    @coroutine
    def pull(self, address, dataValue, dataMask, sel=1):
        """Poll `address` until (value & dataMask) == dataValue."""
        while True:
            readThread = self.read(address, sel)
            yield readThread
            if (int(readThread.retval) & dataMask) == dataValue:
                break
"cocotb.triggers.RisingEdge",
"cocotblib.misc.randSignal",
"cocotblib.misc.waitClockedCond"
] | [((1256, 1278), 'cocotblib.misc.randSignal', 'randSignal', (['self.PADDR'], {}), '(self.PADDR)\n', (1266, 1278), False, 'from cocotblib.misc import log2Up, BoolRandomizer, assertEquals, waitClockedCond, randSignal\n'), ((1310, 1334), 'cocotblib.misc.randSignal', 'randSignal', (['self.PENABLE'], {}), '(self.PENABLE)\n', (1320, 1334), False, 'from cocotblib.misc import log2Up, BoolRandomizer, assertEquals, waitClockedCond, randSignal\n'), ((1343, 1366), 'cocotblib.misc.randSignal', 'randSignal', (['self.PWRITE'], {}), '(self.PWRITE)\n', (1353, 1366), False, 'from cocotblib.misc import log2Up, BoolRandomizer, assertEquals, waitClockedCond, randSignal\n'), ((1375, 1398), 'cocotblib.misc.randSignal', 'randSignal', (['self.PWDATA'], {}), '(self.PWDATA)\n', (1385, 1398), False, 'from cocotblib.misc import log2Up, BoolRandomizer, assertEquals, waitClockedCond, randSignal\n'), ((1797, 1820), 'cocotblib.misc.randSignal', 'randSignal', (['self.PWDATA'], {}), '(self.PWDATA)\n', (1807, 1820), False, 'from cocotblib.misc import log2Up, BoolRandomizer, assertEquals, waitClockedCond, randSignal\n'), ((1962, 1984), 'cocotblib.misc.randSignal', 'randSignal', (['self.PADDR'], {}), '(self.PADDR)\n', (1972, 1984), False, 'from cocotblib.misc import log2Up, BoolRandomizer, assertEquals, waitClockedCond, randSignal\n'), ((2016, 2040), 'cocotblib.misc.randSignal', 'randSignal', (['self.PENABLE'], {}), '(self.PENABLE)\n', (2026, 2040), False, 'from cocotblib.misc import log2Up, BoolRandomizer, assertEquals, waitClockedCond, randSignal\n'), ((2049, 2072), 'cocotblib.misc.randSignal', 'randSignal', (['self.PWRITE'], {}), '(self.PWRITE)\n', (2059, 2072), False, 'from cocotblib.misc import log2Up, BoolRandomizer, assertEquals, waitClockedCond, randSignal\n'), ((1128, 1148), 'cocotb.triggers.RisingEdge', 'RisingEdge', (['self.clk'], {}), '(self.clk)\n', (1138, 1148), False, 'from cocotb.triggers import RisingEdge, Edge\n'), ((1192, 1247), 'cocotblib.misc.waitClockedCond', 'waitClockedCond', 
(['self.clk', '(lambda : self.PREADY == True)'], {}), '(self.clk, lambda : self.PREADY == True)\n', (1207, 1247), False, 'from cocotblib.misc import log2Up, BoolRandomizer, assertEquals, waitClockedCond, randSignal\n'), ((1835, 1855), 'cocotb.triggers.RisingEdge', 'RisingEdge', (['self.clk'], {}), '(self.clk)\n', (1845, 1855), False, 'from cocotb.triggers import RisingEdge, Edge\n'), ((1899, 1954), 'cocotblib.misc.waitClockedCond', 'waitClockedCond', (['self.clk', '(lambda : self.PREADY == True)'], {}), '(self.clk, lambda : self.PREADY == True)\n', (1914, 1954), False, 'from cocotblib.misc import log2Up, BoolRandomizer, assertEquals, waitClockedCond, randSignal\n'), ((891, 911), 'cocotb.triggers.RisingEdge', 'RisingEdge', (['self.clk'], {}), '(self.clk)\n', (901, 911), False, 'from cocotb.triggers import RisingEdge, Edge\n')] |
import re
import torch
import torch_geometric
from torch_geometric.data import Data, Batch
class MultiScaleData(Data):
    r"""A plain python object modeling a single multi-scale graph.

    Accepts the same (optional) attributes as
    :class:`torch_geometric.data.Data` — ``x``, ``edge_index``,
    ``edge_attr``, ``y``, ``pos``, ``norm``, ``face`` — plus arbitrary
    extra keyword attributes.

    The only behavioral extension is :meth:`apply`, which also maps the
    given function over every tensor stored inside *list* attributes,
    not just over plain tensor attributes.
    """

    def __init__(self, x=None, edge_index=None, edge_attr=None, y=None,
                 pos=None, norm=None, face=None, **kwargs):
        # Call Data.__init__ explicitly rather than super(): in the
        # MultiScaleBatch(MultiScaleData, Batch) MRO, super() would
        # dispatch to Batch.__init__ instead of Data.__init__.
        Data.__init__(self, x=x, edge_index=edge_index,
                      edge_attr=edge_attr,
                      y=y, pos=pos,
                      norm=norm, face=face,
                      **kwargs)

    def apply(self, func, *keys):
        """Apply ``func`` to every tensor selected by ``keys``, descending
        one level into list-valued attributes. Returns ``self``."""
        for key, value in self(*keys):
            if torch.is_tensor(value):
                self[key] = func(value)
            if isinstance(value, list):
                for idx, elem in enumerate(value):
                    if torch.is_tensor(elem):
                        self[key][idx] = func(elem)
        return self
class MultiScaleBatch(MultiScaleData, Batch):
    """Batch of MultiScaleData objects collated into one big (disconnected)
    graph, with per-scale offsets applied to the list-valued attributes
    (neighbors / pooling / upsampling indices)."""
    def __init__(self, batch=None, **kwargs):
        MultiScaleData.__init__(self, **kwargs)
        # `batch` maps each node to the index of its originating graph.
        self.batch = batch
        self.__data_class__ = Data
        self.__slices__ = None
    @staticmethod
    def from_data_list(data_list, follow_batch=[]):
        r"""Constructs a batch object from a python list holding
        :class:`torch_geometric.data.Data` objects.
        The assignment vector :obj:`batch` is created on the fly.
        Additionally, creates assignment batch vectors for each key in
        :obj:`follow_batch`."""
        # NOTE(review): mutable default `follow_batch=[]` — harmless here
        # (never mutated) but worth replacing with None in a later pass.
        keys = [set(data.keys) for data in data_list]
        keys = list(set.union(*keys))
        assert 'batch' not in keys
        batch = MultiScaleBatch()
        batch.__data_class__ = data_list[0].__class__
        batch.__slices__ = {key: [0] for key in keys}
        for key in keys:
            batch[key] = []
        for key in follow_batch:
            batch['{}_batch'.format(key)] = []
        # cumsum: per-key node-index offset for plain tensor attributes;
        # cumsum4list: per-key, per-scale offsets for list attributes.
        cumsum = {key: 0 for key in keys}
        cumsum4list = {key: [] for key in keys}
        batch.batch = []
        batch.list_batch = []
        for i, data in enumerate(data_list):
            for key in data.keys:
                item = data[key]
                # Shift index tensors by the running offset (bool masks excluded).
                if torch.is_tensor(item) and item.dtype != torch.bool:
                    item = item + cumsum[key]
                if torch.is_tensor(item):
                    size = item.size(data.__cat_dim__(key, data[key]))
                    # Here particular case for this kind of list of tensors
                    # process the neighbors
                if bool(re.search('(neigh|pool|upsample)', key)):
                    # List-of-tensors attribute: shift each scale by its own
                    # offset (only strictly positive indices; <=0 are sentinels).
                    if isinstance(item, list):
                        if(len(cumsum4list[key]) == 0):  # for the first time
                            cumsum4list[key] = torch.zeros(len(item), dtype=torch.long)
                        for j in range(len(item)):
                            if(torch.is_tensor(item[j]) and item[j].dtype != torch.bool):
                                item[j][item[j] > 0] += cumsum4list[key][j]
                                # print(key, data["{}_size".format(key)][j])
                                cumsum4list[key][j] += data["{}_size".format(key)][j]
                else:
                    # NOTE(review): this `else` pairs with the re.search test,
                    # so tensor keys not matching the regex get size reset to
                    # 1 and their __slices__ entry only counts graphs — confirm
                    # whether that is intentional.
                    size = 1
                batch.__slices__[key].append(size + batch.__slices__[key][-1])
                cumsum[key] += data.__inc__(key, item)
                batch[key].append(item)
                if key in follow_batch:
                    item = torch.full((size, ), i, dtype=torch.long)
                    batch['{}_batch'.format(key)].append(item)
            num_nodes = data.num_nodes
            if num_nodes is not None:
                item = torch.full((num_nodes, ), i, dtype=torch.long)
                batch.batch.append(item)
            # indice of the batch at each scale
            if(data.points is not None):
                list_batch = []
                for j in range(len(data['points'])):
                    size = len(data.points[j])
                    item = torch.full((size, ), i, dtype=torch.long)
                    list_batch.append(item)
                batch.list_batch.append(list_batch)
        if num_nodes is None:
            batch.batch = None
        # Concatenate the per-graph pieces of every attribute.
        for key in batch.keys:
            item = batch[key][0]
            if torch.is_tensor(item):
                batch[key] = torch.cat(batch[key],
                                       dim=data_list[0].__cat_dim__(key, item))
            elif isinstance(item, int) or isinstance(item, float):
                batch[key] = torch.tensor(batch[key])
            elif isinstance(item, list):
                # List attribute: concatenate scale-by-scale (column-wise).
                item = batch[key]
                res = []
                for j in range(len(item[0])):
                    col = [f[j] for f in batch[key]]
                    res.append(torch.cat(col,
                                         dim=data_list[0].__cat_dim__(key, col)))
                batch[key] = res
                # print('item', item)
            else:
                raise ValueError('{} is an Unsupported attribute type'.format(type(item)))
        # Copy custom data functions to batch (does not work yet):
        # if data_list.__class__ != Data:
        #     org_funcs = set(Data.__dict__.keys())
        #     funcs = set(data_list[0].__class__.__dict__.keys())
        #     batch.__custom_funcs__ = funcs.difference(org_funcs)
        #     for func in funcs.difference(org_funcs):
        #         setattr(batch, func, getattr(data_list[0], func))
        if torch_geometric.is_debug_enabled():
            batch.debug()
        return batch.contiguous()
| [
"torch.full",
"torch.is_tensor",
"torch_geometric.is_debug_enabled",
"torch_geometric.data.Data.__init__",
"torch.tensor",
"re.search"
] | [((1507, 1625), 'torch_geometric.data.Data.__init__', 'Data.__init__', (['self'], {'x': 'x', 'edge_index': 'edge_index', 'edge_attr': 'edge_attr', 'y': 'y', 'pos': 'pos', 'norm': 'norm', 'face': 'face'}), '(self, x=x, edge_index=edge_index, edge_attr=edge_attr, y=y,\n pos=pos, norm=norm, face=face, **kwargs)\n', (1520, 1625), False, 'from torch_geometric.data import Data, Batch\n'), ((6661, 6695), 'torch_geometric.is_debug_enabled', 'torch_geometric.is_debug_enabled', ([], {}), '()\n', (6693, 6695), False, 'import torch_geometric\n'), ((1798, 1819), 'torch.is_tensor', 'torch.is_tensor', (['item'], {}), '(item)\n', (1813, 1819), False, 'import torch\n'), ((5447, 5468), 'torch.is_tensor', 'torch.is_tensor', (['item'], {}), '(item)\n', (5462, 5468), False, 'import torch\n'), ((3462, 3483), 'torch.is_tensor', 'torch.is_tensor', (['item'], {}), '(item)\n', (3477, 3483), False, 'import torch\n'), ((4827, 4872), 'torch.full', 'torch.full', (['(num_nodes,)', 'i'], {'dtype': 'torch.long'}), '((num_nodes,), i, dtype=torch.long)\n', (4837, 4872), False, 'import torch\n'), ((1967, 1991), 'torch.is_tensor', 'torch.is_tensor', (['item[i]'], {}), '(item[i])\n', (1982, 1991), False, 'import torch\n'), ((3345, 3366), 'torch.is_tensor', 'torch.is_tensor', (['item'], {}), '(item)\n', (3360, 3366), False, 'import torch\n'), ((3693, 3732), 're.search', 're.search', (['"""(neigh|pool|upsample)"""', 'key'], {}), "('(neigh|pool|upsample)', key)\n", (3702, 3732), False, 'import re\n'), ((4621, 4661), 'torch.full', 'torch.full', (['(size,)', 'i'], {'dtype': 'torch.long'}), '((size,), i, dtype=torch.long)\n', (4631, 4661), False, 'import torch\n'), ((5164, 5204), 'torch.full', 'torch.full', (['(size,)', 'i'], {'dtype': 'torch.long'}), '((size,), i, dtype=torch.long)\n', (5174, 5204), False, 'import torch\n'), ((5697, 5721), 'torch.tensor', 'torch.tensor', (['batch[key]'], {}), '(batch[key])\n', (5709, 5721), False, 'import torch\n'), ((4029, 4053), 'torch.is_tensor', 'torch.is_tensor', 
(['item[j]'], {}), '(item[j])\n', (4044, 4053), False, 'import torch\n')] |
"""
This script adds a specific column to the `bug_typedata_projectname_ss` tables. The added column contains the nesting depth (>=0) of each line. (BT stands for bug_type)
"""
import os, sys, psycopg2, ntpath, traceback, subprocess
from pprint import pprint
#--------------------------------------------------------------------------------------------------------------------------
def get_BT_data(project_name):
    """Fetch all rows from the project's `bug_typedata_<project>_ss` table.

    :param project_name: project name; dashes are mapped to underscores to
        form a valid table name.
    :return: list of lists, one `[file_name, sha, line_num, parents_all]`
        entry per row (lists so callers can append to each row).
    :raises Exception: re-raises any database error after printing its
        traceback.
    """
    BT_ss_table_name = "err_corr_c.bug_typedata_" + project_name + "_ss"
    BT_ss_table_name = BT_ss_table_name.replace('-', '_')
    BT_data = []
    con = None
    try:
        con = psycopg2.connect(database='saheel', user='saheel')
        cur = con.cursor()
        cur.execute("SELECT file_name, sha, line_num, parents_all FROM " + BT_ss_table_name)
        BT_data = list(cur.fetchall())
    except Exception:
        # Bare `raise` keeps the original traceback (unlike `raise e`);
        # print_exc() already includes the exception message.
        traceback.print_exc()
        raise
    finally:
        # Always release the connection: the original leaked it on error
        # and hit an unbound `con` when connect() itself failed.
        if con is not None:
            con.close()
    # Make it a list of lists instead of list of tuples.
    return [list(BT_tuple) for BT_tuple in BT_data]
#--------------------------------------------------------------------------------------------------------------------------
def dump_BT_prime_table(BT_data, project_name):
    """(Re)create the temporary `BT_prime_<project>` table and bulk-insert
    the rows of `BT_data` into it.

    :param BT_data: iterable of (file_name, sha, line_num, parents_all,
        depth) rows.
    :param project_name: project name; dashes mapped to underscores.
    :raises Exception: re-raises any database error after printing its
        traceback.
    """
    BT_prime_table_name = "err_corr_c.BT_prime_" + project_name
    BT_prime_table_name = BT_prime_table_name.replace('-', '_')
    con = None
    try:
        con = psycopg2.connect(database='saheel', user='saheel')
        cur = con.cursor()
        cur.execute("DROP TABLE IF EXISTS " + BT_prime_table_name + " ")
        query = """
                CREATE TABLE """ + BT_prime_table_name + """ (file_name text,
                                                              sha varchar(42),
                                                              line_num integer,
                                                              parents_all text,
                                                              depth integer)
                """
        cur.execute(query)
        query = "INSERT INTO " + BT_prime_table_name + " (file_name, sha, line_num, parents_all, depth) VALUES (%s, %s, %s, %s, %s)"
        cur.executemany(query, BT_data)
        con.commit()
    except Exception:
        # Bare `raise` keeps the original traceback (unlike `raise e`).
        traceback.print_exc()
        raise
    finally:
        # Always release the connection (the original leaked it on error).
        if con is not None:
            con.close()
#--------------------------------------------------------------------------------------------------------------------------
def join_BT_ss_and_BT_prime(project_name):
    """Merge the `depth` column of BT_prime into the BT_ss table.

    Joins `bug_typedata_<project>_ss` with `BT_prime_<project>` on
    (file_name, sha, line_num) into a temporary `_ss_wd` table, drops the
    two source tables, then renames the merged table back to `_ss`.

    :param project_name: project name; dashes mapped to underscores.
    :raises Exception: re-raises any database error after printing its
        traceback.
    """
    BT_ss_table_name = "err_corr_c.bug_typedata_" + project_name + "_ss"
    BT_ss_table_name = BT_ss_table_name.replace('-', '_')
    BT_prime_table_name = "err_corr_c.BT_prime_" + project_name
    BT_prime_table_name = BT_prime_table_name.replace('-', '_')
    BT_merged_table_name = "err_corr_c.bug_typedata_" + project_name + "_ss_wd"
    BT_merged_table_name = BT_merged_table_name.replace('-', '_')
    con = None
    try:
        con = psycopg2.connect(database='saheel', user='saheel')
        cur = con.cursor()
        # Drop a leftover `depth` column from a previous (partial) run.
        cur.execute("ALTER TABLE " + BT_ss_table_name + " DROP COLUMN IF EXISTS depth")
        query = """
                SELECT ss.*, prime.depth
                INTO """ + BT_merged_table_name + """
                FROM """ + BT_ss_table_name + """ as ss
                JOIN """ + BT_prime_table_name + """ as prime
                ON (ss.file_name = prime.file_name AND
                    ss.sha = prime.sha AND
                    ss.line_num = prime.line_num)
                """
        cur.execute(query)
        con.commit()
        cur.execute("DROP TABLE " + BT_prime_table_name)
        cur.execute("DROP TABLE " + BT_ss_table_name)
        cur.execute("ALTER TABLE " + BT_merged_table_name + " RENAME TO " + BT_ss_table_name.split('.')[1])
        con.commit()
    except Exception:
        # Bare `raise` keeps the original traceback (unlike `raise e`).
        traceback.print_exc()
        raise
    finally:
        # Always release the connection (the original leaked it on error).
        if con is not None:
            con.close()
#--------------------------------------------------------------------------------------------------------------------------
if __name__ == "__main__":
    # Entry point: append a nesting-depth column to the project's
    # bug_typedata `_ss` table via a temporary BT_prime table.
    if len(sys.argv) != 2:
        print("\nUsage: python add_depth_to_BT_table.py <project_name>")
        print("\nSample usage: python add_depth_to_BT_table.py libgit2")
        raise ValueError("Incorrect input arguments. Aborting...")
    project_name = sys.argv[1]

    print("\nNow fetching BT_ss_data...")
    # BT_data is a list of lists; each element list = [file_name, sha, line_num, parents_all]
    BT_data = get_BT_data(project_name)
    if not BT_data:
        raise ValueError("`get_BT_data` returned an empty `BT_data` list. Aborting...")

    print("\nNow creating BT_prime_data, i.e., table with `depth` appended to BT_ss_data...")
    # Append `depth` to each row: number of parents in the '-'-separated
    # `parents_all` string, or 0 when the line has no parents at all.
    for row in BT_data:
        parents_all = row[3]
        row.append(0 if parents_all == '' else parents_all.count('-') + 1)

    print("\nNow dumping the temporary table BT_prime. This may take approx. 3-4 min per million LOC...")
    dump_BT_prime_table(BT_data, project_name)

    print("\nNow joining BT_ss and BT_prime to get desired table. This takes about 2 min per million LOC...")
    join_BT_ss_and_BT_prime(project_name)
#--------------------------------------------------------------------------------------------------------------------------
| [
"psycopg2.connect",
"traceback.print_exc"
] | [((587, 637), 'psycopg2.connect', 'psycopg2.connect', ([], {'database': '"""saheel"""', 'user': '"""saheel"""'}), "(database='saheel', user='saheel')\n", (603, 637), False, 'import os, sys, psycopg2, ntpath, traceback, subprocess\n'), ((1446, 1496), 'psycopg2.connect', 'psycopg2.connect', ([], {'database': '"""saheel"""', 'user': '"""saheel"""'}), "(database='saheel', user='saheel')\n", (1462, 1496), False, 'import os, sys, psycopg2, ntpath, traceback, subprocess\n'), ((2999, 3049), 'psycopg2.connect', 'psycopg2.connect', ([], {'database': '"""saheel"""', 'user': '"""saheel"""'}), "(database='saheel', user='saheel')\n", (3015, 3049), False, 'import os, sys, psycopg2, ntpath, traceback, subprocess\n'), ((848, 869), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (867, 869), False, 'import os, sys, psycopg2, ntpath, traceback, subprocess\n'), ((2306, 2327), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (2325, 2327), False, 'import os, sys, psycopg2, ntpath, traceback, subprocess\n'), ((3942, 3963), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (3961, 3963), False, 'import os, sys, psycopg2, ntpath, traceback, subprocess\n')] |
""" Cisco_IOS_XR_ipv6_nd_subscriber_cfg
This module contains a collection of YANG definitions
for Cisco IOS\-XR ipv6\-nd\-subscriber package configuration.
This YANG module augments the
Cisco\-IOS\-XR\-subscriber\-infra\-tmplmgr\-cfg
module with configuration data.
Copyright (c) 2013\-2018 by Cisco Systems, Inc.
All rights reserved.
"""
from collections import OrderedDict
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class Ipv6NdRouterPrefTemplate(Enum):
    """
    Ipv6NdRouterPrefTemplate (Enum Class)
    Ipv6 nd router pref template
    .. data:: high = 1
    	High preference
    .. data:: medium = 2
    	Medium preference
    .. data:: low = 3
    	Low preference
    """
    # NOTE: auto-generated from the Cisco IOS-XR YANG model (see module
    # docstring); the YLeaf values (1-3) mirror the YANG enum values.
    high = Enum.YLeaf(1, "high")
    medium = Enum.YLeaf(2, "medium")
    low = Enum.YLeaf(3, "low")
| [
"ydk.types.Enum.YLeaf"
] | [((948, 969), 'ydk.types.Enum.YLeaf', 'Enum.YLeaf', (['(1)', '"""high"""'], {}), "(1, 'high')\n", (958, 969), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((984, 1007), 'ydk.types.Enum.YLeaf', 'Enum.YLeaf', (['(2)', '"""medium"""'], {}), "(2, 'medium')\n", (994, 1007), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((1019, 1039), 'ydk.types.Enum.YLeaf', 'Enum.YLeaf', (['(3)', '"""low"""'], {}), "(3, 'low')\n", (1029, 1039), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n')] |
from django.contrib import admin
from .models import Job
from .models import JobCandidate
@admin.register(Job)
class JobAdmin(admin.ModelAdmin):
    """Admin options for Job: change list ordered by creation date,
    searchable by title."""

    ordering = ['date_created']
    search_fields = ['title']


# JobCandidate is registered with the default ModelAdmin options.
admin.site.register(JobCandidate)
| [
"django.contrib.admin.site.register"
] | [((190, 224), 'django.contrib.admin.site.register', 'admin.site.register', (['Job', 'JobAdmin'], {}), '(Job, JobAdmin)\n', (209, 224), False, 'from django.contrib import admin\n'), ((225, 258), 'django.contrib.admin.site.register', 'admin.site.register', (['JobCandidate'], {}), '(JobCandidate)\n', (244, 258), False, 'from django.contrib import admin\n')] |
"""Module providing functions to build an init script
The :func:`mkinit` function will generate a ``/init`` script.
``do_foo()`` functions write a string performing the foo action into a
stream. This stream should be the init script.
``_fun_foo()`` functions write a string defining the foo function into a
stream. This stream should be the init script.
Helper functions available in the init script that can be used by
:class:`cmkinitramfs.data.Data` classes:
- :func:`_fun_die`: Fatal error handler (does not return).
- :func:`_fun_log`: Logging functions (always successful).
Special helper function available in the init script:
:func:`_fun_rescue_shell`, :func:`_fun_panic`.
Init environment variables:
- ``HOME``
- ``PATH``
Init global variables:
- ``PRINTK``: Initial kernel log level.
- ``INIT``: Program to run as init process after the initramfs.
- ``RD_XXX``: If set, feature *XXX* is enabled.
- ``RD_BREAK_XXX``: If set, breakpoint *XXX* is enabled.
The init script should never rely on global variables being set or unset,
it should always assign a default value if not set.
"""
from __future__ import annotations
import itertools
import locale
from enum import Enum, auto
from shlex import quote
from typing import Iterable, IO, Mapping, Optional
from .data import Data
#: Global Busybox applet dependencies
BUSYBOX_COMMON_DEPS = {
'[', 'cat', 'cut', 'echo', 'env', 'exec', 'exit', 'export', 'kill',
'mount', 'return', 'set', 'shift', 'switch_root', 'sync', 'test',
'umount', 'uname',
}
#: Keymap loading Busybox applet dependencies
BUSYBOX_KEYMAP_DEPS = {'loadkmap', 'kbd_mode'}
#: Kernel module loading Busybox applet dependencies
BUSYBOX_KMOD_DEPS = {'depmod', 'modprobe'}
#: Get a quoted TAB character
TAB = '"$(printf \'\\t\')"'
#: Get a quoted EOL character
EOL = '"$(printf \'\\n\\b\')"'
class Breakpoint(Enum):
    """Breakpoint in the boot process
    Breakpoints can be enabled by adding rd.break to the kernel command-line
    (e.g. ``./kernel.img foo rd.break=init``).
    Setting ``rd.break=foo,bar`` will enable both ``foo`` and ``bar``.
    Environment variables can also be set to enable them
    (e.g. ``./kernel.img foo RD_BREAK_EARLY=true``).
    """
    # Members use auto(): only the member identity matters, never its value.
    #: Early break: break before any action, including command-line parsing.
    #: Can be set with the ``RD_BREAK_EARLY`` environment variable.
    EARLY = auto()
    #: ``init``: Break after initramfs initialization.
    #: Can also be set with the ``RD_BREAK_INIT`` environment variable.
    INIT = auto()
    #: ``module``: Break after loading kernel modules.
    #: Can also be set with the ``RD_BREAK_MODULE`` environment variable.
    #: Alias: ``modules``.
    MODULE = auto()
    #: ``rootfs``: Break after mounting the root filesystem.
    #: Can also be set with the ``RD_BREAK_ROOTFS`` environment variable.
    ROOTFS = auto()
    #: ``mount``: Break after mounting all filesystems.
    #: Can also be set with the ``RD_BREAK_MOUNT`` environment variable.
    #: Alias: ``mounts``.
    MOUNT = auto()
def _fun_rescue_shell(out: IO[str]) -> None:
"""Define the rescue_shell function
``rescue_shell`` drop the user to ``/bin/sh``.
Arguments: none.
This function *should not* be called from a subshell.
This function *does not* return.
:param out: Stream to write into
"""
out.writelines((
"rescue_shell()\n",
"{\n",
"\tlog 0 'Dropping into a shell'\n",
"\texec /bin/sh 0<>/dev/console 1<>/dev/console 2<>/dev/console\n",
"\temerg 'Failed to start rescue shell'\n",
"\tpanic\n"
"}\n\n",
))
def _fun_panic(out: IO[str]) -> None:
"""Define the panic function
``panic`` causes a kernel panic by exiting ``/init``.
Arguments: none.
This function *should not* be called from a subshell.
This function *does not* return.
:param out: Stream to write into
"""
out.writelines((
"panic()\n",
"{\n",
"\tlog 0 'Terminating init'\n",
"\tsync\n",
"\texit\n",
"}\n\n",
))
def _fun_die(out: IO[str]) -> None:
"""Define the die function
``die`` will either start a rescue shell or cause a kernel panic,
wether ``RD_PANIC`` is set or not.
Arguments: error message.
This function *should not* be called from a subshell.
This function *does not* return.
:param out: Stream to write into
"""
out.writelines((
"die()\n",
"{\n",
"\temerg \"$@\"\n",
"\tkill -TERM -1 || err \'Failed to kill all processes\'\n",
"\t[ -n \"${RD_PANIC+x}\" ] && panic || rescue_shell\n",
"}\n\n",
))
def _fun_log(out: IO[str]) -> None:
"""Define the logging functions
``log``: log a message.
- Argument 1: syslog level number, from 0 to 7.
- Additionnal arguments: message to log.
Logs printed to stderr:
- Level ≤ 4: always
- 5 ≤ level ≤ 6: if debug enabled or quiet disabled
- Level = 7: if debug enabled
Helper functions:
- ``emerg``: log a message for a panic condition.
The message is prepended by 'FATAL:'.
- ``alert``: log a critical error message requiring immediate action.
The message is prepended by 'ERROR:'.
- ``crit``: log a critical error message.
The message is prepended by 'ERROR:'.
- ``err``: log an error message.
The message is prepended by 'ERROR:'.
- ``warn``: log a warning message.
The message is prepended by 'WARNING:'.
- ``notice``: log a significant/unusual informational message.
- ``info``: log an informational message.
- ``debug``: log a debug-level message.
Helper functions will call ``log`` with the coresponding syslog level.
Logging functions always return successfully.
:param out: Stream to write into
"""
out.writelines((
'log()\n',
'{\n',
'\t[ "${1-}" -lt 8 ] && lvl="$1" && shift || lvl=1\n',
'\t[ $# -ge 1 ] || return 0\n',
'\techo "<$((24 | lvl))>initramfs:" "$@" 1>/dev/kmsg\n',
'\tif [ "${lvl}" -eq 5 ] || [ "${lvl}" -eq 6 ] ',
'&& [ -z "${RD_QUIET+x}" ] || [ -n "${RD_DEBUG+x}" ] ',
'|| [ "${lvl}" -le 4 ]\n',
'\tthen echo "$@" 1>&2\n',
'\tfi\n',
'\treturn 0\n',
'}\n',
'\n',
'emerg() { log 0 \'FATAL:\' "$@" ; }\n',
'alert() { log 1 \'ERROR:\' "$@" ; }\n',
'crit() { log 2 \'ERROR:\' "$@" ; }\n',
'err() { log 3 \'ERROR:\' "$@" ; }\n',
'warn() { log 4 \'ERROR:\' "$@" ; }\n',
'notice() { log 5 "$@" ; }\n',
'info() { log 6 "$@" ; }\n',
'debug() { log 7 "$@" ; }\n',
'\n',
))
def do_header(out: IO[str], home: str = '/root', path: str = '/bin:/sbin') \
        -> None:
    """Write the /init header.

    Emits the ``/bin/sh`` shebang, exports the ``HOME`` and ``PATH``
    environment variables, then defines the global helper functions
    (``rescue_shell``, ``panic``, ``die`` and the logging helpers).

    :param out: Stream to write into
    :param home: ``HOME`` environment variable
    :param path: ``PATH`` environment variable
    """
    out.write("#!/bin/sh\n\n")
    for var, value in (('HOME', home), ('PATH', path)):
        out.write(f"{var}={quote(value)}\n")
        out.write(f"export {var}\n")
    out.write("\n")
    for define in (_fun_rescue_shell, _fun_panic, _fun_die, _fun_log):
        define(out)
def do_init(out: IO[str]) -> None:
    """Initialize the init environment
    - Check the current PID is 1
    - Mount ``/proc``, ``/sys``, ``/dev``
    - Save the current kernel log level, then set it to 4
      (``KERN_ERR`` and higher priority)
    - Generate modules.dep when kernel modules are present
    :param out: Stream to write into
    """
    out.writelines((
        "debug 'Initialization'\n",
        "test $$ -eq 1 || die 'init expects to be run as PID 1'\n",
        "mount -t proc none /proc || die 'Failed to mount /proc'\n",
        "mount -t sysfs none /sys || die 'Failed to mount /sys'\n",
        "mount -t devtmpfs none /dev || die 'Failed to mount /dev'\n",
        # TAB splices a literal tab character into cut's -d argument;
        # PRINTK saves the initial console log level for later restore.
        'PRINTK="$(cut -d', TAB, ' -f1 -s /proc/sys/kernel/printk)"\n',
        "echo 4 1>/proc/sys/kernel/printk || ",
        # NB: the next two strings are joined by implicit concatenation
        # (no comma) — they form a single line of the err/depmod pair.
        'err \'Failed to set kernel log level to 4\'\n'
        '[ ! -d "/lib/modules/$(uname -r)" ] || depmod || ',
        'warn \'Failed to generate modules.dep\'\n',
        "\n",
    ))
def do_cmdline(out: IO[str]) -> None:
    """Parse the kernel command line for known parameters
    Note: the command line is parsed up to "--", arguments after this
    are passed through to the final init process.
    Parsed parameters:
    - ``init=<path to init>``: Set the program to run as init process
      after the initramfs.
    - ``debug``: Enable debugging, see ``rd.debug``.
    - ``quiet``: Enable quiet mode, see ``rd.quiet``.
    - ``rd.break={init|rootfs|mount}``: Stops the boot process,
      defaults to ``rootfs``. See :class:`Breakpoint`.
    - ``rd.debug``: Enable debugging mode: output verbose informations.
      If quiet mode is disabled, enable shell trace (with ``set -x``).
    - ``rd.panic``: On fatal error: cause a kernel panic rather than
      dropping into a shell.
    - ``rd.quiet``: Enable quiet mode: reduce verbosity.
    :param out: Stream to write into
    """
    # Everything below is a literal shell script fragment appended to
    # /init; each recognized parameter sets an RD_* shell variable that
    # later sections (do_break, logging) test for.
    out.writelines((
        "debug 'Parsing command-line'\n",
        "for cmdline in $(cat /proc/cmdline); do\n",
        "\tcase \"${cmdline}\" in\n",
        "\t--) break ;;\n",
        "\tinit=*) INIT=\"${cmdline#*=}\" ;;\n"
        # NOTE(review): no comma above — the two literals concatenate
        # implicitly; harmless for writelines but easy to misread.
        "\tdebug) RD_DEBUG=true ;;\n",
        "\tquiet) RD_QUIET=true ;;\n",
        "\trd.break) RD_BREAK_ROOTFS=true ;;\n",
        # rd.break=a,b,... : split on commas and set one flag per name
        "\trd.break=*)\n",
        "\t\told_ifs=\"${IFS}\"\n",
        "\t\tIFS=','\n",
        "\t\tfor bpoint in ${cmdline#*=}; do\n",
        "\t\t\tcase \"${bpoint}\" in\n",
        "\t\t\tinit) RD_BREAK_INIT=true ;;\n",
        "\t\t\tmodule|modules) RD_BREAK_MODULE=true ;;\n",
        "\t\t\trootfs) RD_BREAK_ROOTFS=true ;;\n",
        "\t\t\tmount|mounts) RD_BREAK_MOUNT=true ;;\n",
        "\t\t\t*) err \"Unknown breakpoint ${bpoint}\" ;;\n",
        "\t\t\tesac\n",
        "\t\tdone\n",
        "\t\tIFS=\"${old_ifs}\"\n",
        "\t\t;;\n",
        "\trd.debug) RD_DEBUG=true ;;\n",
        "\trd.panic) RD_PANIC=true ;;\n",
        "\trd.quiet) RD_QUIET=true ;;\n",
        # Unknown parameters are accumulated and reported once, below
        "\t*) unknown_cmd=\"${unknown_cmd-}${unknown_cmd+ }${cmdline}\" ;;\n",
        "\tesac\n",
        "done\n",
        "\n",
        # Debug mode without quiet: enable shell tracing
        "[ -n \"${RD_DEBUG+x}\" ] && [ -z \"${RD_QUIET+x}\" ] ",
        "&& PS4='+ $0:$LINENO: ' && set -x\n",
        "[ -n \"${unknown_cmd+x}\" ] ",
        "&& debug \"Skipped unknown cmdlines: ${unknown_cmd}\"\n",
        "unset unknown_cmd\n",
        # Outside debug mode, stdout is discarded entirely
        "[ -n \"${RD_DEBUG+x}\" ] || exec 1<>/dev/null || ",
        "err 'Failed to redirect stdout to /dev/null'\n",
        "\n",
    ))
def do_keymap(out: IO[str], keymap_file: str, unicode: bool = True) -> None:
    """Emit the shell commands that load a console keymap.

    :param out: Stream to write into
    :param keymap_file: Absolute path of the file to load
    :param unicode: Set the keyboard in unicode mode (rather than ASCII)
    """
    if unicode:
        mode_name = 'unicode'
        kbd_flag = '-u'
    else:
        mode_name = 'ASCII'
        kbd_flag = '-a'
    quoted_file = quote(keymap_file)
    lines = [
        "info 'Loading keymap'\n",
        f"[ -f {quoted_file} ] || err ",
        quote(f'Keymap file {keymap_file} not found'), '\n',
        f"kbd_mode {kbd_flag} || crit ",
        quote(f'Failed to set keyboard mode to {mode_name}'), '\n',
        f"loadkmap <{quoted_file} || crit ",
        quote(f'Failed to load keymap {keymap_file}'), '\n',
        "\n",
    ]
    out.writelines(lines)
def do_module(out: IO[str], module: str, *args: str) -> None:
    """Emit the shell commands that load one kernel module.

    :param out: Stream to write into
    :param module: Name of the module to load
    :param args: Arguments for the module (passed to ``modprobe``)
    """
    # modules.builtin may use '-' where the module name uses '_', so the
    # grep pattern (and the file, via tr) is normalized to dashes.
    search = quote(f'/{module.replace("_", "-")}\\.ko')
    arg_parts = [f'{quote(arg)} ' for arg in args]
    script = [
        'if cat "/lib/modules/$(uname -r)/modules.builtin" 2>/dev/null | '
        f'tr _ - | grep -q {search}; then\n',
        f"\tinfo 'Loading kernel module {module}'\n",
        f"\tmodprobe {quote(module)} ",
    ]
    script.extend(arg_parts)
    script.extend((
        '|| crit ',
        quote(f'Failed to load module {module}'), '\n',
        'fi\n',
        '\n',
    ))
    out.writelines(script)
def do_break(out: IO[str], breakpoint_: Breakpoint,
             scripts: Iterable[str] = ()) -> None:
    """Emit the shell code that drops into a shell if rd.break is set.

    :param out: Stream to write into
    :param breakpoint_: Which breakpoint to check
    :param scripts: User commands to run before the breakpoint
    """
    # Shell variable corresponding to each breakpoint; the variables are
    # set by the command-line parser section of the script.
    shell_vars = {
        Breakpoint.EARLY: 'RD_BREAK_EARLY',
        Breakpoint.INIT: 'RD_BREAK_INIT',
        Breakpoint.MODULE: 'RD_BREAK_MODULE',
        Breakpoint.ROOTFS: 'RD_BREAK_ROOTFS',
        Breakpoint.MOUNT: 'RD_BREAK_MOUNT',
    }
    try:
        breakname = shell_vars[breakpoint_]
    except KeyError:
        raise ValueError(f"Unknown breakpoint: {breakpoint_}") from None
    if scripts:
        out.write(f"info 'Running user scripts for {breakpoint_}'\n")
        for command in scripts:
            out.write(command)
            out.write("\n")
        out.write("\n")
    out.writelines((
        "[ -n \"${", breakname, "+x}\" ] && notice ",
        quote(f"Reached {breakpoint_}"), " && rescue_shell\n\n",
    ))
def do_switch_root(out: IO[str], newroot: Data, init: str = '/sbin/init') \
        -> None:
    """Cleanup and switch root
    - Print debugging information
    - Restore kernel log level (set to boot-time default if not possible)
    - Kill all processes
    - Unmount ``/dev``, ``/sys``, ``/proc``
    - Switch root
    :param out: Stream to write into
    :param newroot: Data to use as new root
    :param init: Init process to execute from the new root
    """
    # Literal shell fragment ending the /init script; INIT may already
    # have been set by the init= kernel parameter (see do_cmdline).
    out.writelines((
        '[ -z "${INIT+x}" ] && INIT=', quote(init), '\n',
        'info "Run ${INIT} as init process"\n',
        'debug \' with arguments:\'\n',
        'for arg in "${INIT}" "$@"; do debug " ${arg}"; done\n',
        'debug \' with environment:\'\n',
        # Iterate env one entry per line by setting IFS to a newline
        'old_ifs="${IFS}"\n',
        'IFS=', EOL, '\n',
        'for var in $(env); do debug " ${var}"; done\n',
        'IFS="${old_ifs}"\n',
        '\n',
        # PRINTK was saved by do_init; fall back to the boot-time default
        # (field 4 of /proc/sys/kernel/printk) if it is unset.
        '[ -z "${PRINTK+x}" ] && PRINTK=',
        '"$(cut -d', TAB, ' -f4 -s /proc/sys/kernel/printk)"\n',
        'echo "${PRINTK}" 1>/proc/sys/kernel/printk || ',
        'err "Failed to restore kernel log level to ${PRINTK}"\n',
        'exec 0<>/dev/console 1<>/dev/console 2<>/dev/console || ',
        'err \'Failed to restore input/output to console\'\n',
        'kill -TERM -1 || err \'Failed to kill all processes\'\n',
        "umount -l /dev || err 'Failed to unmount /dev'\n",
        "umount -l /proc || err 'Failed to unmount /proc'\n",
        "umount -l /sys || err 'Failed to unmount /sys'\n",
        'exec switch_root ', newroot.path(), ' "${INIT}" "$@"\n',
        # Only reached if exec failed
        "die 'Failed to switch root'\n",
        "\n",
    ))
def mkinit(
        out: IO[str],
        root: Data,
        mounts: Iterable[Data] = (),
        keymap: Optional[str] = None,
        modules: Optional[Mapping[str, Iterable[str]]] = None,
        scripts: Optional[Mapping[Breakpoint, Iterable[str]]] = None,
        ) -> None:  # noqa: E123
    """Generate the complete /init script.

    :param out: Stream to write into
    :param root: :class:`Data` to use as rootfs
    :param mounts: :class:`Data` needed in addition of rootfs
    :param keymap: Path of the keymap to load, :data:`None` means no keymap
    :param modules: Kernel modules to be loaded in the initramfs:
        ``{module: (arg, ...)}``. ``module`` is the module name string,
        and ``(arg, ...)``` is the iterable with the module parameters.
    :param scripts: User commands to run. ``{breakpoint: commands}``:
        ``breakpoint`` is the :class:`Breakpoint` where the commands will
        be run. ``commands`` is the iterable with the commands.
    """
    modules = modules if modules is not None else {}
    scripts = scripts if scripts is not None else {}
    # Collect the type of every Data object (and of its dependencies) so
    # each type's one-time shell initialization is emitted exactly once.
    datatypes = set()
    for data in itertools.chain((root,), mounts):
        datatypes.add(type(data))
        datatypes.update(type(dep) for dep in data.iter_all_deps())
    do_header(out)
    do_break(out, Breakpoint.EARLY, scripts.get(Breakpoint.EARLY, ()))
    do_init(out)
    do_cmdline(out)
    if keymap is not None:
        is_utf8 = locale.getdefaultlocale()[1] == 'UTF-8'
        do_keymap(out, keymap, unicode=is_utf8)
    for datatype in datatypes:
        datatype.initialize(out)
    do_break(out, Breakpoint.INIT, scripts.get(Breakpoint.INIT, ()))
    for mod_name, mod_args in modules.items():
        do_module(out, mod_name, *mod_args)
    do_break(out, Breakpoint.MODULE, scripts.get(Breakpoint.MODULE, ()))
    root.load(out)
    do_break(out, Breakpoint.ROOTFS, scripts.get(Breakpoint.ROOTFS, ()))
    for mount in mounts:
        mount.load(out)
    do_break(out, Breakpoint.MOUNT, scripts.get(Breakpoint.MOUNT, ()))
    do_switch_root(out, root)
| [
"itertools.chain",
"locale.getdefaultlocale",
"enum.auto",
"shlex.quote"
] | [((2381, 2387), 'enum.auto', 'auto', ([], {}), '()\n', (2385, 2387), False, 'from enum import Enum, auto\n'), ((2526, 2532), 'enum.auto', 'auto', ([], {}), '()\n', (2530, 2532), False, 'from enum import Enum, auto\n'), ((2702, 2708), 'enum.auto', 'auto', ([], {}), '()\n', (2706, 2708), False, 'from enum import Enum, auto\n'), ((2857, 2863), 'enum.auto', 'auto', ([], {}), '()\n', (2861, 2863), False, 'from enum import Enum, auto\n'), ((3031, 3037), 'enum.auto', 'auto', ([], {}), '()\n', (3035, 3037), False, 'from enum import Enum, auto\n'), ((16058, 16090), 'itertools.chain', 'itertools.chain', (['(root,)', 'mounts'], {}), '((root,), mounts)\n', (16073, 16090), False, 'import itertools\n'), ((11237, 11282), 'shlex.quote', 'quote', (['f"""Keymap file {keymap_file} not found"""'], {}), "(f'Keymap file {keymap_file} not found')\n", (11242, 11282), False, 'from shlex import quote\n'), ((11356, 11403), 'shlex.quote', 'quote', (['f"""Failed to set keyboard mode to {mode}"""'], {}), "(f'Failed to set keyboard mode to {mode}')\n", (11361, 11403), False, 'from shlex import quote\n'), ((11471, 11516), 'shlex.quote', 'quote', (['f"""Failed to load keymap {keymap_file}"""'], {}), "(f'Failed to load keymap {keymap_file}')\n", (11476, 11516), False, 'from shlex import quote\n'), ((12176, 12216), 'shlex.quote', 'quote', (['f"""Failed to load module {module}"""'], {}), "(f'Failed to load module {module}')\n", (12181, 12216), False, 'from shlex import quote\n'), ((13305, 13336), 'shlex.quote', 'quote', (['f"""Reached {breakpoint_}"""'], {}), "(f'Reached {breakpoint_}')\n", (13310, 13336), False, 'from shlex import quote\n'), ((13921, 13932), 'shlex.quote', 'quote', (['init'], {}), '(init)\n', (13926, 13932), False, 'from shlex import quote\n'), ((11818, 11828), 'shlex.quote', 'quote', (['arg'], {}), '(arg)\n', (11823, 11828), False, 'from shlex import quote\n'), ((7202, 7213), 'shlex.quote', 'quote', (['home'], {}), '(home)\n', (7207, 7213), False, 'from shlex import quote\n'), 
((7260, 7271), 'shlex.quote', 'quote', (['path'], {}), '(path)\n', (7265, 7271), False, 'from shlex import quote\n'), ((11197, 11215), 'shlex.quote', 'quote', (['keymap_file'], {}), '(keymap_file)\n', (11202, 11215), False, 'from shlex import quote\n'), ((11432, 11450), 'shlex.quote', 'quote', (['keymap_file'], {}), '(keymap_file)\n', (11437, 11450), False, 'from shlex import quote\n'), ((12124, 12137), 'shlex.quote', 'quote', (['module'], {}), '(module)\n', (12129, 12137), False, 'from shlex import quote\n'), ((16507, 16532), 'locale.getdefaultlocale', 'locale.getdefaultlocale', ([], {}), '()\n', (16530, 16532), False, 'import locale\n')] |
import datetime
import utils
import glob
import os
import numpy as np
import pandas as pd
if __name__ == '__main__':
    loaddir = "E:/Data/h5/"
    labels = ['https', 'netflix']
    # Full Ethernet frame: 14-byte header + 1500-byte MTU
    max_packet_length = 1514
    for label in labels:
        print("Starting label: " + label)
        savedir = loaddir + label + "/"
        now = datetime.datetime.now()
        savename = "payload_%s-%.2d%.2d_%.2d%.2d" % (
            label, now.day, now.month, now.hour, now.minute)
        filelist = glob.glob(loaddir + label + '*.h5')
        # Guard against an empty glob instead of crashing on filelist[0]
        if not filelist:
            print("No .h5 files found for label: " + label)
            continue
        # Try only one of each file
        fullname = filelist[0]
        # for fullname in filelist:
        load_dir, filename = os.path.split(fullname)
        print("Loading: {0}".format(filename))
        df = utils.load_h5(load_dir, filename)
        packets = df['bytes'].values
        payloads = []
        # Renamed from `labels`: the original shadowed the outer list of
        # labels being iterated, which was confusing although harmless.
        label_col = []
        filenames = []
        for packet in packets:
            # Keep only full-sized frames so every payload has equal length
            if len(packet) == max_packet_length:
                # Strip the 54-byte Ethernet/IP/TCP header stack; the
                # remaining payload should have length 1460
                payload = packet[54:]
                # np.frombuffer replaces the deprecated np.fromstring
                p = np.frombuffer(payload, dtype=np.uint8)
                payloads.append(p)
                label_col.append(label)
                filenames.append(filename)
        d = {'filename': filenames, 'bytes': payloads, 'label': label_col}
        dataframe = pd.DataFrame(data=d)
        key = savename.split('-')[0]
        dataframe.to_hdf(savedir + savename + '.h5', key=key, mode='w')
        # utils.saveextractedheaders(loaddir, savedir, savename, num_headers=headersize)
        print("Done with label: " + label)
| [
"os.path.split",
"datetime.datetime.now",
"pandas.DataFrame",
"numpy.fromstring",
"utils.load_h5",
"glob.glob"
] | [((331, 354), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (352, 354), False, 'import datetime\n'), ((476, 511), 'glob.glob', 'glob.glob', (["(loaddir + label + '*.h5')"], {}), "(loaddir + label + '*.h5')\n", (485, 511), False, 'import glob\n'), ((644, 667), 'os.path.split', 'os.path.split', (['fullname'], {}), '(fullname)\n', (657, 667), False, 'import os\n'), ((728, 761), 'utils.load_h5', 'utils.load_h5', (['load_dir', 'filename'], {}), '(load_dir, filename)\n', (741, 761), False, 'import utils\n'), ((1326, 1346), 'pandas.DataFrame', 'pd.DataFrame', ([], {'data': 'd'}), '(data=d)\n', (1338, 1346), True, 'import pandas as pd\n'), ((1080, 1118), 'numpy.fromstring', 'np.fromstring', (['payload'], {'dtype': 'np.uint8'}), '(payload, dtype=np.uint8)\n', (1093, 1118), True, 'import numpy as np\n')] |
import logging
import os
import shutil
from os import path
import numpy as np
import nvtabular as nvt
import pandas as pd
from nvtabular import ops
import merlin.io
# Get dataframe library - cudf or pandas
from merlin.core.dispatch import get_lib
from merlin.core.utils import download_file
df_lib = get_lib()  # dataframe backend: cudf when available, else pandas
logging.basicConfig()  # install a default root handler so INFO is visible
logger = logging.getLogger(__name__)  # module-level logger
logger.setLevel(logging.INFO)
def get_movielens(path=None, variant="ml-25m"):
    """Return train/validation merlin.io.Dataset objects for MovieLens.

    Downloads the raw dataset locally if needed and runs the NVTabular
    ETL pipeline so the data is ready for use with merlin-models.

    Parameters
    ----------
    path : str
        The path to download the files locally to. If not set, defaults
        to the ``INPUT_DATA_DIR`` environment variable, falling back to
        the 'merlin-models-data' directory in your home folder.
    variant : "ml-25m" or "ml-100k"
        Which variant of the movielens dataset to use.

    Returns
    -------
    tuple
        ``(train, valid)`` merlin.io.Dataset objects.
    """
    if path is None:
        default_dir = os.path.expanduser("~/merlin-models-data/movielens/")
        path = os.environ.get("INPUT_DATA_DIR", default_dir)
    variant_path = os.path.join(path, variant)
    if not os.path.exists(variant_path):
        os.makedirs(variant_path)
    movielens_download_etl(path, variant)
    train, valid = (
        merlin.io.Dataset(os.path.join(variant_path, split), engine="parquet")
        for split in ("train", "valid")
    )
    return train, valid
def movielens_download_etl(local_filename, name="ml-25m", outputdir=None):
    """This funct does the preliminary preprocessing on movielens dataset
    and converts the csv files to parquet files and saves to disk. Then,
    using NVTabular, it does feature engineering on the parquet files
    and saves the processed files to disk.
    Parameters
    ----------
    local_filename : str
        path for downloading the raw dataset in and storing the converted
        parquet files.
    name : str
        The name of the Movielens dataset. Currently Movielens 25M and
        Movielens 100k datasets are supported.
    outputdir : str, optional
        path for saving the processed parquet files generated from NVTabular
        workflow. If not provided, local_filename is used as outputdir.
    """
    local_filename = os.path.abspath(local_filename)
    if outputdir is None:
        outputdir = local_filename
    if name == "ml-25m":
        # ---- MovieLens 25M: download and convert the raw CSVs ----
        download_file(
            "http://files.grouplens.org/datasets/movielens/ml-25m.zip",
            os.path.join(local_filename, "ml-25m.zip"),
        )
        movies = df_lib.read_csv(os.path.join(local_filename, name, "movies.csv"))
        # "genres" is a pipe-separated string; split it into a list column
        movies["genres"] = movies["genres"].str.split("|")
        movies.to_parquet(os.path.join(local_filename, name, "movies_converted.parquet"))
        ratings = df_lib.read_csv(os.path.join(local_filename, name, "ratings.csv"))
        # shuffle the dataset
        ratings = ratings.sample(len(ratings), replace=False)
        # split the train_df as training and validation data sets.
        num_valid = int(len(ratings) * 0.2)
        train = ratings[:-num_valid]
        valid = ratings[-num_valid:]
        train.to_parquet(os.path.join(local_filename, name, "train.parquet"))
        valid.to_parquet(os.path.join(local_filename, name, "valid.parquet"))
        logger.info("starting ETL..")
        # NVTabular pipeline
        movies = df_lib.read_parquet(os.path.join(local_filename, name, "movies_converted.parquet"))
        # Join movie metadata onto each rating row, then categorify the ids
        joined = ["userId", "movieId"] >> ops.JoinExternal(movies, on=["movieId"])
        cat_features = joined >> ops.Categorify(dtype="int32")
        label = nvt.ColumnSelector(["rating"])
        # Columns to apply to
        cats = nvt.ColumnSelector(["movieId"])
        # Target Encode movieId column
        te_features = cats >> ops.TargetEncoding(label, kfold=5, p_smooth=20)
        te_features_norm = te_features >> ops.NormalizeMinMax()
        # count encode `userId`
        count_logop_feat = (
            ["userId"] >> ops.JoinGroupby(cont_cols=["movieId"], stats=["count"]) >> ops.LogOp()
        )
        # Tag columns so merlin-models can identify users, items, targets
        feats_item = cat_features["movieId"] >> ops.AddMetadata(tags=["item_id", "item"])
        feats_user = cat_features["userId"] >> ops.AddMetadata(tags=["user_id", "user"])
        feats_genres = cat_features["genres"] >> ops.AddMetadata(tags=["item"])
        # Binary classification target: rating > 3 -> 1, else 0
        feats_target = (
            nvt.ColumnSelector(["rating"])
            >> ops.LambdaOp(lambda col: (col > 3).astype("int32"))
            >> ops.AddMetadata(tags=["binary_classification", "target"])
            >> nvt.ops.Rename(name="rating_binary")
        )
        # Also keep the raw rating as a regression target
        target_orig = (
            ["rating"]
            >> ops.LambdaOp(lambda col: col.astype("float32"))
            >> ops.AddMetadata(tags=["regression", "target"])
        )
        workflow = nvt.Workflow(
            feats_item
            + feats_user
            + feats_genres
            + te_features_norm
            + count_logop_feat
            + target_orig
            + feats_target
            + joined["title"]
        )
    elif name == "ml-100k":
        # ---- MovieLens 100k: download and convert the raw pipe/tab files ----
        download_file(
            "http://files.grouplens.org/datasets/movielens/ml-100k.zip",
            os.path.join(local_filename, "ml-100k.zip"),
        )
        logger.info("starting ETL..")
        ratings = pd.read_csv(
            os.path.join(local_filename, "ml-100k/u.data"),
            names=["userId", "movieId", "rating", "timestamp"],
            sep="\t",
        )
        user_features = pd.read_csv(
            os.path.join(local_filename, "ml-100k/u.user"),
            names=["userId", "age", "gender", "occupation", "zip_code"],
            sep="|",
        )
        user_features.to_parquet(os.path.join(local_filename, "ml-100k/user_features.parquet"))
        # Raw column layout of u.item: id, title, dates, URL, then one
        # 0/1 flag per genre
        cols = [
            "movieId",
            "title",
            "release_date",
            "video_release_date",
            "imdb_URL",
            "unknown",
            "Action",
            "Adventure",
            "Animation",
            "Childrens",
            "Comedy",
            "Crime",
            "Documentary",
            "Drama",
            "Fantasy",
            "Film_Noir",
            "Horror",
            "Musical",
            "Mystery",
            "Romance",
            "Sci-Fi",
            "Thriller",
            "War",
            "Western",
        ]
        # The subset of columns that are one-hot genre flags
        genres_ = [
            "unknown",
            "Action",
            "Adventure",
            "Animation",
            "Childrens",
            "Comedy",
            "Crime",
            "Documentary",
            "Drama",
            "Fantasy",
            "Film_Noir",
            "Horror",
            "Musical",
            "Mystery",
            "Romance",
            "Sci-Fi",
            "Thriller",
            "War",
            "Western",
        ]
        movies = pd.read_csv(
            os.path.join(local_filename, "ml-100k/u.item"), names=cols, sep="|", encoding="latin1"
        )
        # Turn the 0/1 flags into genre-name / NaN so they can be
        # collapsed into a single comma-separated "genres" column
        for col in genres_:
            movies[col] = movies[col].replace(1, col)
            movies[col] = movies[col].replace(0, np.nan)
        s = movies[genres_]
        # NOTE(review): the next call discards its result — appears to be
        # a no-op left over from development
        s.notnull()
        movies["genres"] = s.notnull().dot(s.columns + ",").str[:-1]
        movies_converted = movies[
            ["movieId", "title", "release_date", "video_release_date", "genres", "imdb_URL"]
        ]
        movies_converted.to_parquet(
            os.path.join(local_filename, "ml-100k/movies_converted.parquet")
        )
        # ua.base / ua.test are the predefined train/test split
        train = pd.read_csv(
            os.path.join(local_filename, "ml-100k/ua.base"),
            names=["userId", "movieId", "rating", "timestamp"],
            sep="\t",
        )
        valid = pd.read_csv(
            os.path.join(local_filename, "ml-100k/ua.test"),
            names=["userId", "movieId", "rating", "timestamp"],
            sep="\t",
        )
        train = train.merge(user_features, on="userId", how="left")
        train = train.merge(movies_converted, on="movieId", how="left")
        valid = valid.merge(user_features, on="userId", how="left")
        valid = valid.merge(movies_converted, on="movieId", how="left")
        train.to_parquet(os.path.join(local_filename, "ml-100k/train.parquet"))
        valid.to_parquet(os.path.join(local_filename, "ml-100k/valid.parquet"))
        cat_features = [
            "userId",
            "movieId",
            "gender",
            "occupation",
            "zip_code",
            "genres",
        ] >> nvt.ops.Categorify(dtype="int32")
        cont_names = ["age"]
        boundaries = {"age": [0, 10, 20, 30, 40, 50, 60, 70, 80, 90]}
        age_bucket = cont_names >> ops.Bucketize(boundaries) >> ops.AddMetadata(tags=["user"])
        label = nvt.ColumnSelector(["rating"])
        # Target Encode movieId column
        te_features = ["movieId"] >> ops.TargetEncoding(label, kfold=5, p_smooth=20)
        te_features_norm = te_features >> ops.NormalizeMinMax()
        # count encode `userId`
        count_logop_feat = (
            ["userId"] >> ops.JoinGroupby(cont_cols=["movieId"], stats=["count"]) >> ops.LogOp()
        )
        # Tag columns so merlin-models can identify users, items, targets
        feats_item = cat_features["movieId"] >> ops.AddMetadata(tags=["item_id", "item"])
        feats_user = cat_features["userId"] >> ops.AddMetadata(tags=["user_id", "user"])
        feats_genres = cat_features["genres"] >> ops.AddMetadata(tags=["item"])
        user_features = cat_features["gender", "zip_code"] >> ops.AddMetadata(tags=["user"])
        # Binary classification target: rating > 3 -> 1, else 0
        feats_target = (
            nvt.ColumnSelector(["rating"])
            >> ops.LambdaOp(lambda col: (col > 3).astype("int32"))
            >> ops.AddMetadata(tags=["binary_classification", "target"])
            >> nvt.ops.Rename(name="rating_binary")
        )
        target_orig = ["rating"] >> ops.AddMetadata(tags=["regression", "target"])
        workflow = nvt.Workflow(
            feats_item
            + feats_user
            + feats_genres
            + te_features_norm
            + count_logop_feat
            + user_features
            + target_orig
            + feats_target
            + age_bucket
            + ["title"]
        )
    else:
        raise ValueError(
            "Unknown dataset name. Only Movielens 25M and 100k datasets are supported."
        )
    # ---- Shared: run the workflow and write the processed parquet ----
    train_dataset = nvt.Dataset([os.path.join(local_filename, name, "train.parquet")])
    valid_dataset = nvt.Dataset([os.path.join(local_filename, name, "valid.parquet")])
    # Remove stale outputs from a previous run before re-transforming
    if path.exists(os.path.join(local_filename, name, "train")):
        shutil.rmtree(os.path.join(local_filename, name, "train"))
    if path.exists(os.path.join(local_filename, name, "valid")):
        shutil.rmtree(os.path.join(local_filename, name, "valid"))
    workflow.fit(train_dataset)
    workflow.transform(train_dataset).to_parquet(
        output_path=os.path.join(outputdir, name, "train"),
        out_files_per_proc=1,
        shuffle=False,
    )
    workflow.transform(valid_dataset).to_parquet(
        output_path=os.path.join(outputdir, name, "valid"),
        out_files_per_proc=1,
        shuffle=False,
    )
    # Save the workflow
    workflow.save(os.path.join(outputdir, name, "workflow"))
    logger.info("saving the workflow..")
| [
"logging.getLogger",
"nvtabular.ops.JoinExternal",
"nvtabular.ops.AddMetadata",
"os.path.exists",
"nvtabular.ops.NormalizeMinMax",
"nvtabular.ops.Categorify",
"nvtabular.ops.TargetEncoding",
"nvtabular.ColumnSelector",
"os.path.expanduser",
"nvtabular.ops.Bucketize",
"logging.basicConfig",
"nv... | [((304, 313), 'merlin.core.dispatch.get_lib', 'get_lib', ([], {}), '()\n', (311, 313), False, 'from merlin.core.dispatch import get_lib\n'), ((315, 336), 'logging.basicConfig', 'logging.basicConfig', ([], {}), '()\n', (334, 336), False, 'import logging\n'), ((346, 373), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (363, 373), False, 'import logging\n'), ((1408, 1435), 'os.path.join', 'os.path.join', (['path', 'variant'], {}), '(path, variant)\n', (1420, 1435), False, 'import os\n'), ((2583, 2614), 'os.path.abspath', 'os.path.abspath', (['local_filename'], {}), '(local_filename)\n', (2598, 2614), False, 'import os\n'), ((1447, 1475), 'os.path.exists', 'os.path.exists', (['variant_path'], {}), '(variant_path)\n', (1461, 1475), False, 'import os\n'), ((1485, 1510), 'os.makedirs', 'os.makedirs', (['variant_path'], {}), '(variant_path)\n', (1496, 1510), False, 'import os\n'), ((1588, 1623), 'os.path.join', 'os.path.join', (['variant_path', '"""train"""'], {}), "(variant_path, 'train')\n", (1600, 1623), False, 'import os\n'), ((1673, 1708), 'os.path.join', 'os.path.join', (['variant_path', '"""valid"""'], {}), "(variant_path, 'valid')\n", (1685, 1708), False, 'import os\n'), ((3944, 3974), 'nvtabular.ColumnSelector', 'nvt.ColumnSelector', (["['rating']"], {}), "(['rating'])\n", (3962, 3974), True, 'import nvtabular as nvt\n'), ((4021, 4052), 'nvtabular.ColumnSelector', 'nvt.ColumnSelector', (["['movieId']"], {}), "(['movieId'])\n", (4039, 4052), True, 'import nvtabular as nvt\n'), ((5135, 5276), 'nvtabular.Workflow', 'nvt.Workflow', (["(feats_item + feats_user + feats_genres + te_features_norm +\n count_logop_feat + target_orig + feats_target + joined['title'])"], {}), "(feats_item + feats_user + feats_genres + te_features_norm +\n count_logop_feat + target_orig + feats_target + joined['title'])\n", (5147, 5276), True, 'import nvtabular as nvt\n'), ((10789, 10832), 'os.path.join', 'os.path.join', (['local_filename', 'name', 
'"""train"""'], {}), "(local_filename, name, 'train')\n", (10801, 10832), False, 'import os\n'), ((10921, 10964), 'os.path.join', 'os.path.join', (['local_filename', 'name', '"""valid"""'], {}), "(local_filename, name, 'valid')\n", (10933, 10964), False, 'import os\n'), ((11447, 11488), 'os.path.join', 'os.path.join', (['outputdir', 'name', '"""workflow"""'], {}), "(outputdir, name, 'workflow')\n", (11459, 11488), False, 'import os\n'), ((1324, 1377), 'os.path.expanduser', 'os.path.expanduser', (['"""~/merlin-models-data/movielens/"""'], {}), "('~/merlin-models-data/movielens/')\n", (1342, 1377), False, 'import os\n'), ((2808, 2850), 'os.path.join', 'os.path.join', (['local_filename', '"""ml-25m.zip"""'], {}), "(local_filename, 'ml-25m.zip')\n", (2820, 2850), False, 'import os\n'), ((2895, 2943), 'os.path.join', 'os.path.join', (['local_filename', 'name', '"""movies.csv"""'], {}), "(local_filename, name, 'movies.csv')\n", (2907, 2943), False, 'import os\n'), ((3030, 3092), 'os.path.join', 'os.path.join', (['local_filename', 'name', '"""movies_converted.parquet"""'], {}), "(local_filename, name, 'movies_converted.parquet')\n", (3042, 3092), False, 'import os\n'), ((3128, 3177), 'os.path.join', 'os.path.join', (['local_filename', 'name', '"""ratings.csv"""'], {}), "(local_filename, name, 'ratings.csv')\n", (3140, 3177), False, 'import os\n'), ((3481, 3532), 'os.path.join', 'os.path.join', (['local_filename', 'name', '"""train.parquet"""'], {}), "(local_filename, name, 'train.parquet')\n", (3493, 3532), False, 'import os\n'), ((3559, 3610), 'os.path.join', 'os.path.join', (['local_filename', 'name', '"""valid.parquet"""'], {}), "(local_filename, name, 'valid.parquet')\n", (3571, 3610), False, 'import os\n'), ((3718, 3780), 'os.path.join', 'os.path.join', (['local_filename', 'name', '"""movies_converted.parquet"""'], {}), "(local_filename, name, 'movies_converted.parquet')\n", (3730, 3780), False, 'import os\n'), ((3824, 3864), 'nvtabular.ops.JoinExternal', 
'ops.JoinExternal', (['movies'], {'on': "['movieId']"}), "(movies, on=['movieId'])\n", (3840, 3864), False, 'from nvtabular import ops\n'), ((3898, 3927), 'nvtabular.ops.Categorify', 'ops.Categorify', ([], {'dtype': '"""int32"""'}), "(dtype='int32')\n", (3912, 3927), False, 'from nvtabular import ops\n'), ((4123, 4170), 'nvtabular.ops.TargetEncoding', 'ops.TargetEncoding', (['label'], {'kfold': '(5)', 'p_smooth': '(20)'}), '(label, kfold=5, p_smooth=20)\n', (4141, 4170), False, 'from nvtabular import ops\n'), ((4213, 4234), 'nvtabular.ops.NormalizeMinMax', 'ops.NormalizeMinMax', ([], {}), '()\n', (4232, 4234), False, 'from nvtabular import ops\n'), ((4382, 4393), 'nvtabular.ops.LogOp', 'ops.LogOp', ([], {}), '()\n', (4391, 4393), False, 'from nvtabular import ops\n'), ((4452, 4493), 'nvtabular.ops.AddMetadata', 'ops.AddMetadata', ([], {'tags': "['item_id', 'item']"}), "(tags=['item_id', 'item'])\n", (4467, 4493), False, 'from nvtabular import ops\n'), ((4541, 4582), 'nvtabular.ops.AddMetadata', 'ops.AddMetadata', ([], {'tags': "['user_id', 'user']"}), "(tags=['user_id', 'user'])\n", (4556, 4582), False, 'from nvtabular import ops\n'), ((4632, 4662), 'nvtabular.ops.AddMetadata', 'ops.AddMetadata', ([], {'tags': "['item']"}), "(tags=['item'])\n", (4647, 4662), False, 'from nvtabular import ops\n'), ((4887, 4923), 'nvtabular.ops.Rename', 'nvt.ops.Rename', ([], {'name': '"""rating_binary"""'}), "(name='rating_binary')\n", (4901, 4923), True, 'import nvtabular as nvt\n'), ((5059, 5105), 'nvtabular.ops.AddMetadata', 'ops.AddMetadata', ([], {'tags': "['regression', 'target']"}), "(tags=['regression', 'target'])\n", (5074, 5105), False, 'from nvtabular import ops\n'), ((9051, 9081), 'nvtabular.ColumnSelector', 'nvt.ColumnSelector', (["['rating']"], {}), "(['rating'])\n", (9069, 9081), True, 'import nvtabular as nvt\n'), ((10168, 10336), 'nvtabular.Workflow', 'nvt.Workflow', (["(feats_item + feats_user + feats_genres + te_features_norm +\n count_logop_feat + user_features + 
target_orig + feats_target +\n age_bucket + ['title'])"], {}), "(feats_item + feats_user + feats_genres + te_features_norm +\n count_logop_feat + user_features + target_orig + feats_target +\n age_bucket + ['title'])\n", (10180, 10336), True, 'import nvtabular as nvt\n'), ((10628, 10679), 'os.path.join', 'os.path.join', (['local_filename', 'name', '"""train.parquet"""'], {}), "(local_filename, name, 'train.parquet')\n", (10640, 10679), False, 'import os\n'), ((10715, 10766), 'os.path.join', 'os.path.join', (['local_filename', 'name', '"""valid.parquet"""'], {}), "(local_filename, name, 'valid.parquet')\n", (10727, 10766), False, 'import os\n'), ((10857, 10900), 'os.path.join', 'os.path.join', (['local_filename', 'name', '"""train"""'], {}), "(local_filename, name, 'train')\n", (10869, 10900), False, 'import os\n'), ((10989, 11032), 'os.path.join', 'os.path.join', (['local_filename', 'name', '"""valid"""'], {}), "(local_filename, name, 'valid')\n", (11001, 11032), False, 'import os\n'), ((11137, 11175), 'os.path.join', 'os.path.join', (['outputdir', 'name', '"""train"""'], {}), "(outputdir, name, 'train')\n", (11149, 11175), False, 'import os\n'), ((11306, 11344), 'os.path.join', 'os.path.join', (['outputdir', 'name', '"""valid"""'], {}), "(outputdir, name, 'valid')\n", (11318, 11344), False, 'import os\n'), ((4323, 4378), 'nvtabular.ops.JoinGroupby', 'ops.JoinGroupby', ([], {'cont_cols': "['movieId']", 'stats': "['count']"}), "(cont_cols=['movieId'], stats=['count'])\n", (4338, 4378), False, 'from nvtabular import ops\n'), ((4814, 4871), 'nvtabular.ops.AddMetadata', 'ops.AddMetadata', ([], {'tags': "['binary_classification', 'target']"}), "(tags=['binary_classification', 'target'])\n", (4829, 4871), False, 'from nvtabular import ops\n'), ((5516, 5559), 'os.path.join', 'os.path.join', (['local_filename', '"""ml-100k.zip"""'], {}), "(local_filename, 'ml-100k.zip')\n", (5528, 5559), False, 'import os\n'), ((5652, 5698), 'os.path.join', 'os.path.join', 
(['local_filename', '"""ml-100k/u.data"""'], {}), "(local_filename, 'ml-100k/u.data')\n", (5664, 5698), False, 'import os\n'), ((5845, 5891), 'os.path.join', 'os.path.join', (['local_filename', '"""ml-100k/u.user"""'], {}), "(local_filename, 'ml-100k/u.user')\n", (5857, 5891), False, 'import os\n'), ((6030, 6091), 'os.path.join', 'os.path.join', (['local_filename', '"""ml-100k/user_features.parquet"""'], {}), "(local_filename, 'ml-100k/user_features.parquet')\n", (6042, 6091), False, 'import os\n'), ((7200, 7246), 'os.path.join', 'os.path.join', (['local_filename', '"""ml-100k/u.item"""'], {}), "(local_filename, 'ml-100k/u.item')\n", (7212, 7246), False, 'import os\n'), ((7740, 7804), 'os.path.join', 'os.path.join', (['local_filename', '"""ml-100k/movies_converted.parquet"""'], {}), "(local_filename, 'ml-100k/movies_converted.parquet')\n", (7752, 7804), False, 'import os\n'), ((7856, 7903), 'os.path.join', 'os.path.join', (['local_filename', '"""ml-100k/ua.base"""'], {}), "(local_filename, 'ml-100k/ua.base')\n", (7868, 7903), False, 'import os\n'), ((8042, 8089), 'os.path.join', 'os.path.join', (['local_filename', '"""ml-100k/ua.test"""'], {}), "(local_filename, 'ml-100k/ua.test')\n", (8054, 8089), False, 'import os\n'), ((8492, 8545), 'os.path.join', 'os.path.join', (['local_filename', '"""ml-100k/train.parquet"""'], {}), "(local_filename, 'ml-100k/train.parquet')\n", (8504, 8545), False, 'import os\n'), ((8572, 8625), 'os.path.join', 'os.path.join', (['local_filename', '"""ml-100k/valid.parquet"""'], {}), "(local_filename, 'ml-100k/valid.parquet')\n", (8584, 8625), False, 'import os\n'), ((8805, 8838), 'nvtabular.ops.Categorify', 'nvt.ops.Categorify', ([], {'dtype': '"""int32"""'}), "(dtype='int32')\n", (8823, 8838), True, 'import nvtabular as nvt\n'), ((9003, 9033), 'nvtabular.ops.AddMetadata', 'ops.AddMetadata', ([], {'tags': "['user']"}), "(tags=['user'])\n", (9018, 9033), False, 'from nvtabular import ops\n'), ((9159, 9206), 'nvtabular.ops.TargetEncoding', 
'ops.TargetEncoding', (['label'], {'kfold': '(5)', 'p_smooth': '(20)'}), '(label, kfold=5, p_smooth=20)\n', (9177, 9206), False, 'from nvtabular import ops\n'), ((9250, 9271), 'nvtabular.ops.NormalizeMinMax', 'ops.NormalizeMinMax', ([], {}), '()\n', (9269, 9271), False, 'from nvtabular import ops\n'), ((9419, 9430), 'nvtabular.ops.LogOp', 'ops.LogOp', ([], {}), '()\n', (9428, 9430), False, 'from nvtabular import ops\n'), ((9490, 9531), 'nvtabular.ops.AddMetadata', 'ops.AddMetadata', ([], {'tags': "['item_id', 'item']"}), "(tags=['item_id', 'item'])\n", (9505, 9531), False, 'from nvtabular import ops\n'), ((9579, 9620), 'nvtabular.ops.AddMetadata', 'ops.AddMetadata', ([], {'tags': "['user_id', 'user']"}), "(tags=['user_id', 'user'])\n", (9594, 9620), False, 'from nvtabular import ops\n'), ((9670, 9700), 'nvtabular.ops.AddMetadata', 'ops.AddMetadata', ([], {'tags': "['item']"}), "(tags=['item'])\n", (9685, 9700), False, 'from nvtabular import ops\n'), ((9763, 9793), 'nvtabular.ops.AddMetadata', 'ops.AddMetadata', ([], {'tags': "['user']"}), "(tags=['user'])\n", (9778, 9793), False, 'from nvtabular import ops\n'), ((10018, 10054), 'nvtabular.ops.Rename', 'nvt.ops.Rename', ([], {'name': '"""rating_binary"""'}), "(name='rating_binary')\n", (10032, 10054), True, 'import nvtabular as nvt\n'), ((10101, 10147), 'nvtabular.ops.AddMetadata', 'ops.AddMetadata', ([], {'tags': "['regression', 'target']"}), "(tags=['regression', 'target'])\n", (10116, 10147), False, 'from nvtabular import ops\n'), ((4701, 4731), 'nvtabular.ColumnSelector', 'nvt.ColumnSelector', (["['rating']"], {}), "(['rating'])\n", (4719, 4731), True, 'import nvtabular as nvt\n'), ((8974, 8999), 'nvtabular.ops.Bucketize', 'ops.Bucketize', (['boundaries'], {}), '(boundaries)\n', (8987, 8999), False, 'from nvtabular import ops\n'), ((9360, 9415), 'nvtabular.ops.JoinGroupby', 'ops.JoinGroupby', ([], {'cont_cols': "['movieId']", 'stats': "['count']"}), "(cont_cols=['movieId'], stats=['count'])\n", (9375, 9415), 
False, 'from nvtabular import ops\n'), ((9945, 10002), 'nvtabular.ops.AddMetadata', 'ops.AddMetadata', ([], {'tags': "['binary_classification', 'target']"}), "(tags=['binary_classification', 'target'])\n", (9960, 10002), False, 'from nvtabular import ops\n'), ((9832, 9862), 'nvtabular.ColumnSelector', 'nvt.ColumnSelector', (["['rating']"], {}), "(['rating'])\n", (9850, 9862), True, 'import nvtabular as nvt\n')] |
import logging
from pathlib import Path
from discord import Intents
from core.bot import KingBot
# LOGS
log = logging.getLogger('king')
log.setLevel(logging.INFO)
# INTENTS
intents = Intents.all()
# CONFIG YAML
config = Path("config.yaml")
bot = KingBot(
command_prefix=KingBot.get_prefix,
intents=intents,
config=config
)
if __name__ == '__main__':
bot.run()
| [
"logging.getLogger",
"core.bot.KingBot",
"discord.Intents.all",
"pathlib.Path"
] | [((112, 137), 'logging.getLogger', 'logging.getLogger', (['"""king"""'], {}), "('king')\n", (129, 137), False, 'import logging\n'), ((186, 199), 'discord.Intents.all', 'Intents.all', ([], {}), '()\n', (197, 199), False, 'from discord import Intents\n'), ((224, 243), 'pathlib.Path', 'Path', (['"""config.yaml"""'], {}), "('config.yaml')\n", (228, 243), False, 'from pathlib import Path\n'), ((252, 326), 'core.bot.KingBot', 'KingBot', ([], {'command_prefix': 'KingBot.get_prefix', 'intents': 'intents', 'config': 'config'}), '(command_prefix=KingBot.get_prefix, intents=intents, config=config)\n', (259, 326), False, 'from core.bot import KingBot\n')] |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'GUI_DATOS_PACIENTE.ui'
#
# Created by: PyQt5 UI code generator 5.14.1
#
# WARNING! All changes made in this file will be lost!
import os
from PyQt5.QtWidgets import QMessageBox
import subprocess
from PyQt5 import QtCore, QtGui, QtWidgets
import sys
import pandas as pd
class Ui_Datos_Paciente(object):
def setupUi(self, Datos_Paciente):
Datos_Paciente.setObjectName("Datos_Paciente")
Datos_Paciente.resize(881, 771)
self.centralwidget = QtWidgets.QWidget(Datos_Paciente)
self.centralwidget.setObjectName("centralwidget")
self.label = QtWidgets.QLabel(self.centralwidget)
self.label.setGeometry(QtCore.QRect(320, 0, 221, 41))
self.label.setObjectName("label")
self.label_2 = QtWidgets.QLabel(self.centralwidget)
self.label_2.setGeometry(QtCore.QRect(60, 60, 751, 71))
self.label_2.setObjectName("label_2")
self.label_3 = QtWidgets.QLabel(self.centralwidget)
self.label_3.setGeometry(QtCore.QRect(200, 160, 261, 16))
self.label_3.setObjectName("label_3")
self.textEdit = QtWidgets.QTextEdit(self.centralwidget)
self.textEdit.setGeometry(QtCore.QRect(480, 150, 151, 31))
self.textEdit.setStyleSheet("font: 14pt \".SF NS Text\";")
self.textEdit.setObjectName("textEdit")
self.label_4 = QtWidgets.QLabel(self.centralwidget)
self.label_4.setGeometry(QtCore.QRect(330, 260, 131, 16))
self.label_4.setObjectName("label_4")
self.textEdit_2 = QtWidgets.QTextEdit(self.centralwidget)
self.textEdit_2.setGeometry(QtCore.QRect(480, 260, 151, 31))
self.textEdit_2.setStyleSheet("font: 14pt \".SF NS Text\";")
self.textEdit_2.setObjectName("textEdit_2")
self.label_5 = QtWidgets.QLabel(self.centralwidget)
self.label_5.setGeometry(QtCore.QRect(300, 370, 161, 16))
self.label_5.setObjectName("label_5")
self.textEdit_3 = QtWidgets.QTextEdit(self.centralwidget)
self.textEdit_3.setGeometry(QtCore.QRect(480, 370, 151, 31))
self.textEdit_3.setStyleSheet("font: 14pt \".SF NS Text\";")
self.textEdit_3.setObjectName("textEdit_3")
self.comboBox = QtWidgets.QComboBox(self.centralwidget)
self.comboBox.setGeometry(QtCore.QRect(480, 200, 151, 41))
self.comboBox.setObjectName("comboBox")
self.comboBox.addItem("")
self.comboBox.addItem("")
self.comboBox.addItem("")
self.label_6 = QtWidgets.QLabel(self.centralwidget)
self.label_6.setGeometry(QtCore.QRect(310, 210, 141, 20))
self.label_6.setObjectName("label_6")
self.label_7 = QtWidgets.QLabel(self.centralwidget)
self.label_7.setGeometry(QtCore.QRect(130, 450, 371, 91))
self.label_7.setObjectName("label_7")
self.comboBox_2 = QtWidgets.QComboBox(self.centralwidget)
self.comboBox_2.setGeometry(QtCore.QRect(480, 470, 151, 41))
self.comboBox_2.setObjectName("comboBox_2")
self.comboBox_2.addItem("")
self.comboBox_2.addItem("")
self.label_8 = QtWidgets.QLabel(self.centralwidget)
self.label_8.setGeometry(QtCore.QRect(110, 610, 361, 61))
self.label_8.setObjectName("label_8")
self.radioButton = QtWidgets.QRadioButton(self.centralwidget)
self.radioButton.setGeometry(QtCore.QRect(510, 630, 51, 20))
self.radioButton.setObjectName("radioButton")
self.radioButton.clicked.connect(self.crear_paciente)
self.radioButton_2 = QtWidgets.QRadioButton(self.centralwidget)
self.radioButton_2.setGeometry(QtCore.QRect(570, 630, 51, 20))
self.radioButton_2.setObjectName("radioButton_2")
self.radioButton.setEnabled(False)
self.radioButton_2.setEnabled(False)
self.pushButton = QtWidgets.QPushButton(self.centralwidget)
self.pushButton.setEnabled(False)
self.pushButton.setGeometry(QtCore.QRect(520, 680, 113, 32))
self.pushButton.setObjectName("pushButton")
self.pushButton.clicked.connect(self.enviar_carpeta)
self.pushButton_2 = QtWidgets.QPushButton(self.centralwidget)
self.pushButton_2.setGeometry(QtCore.QRect(350, 680, 113, 32))
self.pushButton_2.setObjectName("pushButton_2")
self.pushButton_2.clicked.connect(self.close_application)
self.textEdit_4 = QtWidgets.QTextEdit(self.centralwidget)
self.textEdit_4.setGeometry(QtCore.QRect(480, 310, 151, 31))
self.textEdit_4.setStyleSheet("font: 14pt \".SF NS Text\";")
self.textEdit_4.setObjectName("textEdit_4")
self.label_9 = QtWidgets.QLabel(self.centralwidget)
self.label_9.setGeometry(QtCore.QRect(290, 310, 171, 20))
self.label_9.setObjectName("label_9")
self.comboBox_3 = QtWidgets.QComboBox(self.centralwidget)
self.comboBox_3.setGeometry(QtCore.QRect(480, 420, 151, 41))
self.comboBox_3.setObjectName("comboBox_3")
self.comboBox_3.addItem("")
self.comboBox_3.addItem("")
self.label_11 = QtWidgets.QLabel(self.centralwidget)
self.label_11.setGeometry(QtCore.QRect(210, 430, 261, 20))
self.label_11.setObjectName("label_11")
self.pushButton_3 = QtWidgets.QPushButton(self.centralwidget)
self.pushButton_3.setGeometry(QtCore.QRect(475, 540, 173, 25))
self.pushButton_3.setObjectName("pushButton_3")
self.pushButton_3.clicked.connect(self.showpdf)
self.label_12 = QtWidgets.QLabel(self.centralwidget)
self.label_12.setGeometry(QtCore.QRect(210, 545, 261, 20))
self.label_12.setObjectName("label_12")
self.label_13 = QtWidgets.QLabel(self.centralwidget)
self.label_13.setGeometry(QtCore.QRect(170, 580, 261, 20))
self.label_13.setObjectName("label_13")
self.radioButton_3 = QtWidgets.QRadioButton(self.centralwidget)
self.radioButton_3.setGeometry(QtCore.QRect(510, 580, 51, 20))
self.radioButton_3.setObjectName("radioButton")
self.radioButton_3.clicked.connect(self.consentimiento)
self.radioButton_4 = QtWidgets.QRadioButton(self.centralwidget)
self.radioButton_4.setGeometry(QtCore.QRect(570, 580, 51, 20))
self.radioButton_4.setObjectName("radioButton_2")
Datos_Paciente.setCentralWidget(self.centralwidget)
self.menubar = QtWidgets.QMenuBar(Datos_Paciente)
self.menubar.setGeometry(QtCore.QRect(0, 0, 881, 22))
self.menubar.setObjectName("menubar")
Datos_Paciente.setMenuBar(self.menubar)
self.statusbar = QtWidgets.QStatusBar(Datos_Paciente)
self.statusbar.setObjectName("statusbar")
Datos_Paciente.setStatusBar(self.statusbar)
self.retranslateUi(Datos_Paciente)
QtCore.QMetaObject.connectSlotsByName(Datos_Paciente)
def retranslateUi(self, Datos_Paciente):
_translate = QtCore.QCoreApplication.translate
Datos_Paciente.setWindowTitle(_translate("Datos_Paciente", "Información del paciente"))
self.label.setText(_translate("Datos_Paciente", "<html><head/><body><p align=\"center\"><span style=\" font-size:18pt; font-weight:600;\">Datos del Paciente</span></p></body></html>"))
self.label_2.setText(_translate("Datos_Paciente", "<html><head/><body><p align=\"justify\"><span style=\" font-size:14pt;\">A continuación debe ingresar una información básica que es relevanta para el estudio de las</span></p><p align=\"justify\"><span style=\" font-size:14pt;\">señales, por favor ingrese la siguiente información:</span></p></body></html>"))
self.label_3.setText(_translate("Datos_Paciente", "<html><head/><body><p><span style=\" font-size:10pt; font-weight:600;\">Número de identificación del estudio:</span></p></body></html>"))
self.label_4.setText(_translate("Datos_Paciente", "<html><head/><body><p><span style=\" font-size:10pt; font-weight:600;\">Edad del paciente:</span></p></body></html>"))
self.label_5.setText(_translate("Datos_Paciente", "<html><head/><body><p><span style=\" font-size:10pt; font-weight:600;\">Peso (Kg) del paciente:</span></p></body></html>"))
self.comboBox.setItemText(0, _translate("Datos_Paciente", "Femenino"))
self.comboBox.setItemText(1, _translate("Datos_Paciente", "Masculino"))
self.comboBox.setItemText(2, _translate("Datos_Paciente", "Prefiero no decir"))
self.label_6.setText(_translate("Datos_Paciente", "<html><head/><body><p><span style=\" font-size:10pt; font-weight:600;\">Género del paciente:</span></p></body></html>"))
self.label_7.setText(_translate("Datos_Paciente", "<html><head/><body><p><span style=\" font-size:10pt; font-weight:600;\">Presenta alguna condición de </span></p><p><span style=\" font-size:10pt; font-weight:600;\">discapacidad motora o perdida de miembro superior :</span></p></body></html>"))
self.comboBox_2.setItemText(0, _translate("Datos_Paciente", "Si"))
self.comboBox_2.setItemText(1, _translate("Datos_Paciente", "No"))
self.label_8.setText(_translate("Datos_Paciente", "<html><head/><body><p align=\"justify\"><span style=\" font-weight:600;\">Autoriza al equipo de investigador exponer sus datos </span></p><p align=\"justify\"><span style=\" font-weight:600;\">consignados con anterioridad a la comunidad cientifica: </span></p><p align=\"justify\"><span style=\" font-weight:600;\"><br/></span></p></body></html>"))
self.radioButton.setText(_translate("Datos_Paciente", "Si"))
self.radioButton_2.setText(_translate("Datos_Paciente", "No"))
self.radioButton_3.setText(_translate("Datos_Paciente", "Si"))
self.radioButton_4.setText(_translate("Datos_Paciente", "No"))
self.pushButton.setText(_translate("Datos_Paciente", "SIGUIENTE"))
self.pushButton_2.setText(_translate("Datos_Paciente", "CANCELAR"))
self.pushButton_3.setText(_translate("Datos_Paciente", "VER CONSENTIMIENTO"))
self.label_9.setText(_translate("Datos_Paciente", "<html><head/><body><p><span style=\" font-size:10pt; font-weight:600;\">Altura del paciente (cm):</span></p></body></html>"))
self.comboBox_3.setItemText(0, _translate("Datos_Paciente", "Derecho"))
self.comboBox_3.setItemText(1, _translate("Datos_Paciente", "Izquierdo"))
self.label_11.setText(_translate("Datos_Paciente", "<html><head/><body><p><span style=\" font-size:10pt; font-weight:600;\">Ubicación del brazalete en el paciente:</span></p></body></html>"))
self.label_12.setText(_translate("Datos_Paciente", "<html><head/><body><p><span style=\" font-size:10pt; font-weight:600;\">Consentimiento informado del estudio:</span></p></body></html>"))
self.label_13.setText(_translate("Datos_Paciente", "<html><head/><body><p><span style=\" font-size:10pt; font-weight:600;\">Leyó con cuidado el consentimiento informado:</span></p></body></html>"))
def close_application(self):
sys.exit()
def closeEvent(self, event):
pass
def consentimiento(self):
self.radioButton.setEnabled(True)
self.radioButton_2.setEnabled(True)
def showpdf(self):
subprocess.call(['open', "consentimiento.pdf"])
def enviar_carpeta(self):
global carpetaPaciente
with open(os.path.join( "dato_carpeta.csv"), 'w') as fp:
fp.write(carpetaPaciente)
sys.exit()
def crear_paciente(self):
if not (self.textEdit_3.toPlainText() and self.textEdit.toPlainText() and self.textEdit_2.toPlainText() and self.textEdit_4.toPlainText()) :
print("El QLineEdit esta vacio")
msg = QMessageBox()
msg.setWindowTitle("Control")
msg.setText("Atención!")
msg.setIcon(QMessageBox.Warning)
msg.setInformativeText("Por favor ingrese todos los campos!")
msg.exec()
self.radioButton.setAutoExclusive(False)
self.radioButton.setChecked(False)
else:
bmi_paciente = (int(self.textEdit_3.toPlainText())/ (int(self.textEdit_4.toPlainText())/100)**2)
global carpetaPaciente
print(self.comboBox.currentText())
carpetaPaciente = f"Paciente_ID_{self.textEdit.toPlainText()}" #Creacion de carpetas para guarda archivos si no existe
if not os.path.exists(carpetaPaciente):
os.mkdir(carpetaPaciente)
with open(os.path.join(carpetaPaciente, "datos_paciente.txt"), 'w') as fp:
fp.write("Número de identificación del estudio: %s" %self.textEdit.toPlainText())
fp.write("\n")
fp.write("Género del paciente: %s" %self.comboBox.currentText())
fp.write("\n")
fp.write("Edad del paciente: %s" %self.textEdit_2.toPlainText())
fp.write("\n")
fp.write("Altura del paciente (cm): %s" %self.textEdit_4.toPlainText())
fp.write("\n")
fp.write("Peso (Kg) del paciente: %s" %self.textEdit_3.toPlainText())
fp.write("\n")
fp.write("BMI del paciente: %s" %str(bmi_paciente))
fp.write("\n")
fp.write("Ubicación del brazalete en el paciente: %s" %self.comboBox_3.currentText())
fp.write("\n")
fp.write("Paciente con condición de discapacidad: %s" %self.comboBox_2.currentText())
fp.write("\n")
fp.write("Frecuencia de muestreo de la MYO: 200 (Hz)")
fp.write("\n")
fp.write("Frecuencia de muestreo del Ultracortex (Cyton + Daisy): 125 (Hz)")
self.pushButton.setEnabled(True)
if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
Datos_Paciente = QtWidgets.QMainWindow()
ui = Ui_Datos_Paciente()
ui.setupUi(Datos_Paciente)
Datos_Paciente.show()
sys.exit(app.exec_())
| [
"PyQt5.QtWidgets.QWidget",
"PyQt5.QtWidgets.QTextEdit",
"os.path.exists",
"PyQt5.QtWidgets.QMainWindow",
"PyQt5.QtWidgets.QComboBox",
"PyQt5.QtWidgets.QMessageBox",
"PyQt5.QtCore.QMetaObject.connectSlotsByName",
"PyQt5.QtWidgets.QRadioButton",
"os.path.join",
"PyQt5.QtCore.QRect",
"PyQt5.QtWidge... | [((13884, 13916), 'PyQt5.QtWidgets.QApplication', 'QtWidgets.QApplication', (['sys.argv'], {}), '(sys.argv)\n', (13906, 13916), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((13938, 13961), 'PyQt5.QtWidgets.QMainWindow', 'QtWidgets.QMainWindow', ([], {}), '()\n', (13959, 13961), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((556, 589), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', (['Datos_Paciente'], {}), '(Datos_Paciente)\n', (573, 589), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((669, 705), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (685, 705), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((833, 869), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (849, 869), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1003, 1039), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1019, 1039), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1176, 1215), 'PyQt5.QtWidgets.QTextEdit', 'QtWidgets.QTextEdit', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1195, 1215), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1421, 1457), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1437, 1457), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1596, 1635), 'PyQt5.QtWidgets.QTextEdit', 'QtWidgets.QTextEdit', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1615, 1635), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1849, 1885), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1865, 1885), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2024, 2063), 'PyQt5.QtWidgets.QTextEdit', 'QtWidgets.QTextEdit', (['self.centralwidget'], {}), '(self.centralwidget)\n', (2043, 2063), 
False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2278, 2317), 'PyQt5.QtWidgets.QComboBox', 'QtWidgets.QComboBox', (['self.centralwidget'], {}), '(self.centralwidget)\n', (2297, 2317), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2558, 2594), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (2574, 2594), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2730, 2766), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (2746, 2766), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2905, 2944), 'PyQt5.QtWidgets.QComboBox', 'QtWidgets.QComboBox', (['self.centralwidget'], {}), '(self.centralwidget)\n', (2924, 2944), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3161, 3197), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (3177, 3197), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3337, 3379), 'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['self.centralwidget'], {}), '(self.centralwidget)\n', (3359, 3379), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3594, 3636), 'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['self.centralwidget'], {}), '(self.centralwidget)\n', (3616, 3636), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3880, 3921), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.centralwidget'], {}), '(self.centralwidget)\n', (3901, 3921), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4174, 4215), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.centralwidget'], {}), '(self.centralwidget)\n', (4195, 4215), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4435, 4474), 'PyQt5.QtWidgets.QTextEdit', 'QtWidgets.QTextEdit', (['self.centralwidget'], {}), '(self.centralwidget)\n', (4454, 4474), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4688, 
4724), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (4704, 4724), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4863, 4902), 'PyQt5.QtWidgets.QComboBox', 'QtWidgets.QComboBox', (['self.centralwidget'], {}), '(self.centralwidget)\n', (4882, 4902), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5120, 5156), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (5136, 5156), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5300, 5341), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.centralwidget'], {}), '(self.centralwidget)\n', (5321, 5341), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5549, 5585), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (5565, 5585), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5725, 5761), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (5741, 5761), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5907, 5949), 'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['self.centralwidget'], {}), '(self.centralwidget)\n', (5929, 5949), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6170, 6212), 'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['self.centralwidget'], {}), '(self.centralwidget)\n', (6192, 6212), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6426, 6460), 'PyQt5.QtWidgets.QMenuBar', 'QtWidgets.QMenuBar', (['Datos_Paciente'], {}), '(Datos_Paciente)\n', (6444, 6460), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6642, 6678), 'PyQt5.QtWidgets.QStatusBar', 'QtWidgets.QStatusBar', (['Datos_Paciente'], {}), '(Datos_Paciente)\n', (6662, 6678), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6833, 6886), 'PyQt5.QtCore.QMetaObject.connectSlotsByName', 
'QtCore.QMetaObject.connectSlotsByName', (['Datos_Paciente'], {}), '(Datos_Paciente)\n', (6870, 6886), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11010, 11020), 'sys.exit', 'sys.exit', ([], {}), '()\n', (11018, 11020), False, 'import sys\n'), ((11224, 11271), 'subprocess.call', 'subprocess.call', (["['open', 'consentimiento.pdf']"], {}), "(['open', 'consentimiento.pdf'])\n", (11239, 11271), False, 'import subprocess\n'), ((11446, 11456), 'sys.exit', 'sys.exit', ([], {}), '()\n', (11454, 11456), False, 'import sys\n'), ((737, 766), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(320)', '(0)', '(221)', '(41)'], {}), '(320, 0, 221, 41)\n', (749, 766), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((903, 932), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(60)', '(60)', '(751)', '(71)'], {}), '(60, 60, 751, 71)\n', (915, 932), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1073, 1104), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(200)', '(160)', '(261)', '(16)'], {}), '(200, 160, 261, 16)\n', (1085, 1104), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1250, 1281), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(480)', '(150)', '(151)', '(31)'], {}), '(480, 150, 151, 31)\n', (1262, 1281), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1491, 1522), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(330)', '(260)', '(131)', '(16)'], {}), '(330, 260, 131, 16)\n', (1503, 1522), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1672, 1703), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(480)', '(260)', '(151)', '(31)'], {}), '(480, 260, 151, 31)\n', (1684, 1703), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1919, 1950), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(300)', '(370)', '(161)', '(16)'], {}), '(300, 370, 161, 16)\n', (1931, 1950), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2100, 2131), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(480)', '(370)', '(151)', '(31)'], {}), '(480, 370, 151, 31)\n', (2112, 2131), 
False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2352, 2383), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(480)', '(200)', '(151)', '(41)'], {}), '(480, 200, 151, 41)\n', (2364, 2383), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2628, 2659), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(310)', '(210)', '(141)', '(20)'], {}), '(310, 210, 141, 20)\n', (2640, 2659), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2800, 2831), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(130)', '(450)', '(371)', '(91)'], {}), '(130, 450, 371, 91)\n', (2812, 2831), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2981, 3012), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(480)', '(470)', '(151)', '(41)'], {}), '(480, 470, 151, 41)\n', (2993, 3012), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3231, 3262), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(110)', '(610)', '(361)', '(61)'], {}), '(110, 610, 361, 61)\n', (3243, 3262), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3417, 3447), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(510)', '(630)', '(51)', '(20)'], {}), '(510, 630, 51, 20)\n', (3429, 3447), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3676, 3706), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(570)', '(630)', '(51)', '(20)'], {}), '(570, 630, 51, 20)\n', (3688, 3706), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4000, 4031), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(520)', '(680)', '(113)', '(32)'], {}), '(520, 680, 113, 32)\n', (4012, 4031), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4254, 4285), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(350)', '(680)', '(113)', '(32)'], {}), '(350, 680, 113, 32)\n', (4266, 4285), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4511, 4542), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(480)', '(310)', '(151)', '(31)'], {}), '(480, 310, 151, 31)\n', (4523, 4542), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4758, 4789), 'PyQt5.QtCore.QRect', 
'QtCore.QRect', (['(290)', '(310)', '(171)', '(20)'], {}), '(290, 310, 171, 20)\n', (4770, 4789), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4939, 4970), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(480)', '(420)', '(151)', '(41)'], {}), '(480, 420, 151, 41)\n', (4951, 4970), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5191, 5222), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(210)', '(430)', '(261)', '(20)'], {}), '(210, 430, 261, 20)\n', (5203, 5222), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5380, 5411), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(475)', '(540)', '(173)', '(25)'], {}), '(475, 540, 173, 25)\n', (5392, 5411), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5620, 5651), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(210)', '(545)', '(261)', '(20)'], {}), '(210, 545, 261, 20)\n', (5632, 5651), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5796, 5827), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(170)', '(580)', '(261)', '(20)'], {}), '(170, 580, 261, 20)\n', (5808, 5827), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5989, 6019), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(510)', '(580)', '(51)', '(20)'], {}), '(510, 580, 51, 20)\n', (6001, 6019), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6252, 6282), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(570)', '(580)', '(51)', '(20)'], {}), '(570, 580, 51, 20)\n', (6264, 6282), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6494, 6521), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(0)', '(0)', '(881)', '(22)'], {}), '(0, 0, 881, 22)\n', (6506, 6521), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11701, 11714), 'PyQt5.QtWidgets.QMessageBox', 'QMessageBox', ([], {}), '()\n', (11712, 11714), False, 'from PyQt5.QtWidgets import QMessageBox\n'), ((11353, 11385), 'os.path.join', 'os.path.join', (['"""dato_carpeta.csv"""'], {}), "('dato_carpeta.csv')\n", (11365, 11385), False, 'import os\n'), ((12393, 12424), 
'os.path.exists', 'os.path.exists', (['carpetaPaciente'], {}), '(carpetaPaciente)\n', (12407, 12424), False, 'import os\n'), ((12442, 12467), 'os.mkdir', 'os.mkdir', (['carpetaPaciente'], {}), '(carpetaPaciente)\n', (12450, 12467), False, 'import os\n'), ((12494, 12545), 'os.path.join', 'os.path.join', (['carpetaPaciente', '"""datos_paciente.txt"""'], {}), "(carpetaPaciente, 'datos_paciente.txt')\n", (12506, 12545), False, 'import os\n')] |
"""blog URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path,include
from . import views
from django.conf import settings
#from posts.views import categories
from contact.views import contact
from search.views import search
urlpatterns = [
path('admin/', admin.site.urls),
path('',views.home,name='home'),
path('about/',views.about,name='about'),
path('logout/',views.logout,name='logout'),
#contact
path('contact/',contact,name='contact'),
#search
path('search/',search,name='search'),
#Blog
path('blog/',include('posts.urls')),
#CKEditor
path('ckeditor/', include('ckeditor_uploader.urls')),
]
if settings.DEBUG:
from django.conf.urls.static import static
urlpatterns += static(settings.STATIC_URL,document_root=settings.STATIC_ROOT)
urlpatterns += static(settings.MEDIA_URL,document_root=settings.MEDIA_ROOT)
| [
"django.conf.urls.static.static",
"django.urls.path",
"django.urls.include"
] | [((904, 935), 'django.urls.path', 'path', (['"""admin/"""', 'admin.site.urls'], {}), "('admin/', admin.site.urls)\n", (908, 935), False, 'from django.urls import path, include\n'), ((942, 975), 'django.urls.path', 'path', (['""""""', 'views.home'], {'name': '"""home"""'}), "('', views.home, name='home')\n", (946, 975), False, 'from django.urls import path, include\n'), ((980, 1021), 'django.urls.path', 'path', (['"""about/"""', 'views.about'], {'name': '"""about"""'}), "('about/', views.about, name='about')\n", (984, 1021), False, 'from django.urls import path, include\n'), ((1026, 1070), 'django.urls.path', 'path', (['"""logout/"""', 'views.logout'], {'name': '"""logout"""'}), "('logout/', views.logout, name='logout')\n", (1030, 1070), False, 'from django.urls import path, include\n'), ((1091, 1132), 'django.urls.path', 'path', (['"""contact/"""', 'contact'], {'name': '"""contact"""'}), "('contact/', contact, name='contact')\n", (1095, 1132), False, 'from django.urls import path, include\n'), ((1155, 1193), 'django.urls.path', 'path', (['"""search/"""', 'search'], {'name': '"""search"""'}), "('search/', search, name='search')\n", (1159, 1193), False, 'from django.urls import path, include\n'), ((1434, 1497), 'django.conf.urls.static.static', 'static', (['settings.STATIC_URL'], {'document_root': 'settings.STATIC_ROOT'}), '(settings.STATIC_URL, document_root=settings.STATIC_ROOT)\n', (1440, 1497), False, 'from django.conf.urls.static import static\n'), ((1517, 1578), 'django.conf.urls.static.static', 'static', (['settings.MEDIA_URL'], {'document_root': 'settings.MEDIA_ROOT'}), '(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)\n', (1523, 1578), False, 'from django.conf.urls.static import static\n'), ((1224, 1245), 'django.urls.include', 'include', (['"""posts.urls"""'], {}), "('posts.urls')\n", (1231, 1245), False, 'from django.urls import path, include\n'), ((1297, 1330), 'django.urls.include', 'include', (['"""ckeditor_uploader.urls"""'], {}), 
"('ckeditor_uploader.urls')\n", (1304, 1330), False, 'from django.urls import path, include\n')] |
#!/usr/bin/env python3
collatz_memo = {1: 1}
def collatz(n):
'''Return the length of the Collatz sequence for the given input'''
if n in collatz_memo:
result = collatz_memo[n]
elif n % 2 == 0:
collatz_memo[n] = 1 + collatz(n / 2)
result = collatz_memo[n]
else:
collatz_memo[n] = 1 + collatz(3 * n + 1)
result = collatz_memo[n]
return result
if __name__ == '__main__':
import argparse
desc = 'Return the collatz number for a given input'
parser = argparse.ArgumentParser(description=desc)
parser.add_argument('n', type=int, help='integer to use as input')
args = parser.parse_args()
print(collatz(args.n))
| [
"argparse.ArgumentParser"
] | [((524, 565), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': 'desc'}), '(description=desc)\n', (547, 565), False, 'import argparse\n')] |
import os
from airflow import configuration as conf
from flask_appbuilder.security.manager import AUTH_LDAP
basedir = os.path.abspath(os.path.dirname(__file__))
SQLALCHEMY_DATABASE_URI = conf.get('core', 'SQL_ALCHEMY_CONN')
CSRF_ENABLED = True
AUTH_TYPE = AUTH_LDAP
AUTH_ROLE_ADMIN = 'Admin'
AUTH_USER_REGISTRATION = True
AUTH_USER_REGISTRATION_ROLE = "Op"
AUTH_LDAP_SERVER = os.environ.get("AUTH_LDAP_SERVER")
AUTH_LDAP_SEARCH = os.environ.get("AUTH_LDAP_SEARCH")
AUTH_LDAP_BIND_USER = 'uid=ssaappuser,ou=serviceaccounts,dc=redhat,dc=com'
AUTH_LDAP_BIND_PASSWORD = os.environ.get("AUTH_LDAP_BIND_PASSWORD")
AUTH_LDAP_UID_FIELD = 'uid'
# LDAPS
AUTH_LDAP_USE_TLS = False
AUTH_LDAP_ALLOW_SELF_SIGNED = False
AUTH_LDAP_TLS_CACERTFILE = os.environ.get("AUTH_LDAP_TLS_CACERTFILE")
| [
"os.path.dirname",
"airflow.configuration.get",
"os.environ.get"
] | [((189, 225), 'airflow.configuration.get', 'conf.get', (['"""core"""', '"""SQL_ALCHEMY_CONN"""'], {}), "('core', 'SQL_ALCHEMY_CONN')\n", (197, 225), True, 'from airflow import configuration as conf\n'), ((381, 415), 'os.environ.get', 'os.environ.get', (['"""AUTH_LDAP_SERVER"""'], {}), "('AUTH_LDAP_SERVER')\n", (395, 415), False, 'import os\n'), ((435, 469), 'os.environ.get', 'os.environ.get', (['"""AUTH_LDAP_SEARCH"""'], {}), "('AUTH_LDAP_SEARCH')\n", (449, 469), False, 'import os\n'), ((571, 612), 'os.environ.get', 'os.environ.get', (['"""AUTH_LDAP_BIND_PASSWORD"""'], {}), "('AUTH_LDAP_BIND_PASSWORD')\n", (585, 612), False, 'import os\n'), ((739, 781), 'os.environ.get', 'os.environ.get', (['"""AUTH_LDAP_TLS_CACERTFILE"""'], {}), "('AUTH_LDAP_TLS_CACERTFILE')\n", (753, 781), False, 'import os\n'), ((135, 160), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (150, 160), False, 'import os\n')] |
import logging
from flask import Blueprint, abort
from flask import current_app as app
from flask import redirect, render_template, request, url_for
from itsdangerous import BadData, BadSignature, SignatureExpired, URLSafeSerializer
from structlog import wrap_logger
from response_operations_ui.common import token_decoder
from response_operations_ui.controllers import uaa_controller
from response_operations_ui.controllers.notify_controller import NotifyController
from response_operations_ui.exceptions.exceptions import NotifyError
from response_operations_ui.forms import ForgotPasswordForm, ResetPasswordForm
# Module-level structlog-wrapped logger for this blueprint.
logger = wrap_logger(logging.getLogger(__name__))
# Message text UAA returns for bad credentials (not referenced in this chunk).
BAD_AUTH_ERROR = "Unauthorized user credentials"
# Blueprint grouping all password-reset routes.
passwords_bp = Blueprint("passwords_bp", __name__, static_folder="static", template_folder="templates")
@passwords_bp.route("/forgot-password", methods=["GET"])
def get_forgot_password():
    """Render the forgot-password page with an empty form."""
    return render_template("forgot-password.html", form=ForgotPasswordForm(request.form))
@passwords_bp.route("/forgot-password", methods=["POST"])
def post_forgot_password():
    """Validate the submitted address and trigger the password-reset email."""
    form = ForgotPasswordForm(request.form)
    # Normalise surrounding whitespace before validation runs.
    form.email_address.data = form.email_address.data.strip()
    email = form.data.get("email_address")
    if not form.validate():
        # Redisplay the form with the entered address and its validation errors.
        return render_template("forgot-password.html", form=form, email=email)
    return send_password_change_email(email)
@passwords_bp.route("/forgot-password/check-email", methods=["GET"])
def forgot_password_check_email():
    """Show the 'check your email' page for a previously submitted address."""
    signed_email = request.args.get("email")
    if signed_email is None:
        logger.error("No email parameter supplied")
        return redirect(url_for("passwords_bp.get_forgot_password"))
    serializer = URLSafeSerializer(app.config["SECRET_KEY"])
    try:
        email = serializer.loads(signed_email)
    except BadSignature:
        logger.error("Unable to decode email from URL", encoded_email=signed_email)
        abort(404)
    return render_template("forgot-password-check-email.html", email=email)
@passwords_bp.route("/reset-password/<token>", methods=["GET"])
def get_reset_password(token, form_errors=None):
    """Render the reset-password form if the emailed token is still valid."""
    form = ResetPasswordForm(request.form)
    # Decoding only checks validity/expiry here; the email value is not needed.
    try:
        token_decoder.decode_email_token(token, app.config["EMAIL_TOKEN_EXPIRY"])
    except SignatureExpired:
        logger.warning("Token expired for Response Operations password reset", token=token)
        return render_template("reset-password-expired.html", token=token)
    except (BadSignature, BadData):
        logger.warning("Invalid token sent to Response Operations password reset", token=token)
        return render_template("reset-password-expired.html", token=token)
    template_data = {"error": {"type": form_errors}, "token": token}
    return render_template("reset-password.html", form=form, data=template_data)
@passwords_bp.route("/reset-password/<token>", methods=["POST"])
def post_reset_password(token):
    """Validate the submitted password and change it in UAA.

    The signed token is decoded again here (not trusted from the GET step)
    before any change is made.
    """
    form = ResetPasswordForm(request.form)
    if not form.validate():
        # Re-render the form with the validation errors attached.
        return get_reset_password(token, form_errors=form.errors)
    password = request.form.get("password")
    try:
        duration = app.config["EMAIL_TOKEN_EXPIRY"]
        email = token_decoder.decode_email_token(token, duration)
    except SignatureExpired:
        logger.warning("Token expired for Response Operations password reset", token=token)
        return render_template("reset-password-expired.html", token=token)
    except (BadSignature, BadData):
        logger.warning("Invalid token sent to Response Operations password reset", token=token)
        return render_template("reset-password-expired.html", token=token)
    response = uaa_controller.change_user_password(email, password)
    if response is not None:
        if response.status_code == 200:
            # 200 == All good
            logger.info("Successfully changed user password", token=token)
            send_confirm_change_email(email)
            return redirect(url_for("passwords_bp.reset_password_confirmation"))
        if response.status_code == 422:
            # 422 == New password same as old password
            logger.info("New password same as old password", token=token)
            errors = {"password": ["Please choose a different password or login with the old password"]}
            return get_reset_password(token, form_errors=errors)
    # Fall-through: no response from UAA or an unexpected status code.
    logger.warning("Error changing password in UAA", token=token)
    return render_template("reset-password-error.html")
@passwords_bp.route("/reset-password/confirmation", methods=["GET"])
def reset_password_confirmation():
    """Render the static 'password changed' confirmation page."""
    return render_template("reset-password-confirmation.html")
@passwords_bp.route("/resend-password-email-expired-token/<token>", methods=["GET"])
def resend_password_email_expired_token(token):
    """Re-send the password-reset email for a token that has expired.

    NOTE(review): unlike the other views, decode_email_token is called here
    without the EMAIL_TOKEN_EXPIRY duration — presumably so the email address
    can still be recovered from an expired token; confirm that is intended.
    """
    email = token_decoder.decode_email_token(token)
    return send_password_change_email(email)
def send_password_change_email(email):
    """Email a password-reset link for `email`, then show the check-email page.

    The final redirect is the same whether or not the address exists in UAA,
    so callers cannot use this endpoint to enumerate valid accounts.
    """
    url_safe_serializer = URLSafeSerializer(app.config["SECRET_KEY"])
    response = uaa_controller.get_user_by_email(email)
    if response is None:
        # The UAA lookup itself failed (distinct from "no such user").
        return render_template("forgot-password-error.html")
    if response["totalResults"] > 0:
        first_name = response["resources"][0]["name"]["givenName"]
        internal_url = app.config["RESPONSE_OPERATIONS_UI_URL"]
        verification_url = f"{internal_url}/passwords/reset-password/{token_decoder.generate_email_token(email)}"
        logger.info("Sending password change email", verification_url=verification_url)
        personalisation = {"RESET_PASSWORD_URL": verification_url, "FIRST_NAME": first_name}
        try:
            NotifyController().request_to_notify(
                email=email, template_name="request_password_change", personalisation=personalisation
            )
        except NotifyError as e:
            logger.error("Error sending password change request email to Notify Gateway", msg=e.description)
            return render_template("forgot-password-error.html")
        logger.info("Successfully sent password change request email", email=url_safe_serializer.dumps(email))
    else:
        # We still want to render the template for an email without an account to avoid
        # people fishing for valid emails
        logger.info("Requested password reset for email not in UAA", email=url_safe_serializer.dumps(email))
    return redirect(url_for("passwords_bp.forgot_password_check_email", email=url_safe_serializer.dumps(email)))
def send_confirm_change_email(email):
    """Best-effort: email the user a 'your password was changed' confirmation.

    Notify failures are logged but never surfaced to the caller, since the
    password change itself already succeeded.
    """
    user = uaa_controller.get_user_by_email(email)
    first_name = user["resources"][0]["name"]["givenName"]
    if first_name != "":
        personalisation = {"FIRST_NAME": first_name}
        try:
            NotifyController().request_to_notify(
                email=email, template_name="confirm_password_change", personalisation=personalisation
            )
        except NotifyError as e:
            # This shouldn't show the client an error - the password change was still successful.
            # They just won't get a confirmation email
            logger.error("Error sending password change confirmation email to Notify Gateway", msg=e.description)
| [
"logging.getLogger",
"flask.render_template",
"flask.request.args.get",
"response_operations_ui.common.token_decoder.generate_email_token",
"itsdangerous.URLSafeSerializer",
"flask.request.form.get",
"response_operations_ui.common.token_decoder.decode_email_token",
"flask.url_for",
"response_operati... | [((734, 827), 'flask.Blueprint', 'Blueprint', (['"""passwords_bp"""', '__name__'], {'static_folder': '"""static"""', 'template_folder': '"""templates"""'}), "('passwords_bp', __name__, static_folder='static', template_folder\n ='templates')\n", (743, 827), False, 'from flask import Blueprint, abort\n'), ((639, 666), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (656, 666), False, 'import logging\n'), ((920, 952), 'response_operations_ui.forms.ForgotPasswordForm', 'ForgotPasswordForm', (['request.form'], {}), '(request.form)\n', (938, 952), False, 'from response_operations_ui.forms import ForgotPasswordForm, ResetPasswordForm\n'), ((964, 1014), 'flask.render_template', 'render_template', (['"""forgot-password.html"""'], {'form': 'form'}), "('forgot-password.html', form=form)\n", (979, 1014), False, 'from flask import redirect, render_template, request, url_for\n'), ((1114, 1146), 'response_operations_ui.forms.ForgotPasswordForm', 'ForgotPasswordForm', (['request.form'], {}), '(request.form)\n', (1132, 1146), False, 'from response_operations_ui.forms import ForgotPasswordForm, ResetPasswordForm\n'), ((1338, 1401), 'flask.render_template', 'render_template', (['"""forgot-password.html"""'], {'form': 'form', 'email': 'email'}), "('forgot-password.html', form=form, email=email)\n", (1353, 1401), False, 'from flask import redirect, render_template, request, url_for\n'), ((1528, 1553), 'flask.request.args.get', 'request.args.get', (['"""email"""'], {}), "('email')\n", (1544, 1553), False, 'from flask import redirect, render_template, request, url_for\n'), ((1938, 2002), 'flask.render_template', 'render_template', (['"""forgot-password-check-email.html"""'], {'email': 'email'}), "('forgot-password-check-email.html', email=email)\n", (1953, 2002), False, 'from flask import redirect, render_template, request, url_for\n'), ((2129, 2160), 'response_operations_ui.forms.ResetPasswordForm', 'ResetPasswordForm', 
(['request.form'], {}), '(request.form)\n', (2146, 2160), False, 'from response_operations_ui.forms import ForgotPasswordForm, ResetPasswordForm\n'), ((2769, 2838), 'flask.render_template', 'render_template', (['"""reset-password.html"""'], {'form': 'form', 'data': 'template_data'}), "('reset-password.html', form=form, data=template_data)\n", (2784, 2838), False, 'from flask import redirect, render_template, request, url_for\n'), ((2949, 2980), 'response_operations_ui.forms.ResetPasswordForm', 'ResetPasswordForm', (['request.form'], {}), '(request.form)\n', (2966, 2980), False, 'from response_operations_ui.forms import ForgotPasswordForm, ResetPasswordForm\n'), ((3092, 3120), 'flask.request.form.get', 'request.form.get', (['"""password"""'], {}), "('password')\n", (3108, 3120), False, 'from flask import redirect, render_template, request, url_for\n'), ((3668, 3720), 'response_operations_ui.controllers.uaa_controller.change_user_password', 'uaa_controller.change_user_password', (['email', 'password'], {}), '(email, password)\n', (3703, 3720), False, 'from response_operations_ui.controllers import uaa_controller\n'), ((4440, 4484), 'flask.render_template', 'render_template', (['"""reset-password-error.html"""'], {}), "('reset-password-error.html')\n", (4455, 4484), False, 'from flask import redirect, render_template, request, url_for\n'), ((4602, 4653), 'flask.render_template', 'render_template', (['"""reset-password-confirmation.html"""'], {}), "('reset-password-confirmation.html')\n", (4617, 4653), False, 'from flask import redirect, render_template, request, url_for\n'), ((4801, 4840), 'response_operations_ui.common.token_decoder.decode_email_token', 'token_decoder.decode_email_token', (['token'], {}), '(token)\n', (4833, 4840), False, 'from response_operations_ui.common import token_decoder\n'), ((4953, 4996), 'itsdangerous.URLSafeSerializer', 'URLSafeSerializer', (["app.config['SECRET_KEY']"], {}), "(app.config['SECRET_KEY'])\n", (4970, 4996), False, 'from 
itsdangerous import BadData, BadSignature, SignatureExpired, URLSafeSerializer\n'), ((5013, 5052), 'response_operations_ui.controllers.uaa_controller.get_user_by_email', 'uaa_controller.get_user_by_email', (['email'], {}), '(email)\n', (5045, 5052), False, 'from response_operations_ui.controllers import uaa_controller\n'), ((6518, 6557), 'response_operations_ui.controllers.uaa_controller.get_user_by_email', 'uaa_controller.get_user_by_email', (['email'], {}), '(email)\n', (6550, 6557), False, 'from response_operations_ui.controllers import uaa_controller\n'), ((2235, 2284), 'response_operations_ui.common.token_decoder.decode_email_token', 'token_decoder.decode_email_token', (['token', 'duration'], {}), '(token, duration)\n', (2267, 2284), False, 'from response_operations_ui.common import token_decoder\n'), ((3199, 3248), 'response_operations_ui.common.token_decoder.decode_email_token', 'token_decoder.decode_email_token', (['token', 'duration'], {}), '(token, duration)\n', (3231, 3248), False, 'from response_operations_ui.common import token_decoder\n'), ((5093, 5138), 'flask.render_template', 'render_template', (['"""forgot-password-error.html"""'], {}), "('forgot-password-error.html')\n", (5108, 5138), False, 'from flask import redirect, render_template, request, url_for\n'), ((1661, 1704), 'flask.url_for', 'url_for', (['"""passwords_bp.get_forgot_password"""'], {}), "('passwords_bp.get_forgot_password')\n", (1668, 1704), False, 'from flask import redirect, render_template, request, url_for\n'), ((1915, 1925), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (1920, 1925), False, 'from flask import Blueprint, abort\n'), ((2421, 2480), 'flask.render_template', 'render_template', (['"""reset-password-expired.html"""'], {'token': 'token'}), "('reset-password-expired.html', token=token)\n", (2436, 2480), False, 'from flask import redirect, render_template, request, url_for\n'), ((2628, 2687), 'flask.render_template', 'render_template', 
(['"""reset-password-expired.html"""'], {'token': 'token'}), "('reset-password-expired.html', token=token)\n", (2643, 2687), False, 'from flask import redirect, render_template, request, url_for\n'), ((3385, 3444), 'flask.render_template', 'render_template', (['"""reset-password-expired.html"""'], {'token': 'token'}), "('reset-password-expired.html', token=token)\n", (3400, 3444), False, 'from flask import redirect, render_template, request, url_for\n'), ((3592, 3651), 'flask.render_template', 'render_template', (['"""reset-password-expired.html"""'], {'token': 'token'}), "('reset-password-expired.html', token=token)\n", (3607, 3651), False, 'from flask import redirect, render_template, request, url_for\n'), ((1732, 1775), 'itsdangerous.URLSafeSerializer', 'URLSafeSerializer', (["app.config['SECRET_KEY']"], {}), "(app.config['SECRET_KEY'])\n", (1749, 1775), False, 'from itsdangerous import BadData, BadSignature, SignatureExpired, URLSafeSerializer\n'), ((3969, 4020), 'flask.url_for', 'url_for', (['"""passwords_bp.reset_password_confirmation"""'], {}), "('passwords_bp.reset_password_confirmation')\n", (3976, 4020), False, 'from flask import redirect, render_template, request, url_for\n'), ((5378, 5419), 'response_operations_ui.common.token_decoder.generate_email_token', 'token_decoder.generate_email_token', (['email'], {}), '(email)\n', (5412, 5419), False, 'from response_operations_ui.common import token_decoder\n'), ((5946, 5991), 'flask.render_template', 'render_template', (['"""forgot-password-error.html"""'], {}), "('forgot-password-error.html')\n", (5961, 5991), False, 'from flask import redirect, render_template, request, url_for\n'), ((5631, 5649), 'response_operations_ui.controllers.notify_controller.NotifyController', 'NotifyController', ([], {}), '()\n', (5647, 5649), False, 'from response_operations_ui.controllers.notify_controller import NotifyController\n'), ((6721, 6739), 'response_operations_ui.controllers.notify_controller.NotifyController', 
'NotifyController', ([], {}), '()\n', (6737, 6739), False, 'from response_operations_ui.controllers.notify_controller import NotifyController\n')] |
import numpy as np
import matplotlib.pyplot as plt
""" E is in MeV, D in μm, vB in μm/h, τ in h, and k in (MeV)^-1 """
def E(D, τ=5, vB=2.66, k=.8, n=1.2, a=1, z=1):
    """Invert the diameter model: incident energy (MeV) for track diameter D (μm)."""
    reduced = (2*τ*vB/D - 1) / k
    return z**2 * a * reduced**(1/n)
def D(E, τ=5, vB=2.66, k=.8, n=1.2, a=1, z=1):
    """Track diameter (μm) for incident energy E (MeV); NaN where E ≤ 0."""
    scaled = E / (z**2 * a)
    diameter = 2*τ*vB / (1 + k*scaled**n)
    return np.where(E > 0, diameter, np.nan)
if __name__ == '__main__':
    # Plot diameter vs. energy for the default (a=1, z=1) particle.
    plt.rcParams.update({'font.family': 'serif', 'font.size': 14})
    x = np.linspace(1, 16)
    # for k, n in [(.849, .806), (.626, .867), (.651, .830), (.651, .779), (.868, 1.322)]:
    # 	plt.plot(x, D(x, k=k, n=n), '-')
    plt.plot(x, D(x, a=1, z=1), '-k', linewidth=3)
    # print(x.min(), E(3), E(1.7), x.max())
    # plt.fill_between([E(1.7), x.max()], [D(x.max()), D(x.max())], [1.7, 1.7], color='b', alpha=.2)
    # plt.fill_between([E(3), x.min()], [3, 3], [D(x.min()), D(x.min())], color='r', alpha=.2)
    # plt.title("Relationship between incident energy and track diameter")
    plt.xlabel("Energy (MeV)")
    plt.ylabel("Diameter (μm)")
    plt.tight_layout()
    plt.show()
| [
"matplotlib.pyplot.ylabel",
"numpy.where",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.rcParams.update",
"numpy.linspace",
"matplotlib.pyplot.tight_layout",
"matplotlib.pyplot.show"
] | [((263, 334), 'numpy.where', 'np.where', (['(E > 0)', '(2 * τ * vB / (1 + k * (E / (z ** 2 * a)) ** n))', 'np.nan'], {}), '(E > 0, 2 * τ * vB / (1 + k * (E / (z ** 2 * a)) ** n), np.nan)\n', (271, 334), True, 'import numpy as np\n'), ((353, 415), 'matplotlib.pyplot.rcParams.update', 'plt.rcParams.update', (["{'font.family': 'serif', 'font.size': 14}"], {}), "({'font.family': 'serif', 'font.size': 14})\n", (372, 415), True, 'import matplotlib.pyplot as plt\n'), ((421, 439), 'numpy.linspace', 'np.linspace', (['(1)', '(16)'], {}), '(1, 16)\n', (432, 439), True, 'import numpy as np\n'), ((917, 943), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Energy (MeV)"""'], {}), "('Energy (MeV)')\n", (927, 943), True, 'import matplotlib.pyplot as plt\n'), ((945, 972), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Diameter (μm)"""'], {}), "('Diameter (μm)')\n", (955, 972), True, 'import matplotlib.pyplot as plt\n'), ((974, 992), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (990, 992), True, 'import matplotlib.pyplot as plt\n'), ((994, 1004), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1002, 1004), True, 'import matplotlib.pyplot as plt\n')] |
import groupDNNClassifier as DNNC
import os
from datetime import datetime
import time

# Bug fix: time.clock() was deprecated in Python 3.3 and removed in 3.8;
# time.perf_counter() is the documented replacement for elapsed-time timing.
start = time.perf_counter()

# Timestamp like "2020_01_31_12_30_59", used as a unique run directory name.
now = str(datetime.now()).split('.')[0].replace(" ", "_").replace(":", "_").replace("-", "_")
directory = "runs/" + now
learning_rate = 0.000013

# Create the per-run output directory (no-op if it already exists).
os.makedirs(directory, exist_ok=True)

for i in range(1, 2):
    partial_start = time.perf_counter()
    print("Learning Rate: ", learning_rate*i)
    # 2000 steps, batch sizes 20/20, four hidden layers of 10 units each.
    DNNC.run(learning_rate*i, 2000, 20, 20, [10, 10, 10, 10], directory + '/')
    partial_end = time.perf_counter()
    print("Iteration ", i, " time : ", (partial_end-partial_start)/60)

end = time.perf_counter()
print("Total execution time: ", (end-start)/60)
| [
"os.path.exists",
"os.makedirs",
"time.clock",
"datetime.datetime.now",
"groupDNNClassifier.run"
] | [((95, 107), 'time.clock', 'time.clock', ([], {}), '()\n', (105, 107), False, 'import time\n'), ((612, 624), 'time.clock', 'time.clock', ([], {}), '()\n', (622, 624), False, 'import time\n'), ((263, 288), 'os.path.exists', 'os.path.exists', (['directory'], {}), '(directory)\n', (277, 288), False, 'import os\n'), ((294, 316), 'os.makedirs', 'os.makedirs', (['directory'], {}), '(directory)\n', (305, 316), False, 'import os\n'), ((360, 372), 'time.clock', 'time.clock', ([], {}), '()\n', (370, 372), False, 'import time\n'), ((424, 500), 'groupDNNClassifier.run', 'DNNC.run', (['(learning_rate * i)', '(2000)', '(20)', '(20)', '[10, 10, 10, 10]', "(directory + '/')"], {}), "(learning_rate * i, 2000, 20, 20, [10, 10, 10, 10], directory + '/')\n", (432, 500), True, 'import groupDNNClassifier as DNNC\n'), ((518, 530), 'time.clock', 'time.clock', ([], {}), '()\n', (528, 530), False, 'import time\n'), ((119, 133), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (131, 133), False, 'from datetime import datetime\n')] |
from math import floor
import scipy.io as sio
from bokeh.plotting import figure, show, output_file, save, ColumnDataSource
from bokeh.models import HoverTool, CrosshairTool, PanTool, WheelZoomTool, ResetTool, SaveTool, CustomJS
from bokeh.models.widgets import Button
from bokeh.layouts import widgetbox, row, column, gridplot
import matplotlib as plt
import matplotlib.cm as cm
import numpy as np
# Different ways of extracting the matlab files - or their varying structure standards
def extract_matlab(filename):
    """Parse a MATLAB .mat recording into batch-separated stimulus groups.

    Returns a flat list where each batch is up to 10 entries of
    ``[stimulus_time_s, amplitude, [pop_deltas_ms]]`` followed by ``None``
    as a batch separator.  Amplitude 62 is a null marker between batches.

    Raises:
        KeyError: if neither known schmitt-wave key is present in the file.
    """
    file = sio.loadmat(filename)
    wave_key = ""
    trig_key = "StimTrig"
    # Due to naming convention variations, it must search for the right keys
    schmitt = ["Schmitt", "Sch_wav"]
    for i in schmitt:
        if i in file.keys():
            wave_key = i
    if wave_key == "":
        raise KeyError("Can't find the schmitt wave data")
    # Extract data using keys
    # NOTE(review): file_comments is extracted but never used below.
    file_comments = file[wave_key][0][0][1][0]
    wave_timestamp = file[wave_key][0][0][4] # Neuron pop - milliseconds since trigger
    stim_timestamp = file[trig_key][0][0][4] # Stimulus time into experiment - seconds
    stim_amplitude = file[trig_key][0][0][5] # Amplitude of particular stimulus time
    raw_values = []
    assorted_values = []
    final_values = []
    # Pair the amplitudes with stimulus time
    # Note: An Amplitude of 62 occurs between sets, null value
    for i in range(len(stim_timestamp)):
        raw_values += [[float("%.6f" % stim_timestamp[i][0]), stim_amplitude[i][0]]]
    # Calculates time difference between stimulus and pops for each group
    # then sorts them into sorted_values, before moving onto next group
    index = -1
    pops = []
    for j in wave_timestamp:
        if index < len(raw_values) - 1:
            if j > raw_values[index + 1][0]:
                # Add set to sorted_values
                if index > -1:
                    assorted_values.append([raw_values[index][0], raw_values[index][1], pops])
                # Move to next set of values
                index += 1
                pops = []
        if index > -1:
            # Compute time difference in ms, add to pops list
            difference = float("%.3f" % ((j - raw_values[index][0]) * 1000))
            if difference <= 50:
                pops += [difference]
    # Add final set to sorted_values
    assorted_values.append([raw_values[index][0], raw_values[index][1], pops])
    # Collate and order assorted_values into final_values
    # Each batch is separated by a None value in the final list
    batch = [[] for i in range(10)]
    for i in range(len(assorted_values)):
        if assorted_values[i][1] == 62: # 62 separator
            # Append sorted batch, followed by a None to separate batches
            final_values += batch + [None]
        else:
            batch[assorted_values[i][1] - 1] = assorted_values[i]
    return final_values
# Sorts the values in separate sections to list of plot-able coordinates
def vals_to_coords(vals):
    """Flatten batch-separated values into plottable (time, amplitude) pairs.

    Falsy entries (the ``None`` separators, or empty placeholder rows) close
    the current batch; each batch index adds a fractional vertical offset of
    ``batch_index / batch_count`` so batches don't overlap on the plot.
    """
    batches = []
    current = []
    for entry in vals:
        if entry:
            current.append(entry)
        else:  # separator: close the batch
            batches.append(current)
            current = []
    coords = []
    total = len(batches)
    for batch_index, batch in enumerate(batches):
        offset = batch_index / total
        for item in batch:
            amplitude = item[1]
            for pop in item[2]:
                coords.append((pop, amplitude + offset))
    return coords
# Graphing and plotting functions
def generate_graph(extracted_file=None, raw_file="", scatter=False, heatmap=False,
                   hm_width=250, hm_height=125, hm_radius=10, dot_size=0.06, widgets=False):
    """Build a bokeh figure (scatter and/or heatmap) from a .mat recording.

    Either `extracted_file` (output of extract_matlab) or `raw_file` (a .mat
    path, extracted here) must be supplied.  Returns a 3-tuple of
    (figure, scatter-toggle button or None, heatmap-toggle button or None);
    the buttons are only created when `widgets` is True.
    """
    # Initialise basic plot data
    plot_title = "Plot: "
    scatter_plot = None
    heatmap_plot = None
    toggle_scatter = None
    toggle_heatmap = None
    if (extracted_file == None and raw_file != ""):
        extracted_file = extract_matlab(raw_file)
    coordinates = vals_to_coords(extracted_file)
    print("data size: " + str(len(coordinates)))
    # Process individual data points
    n = []
    x = []
    y = []
    for i in coordinates:
        # floor() recovers the integer amplitude from the batch-offset value.
        n.append(floor(i[1]))
        x.append(i[0])
        y.append(i[1] - 1)
    # Configure hovertext for individual data points
    data_source = ColumnDataSource(data=dict(
        x=x,
        y=y,
        time=x,
        amp=n
    ))
    Hover = HoverTool(
        tooltips=[
            ("time", "@time ms"),
            ("amplitude", "@amp")
        ],
        names=["dots"]
    )
    # Determine plot title
    if (scatter and heatmap):
        plot_title = "Composite Plot: "
    elif (scatter):
        plot_title = "Scatter Plot: "
    elif (heatmap):
        plot_title = "Heatmap Plot: "
    # Initialise plot figure
    tools = [Hover, CrosshairTool(), PanTool(), WheelZoomTool(), ResetTool(), SaveTool()]
    p = figure(tools=tools, title=plot_title + raw_file.split("/")[-1], plot_width=50, plot_height=10)
    p.sizing_mode = "stretch_both"
    p.border_fill_color = "whitesmoke"
    p.min_border_left = 40
    p.min_border_right = 40
    p.min_border_bottom = 50
    p.min_border_top = 20
    p.xaxis.axis_label = "Time (ms)"
    p.yaxis.axis_label = "Amplitude"
    p.width = 160
    p.height = 70
    # Add graphs to plot -- note: the order is important for layering
    if heatmap:
        heatmap_plot = add_heatmap(p, coordinates, w=hm_width, h=hm_height, radius=hm_radius)
    if scatter:
        scatter_plot = p.scatter('x', 'y', radius=dot_size, fill_alpha=0.8,
                                 line_color=None, color="black", source=data_source, name='dots')
    # Add amplitude lines to plot
    for i in range(11):
        p.line((0, 50), (i, i), color="black", alpha=0.5)
    # Widgets to toggle visibility of layers
    if widgets:
        if scatter:
            toggle_scatter = Button(
                label="Toggle Scatter Plot")
            toggle_scatter.width = 100
            toggle_scatter.js_on_click(CustomJS(args=dict(scatter_plot=scatter_plot),
                                                code="scatter_plot.visible=!scatter_plot.visible"))
        if heatmap:
            toggle_heatmap = Button(
                label="Toggle Heatmap")
            toggle_heatmap.width = 100
            toggle_heatmap.js_on_click(CustomJS(args=dict(heatmap_plot=heatmap_plot),
                                                code="heatmap_plot.visible=!heatmap_plot.visible"))
    # Return plot w/ widgets
    return p, toggle_scatter, toggle_heatmap
def add_heatmap(p, coordinates, w=500, h=250, radius=10):
    """Render a density heatmap of (time, amplitude) coordinates onto figure `p`.

    Each coordinate stamps a filled disc of `radius` cells onto an h-by-w
    accumulation grid, which is then drawn as a bokeh image in the "RdPu"
    palette over the 0-50 ms / 0-10 amplitude data range.

    Args:
        p: bokeh figure to draw on.
        coordinates: iterable of (time_ms, amplitude) pairs.
        w, h: raster width/height in cells.
        radius: disc radius in cells.

    Returns:
        The bokeh image renderer (so callers can toggle its visibility).
    """
    raw = np.zeros((h, w))
    # Precompute the in-disc offsets once, instead of scanning the whole
    # bounding square per coordinate (the old O(points * radius^2) loop
    # flagged by the original TODO).  Condition i*i + j*j < r*r is preserved.
    span = np.arange(-radius, radius + 1)
    di, dj = np.meshgrid(span, span, indexing="ij")
    disc = di * di + dj * dj < radius * radius
    di = di[disc]
    dj = dj[disc]
    # Plot circles
    for pos in coordinates:
        rows = floor((pos[1] - 1) / 10 * h) + di
        cols = floor(pos[0] / 50 * w) + dj
        # Clip the disc where it runs off the raster edge.
        inside = (rows >= 0) & (rows < h) & (cols >= 0) & (cols < w)
        raw[rows[inside], cols[inside]] += 1
    # Generate colour map (note: `plt` is the matplotlib package itself here)
    colormap = cm.get_cmap("RdPu")
    bokeh_palette = [plt.colors.rgb2hex(m) for m in colormap(np.arange(colormap.N))]
    # Render image
    heatmap = p.image(image=[raw], x=0, y=0, dw=50, dh=10, palette=bokeh_palette)
    return heatmap
return heatmap
# Plotting for the website
def graph_single(file_name, widgets=True, width=500, height=250, radius=10, auto_open=False, dir=""):
    """Generate a composite scatter+heatmap plot for one .mat file and save it as HTML.

    The output .html goes next to the input (or into `dir` if given); with
    `auto_open` True the plot is opened in a browser instead of just saved.
    """
    plot = generate_graph(raw_file=file_name, scatter=True, heatmap=True, dot_size=0.07,
                          hm_width=width, hm_height=height, hm_radius=radius, widgets=widgets)
    output_layout = plot[0]
    file_dir = file_name.split("/")[0] + "/"
    file_name = file_name.split("/")[-1]
    if dir != "":
        file_dir = dir
    name = file_dir + file_name.replace('.mat', '') + '.html'
    title = "Composite Plot: " + file_name
    output_file(name, title)
    if widgets:
        # Stack the plot above a fixed-height row of toggle buttons.
        doc_layout = column(
            [plot[0],
             row([widgetbox([plot[1], plot[2]], width=10)], height=50, sizing_mode="fixed")],
            sizing_mode="scale_width")
        output_layout = doc_layout
    if auto_open:
        show(output_layout)
    else:
        save(output_layout)
def graph_multiple(file_names, width=250, height=100, radius=5, auto_open=False, dir="", ncols=2):
    """Generate one HTML page with a grid of composite plots, one per .mat file.

    The output filename is built from the union of the underscore-separated
    parts of every input name.  Returns that .html filename.
    """
    file_dir = file_names[0].split("/")[0] + "/"
    file_name_parts = []
    plots = []
    if dir != "":
        file_dir = dir
    # loop through files, adding to plot list
    for file in file_names:
        # Collect each unique name fragment for the combined output filename.
        for part in file.split("/")[-1].replace('.mat','').split('_'):
            if part not in file_name_parts:
                file_name_parts.append(part)
        p = generate_graph(raw_file=file, scatter=True, heatmap=True, dot_size=0.11,
                           hm_width=width, hm_height=height, hm_radius=radius, widgets=False)[0]
        p.min_border_bottom = 20
        p.min_border_left = 30
        p.min_border_right = 30
        p.height = 90
        plots.append(p)
    # Pad the final row with empty cells so the grid stays rectangular.
    for i in range(ncols - (len(file_names)%ncols)):
        plots.append(None)
    # make into nice grid
    plots_layout = []
    i = 0
    while i < len(plots):
        plots_layout += [[[plots[i:i+ncols]]]]
        i += ncols
    # generate final layout
    file_name = '_'.join(file_name_parts)
    output_file(file_dir+file_name+".html", file_name)
    output_layout = gridplot(plots, ncols=ncols, sizing_mode="scale_width", merge_tools=True)
    if auto_open:
        show(output_layout)
    else:
        save(output_layout)
    return file_name+".html"
# --- TEMPORARY TESTING CODE; REMOVE IN FINAL BUILD --- #
# Manual test entry point; the real UI lives in app.py.
if __name__ == '__main__':
    print("Make sure you're running app.py if you want the web interface")
    print("This code is just for testing functions\n")
    #graph_single("temp/659607_rec03_all.mat", widgets=True, width=500, height=200, radius=9)
    #graph_multiple(["temp/659602_rec03_all.mat", "temp/659602_rec03_f01.mat", "temp/659602_rec03_f02.mat", "temp/659602_rec03_f03.mat"], auto_open=True, ncols=2)
| [
"bokeh.plotting.show",
"bokeh.layouts.widgetbox",
"math.floor",
"numpy.arange",
"bokeh.models.SaveTool",
"scipy.io.loadmat",
"bokeh.plotting.save",
"bokeh.models.widgets.Button",
"bokeh.layouts.gridplot",
"numpy.zeros",
"matplotlib.colors.rgb2hex",
"bokeh.models.WheelZoomTool",
"bokeh.models... | [((533, 554), 'scipy.io.loadmat', 'sio.loadmat', (['filename'], {}), '(filename)\n', (544, 554), True, 'import scipy.io as sio\n'), ((4264, 4350), 'bokeh.models.HoverTool', 'HoverTool', ([], {'tooltips': "[('time', '@time ms'), ('amplitude', '@amp')]", 'names': "['dots']"}), "(tooltips=[('time', '@time ms'), ('amplitude', '@amp')], names=[\n 'dots'])\n", (4273, 4350), False, 'from bokeh.models import HoverTool, CrosshairTool, PanTool, WheelZoomTool, ResetTool, SaveTool, CustomJS\n'), ((6545, 6561), 'numpy.zeros', 'np.zeros', (['(h, w)'], {}), '((h, w))\n', (6553, 6561), True, 'import numpy as np\n'), ((7119, 7138), 'matplotlib.cm.get_cmap', 'cm.get_cmap', (['"""RdPu"""'], {}), "('RdPu')\n", (7130, 7138), True, 'import matplotlib.cm as cm\n'), ((7927, 7951), 'bokeh.plotting.output_file', 'output_file', (['name', 'title'], {}), '(name, title)\n', (7938, 7951), False, 'from bokeh.plotting import figure, show, output_file, save, ColumnDataSource\n'), ((9375, 9429), 'bokeh.plotting.output_file', 'output_file', (["(file_dir + file_name + '.html')", 'file_name'], {}), "(file_dir + file_name + '.html', file_name)\n", (9386, 9429), False, 'from bokeh.plotting import figure, show, output_file, save, ColumnDataSource\n'), ((9446, 9519), 'bokeh.layouts.gridplot', 'gridplot', (['plots'], {'ncols': 'ncols', 'sizing_mode': '"""scale_width"""', 'merge_tools': '(True)'}), "(plots, ncols=ncols, sizing_mode='scale_width', merge_tools=True)\n", (9454, 9519), False, 'from bokeh.layouts import widgetbox, row, column, gridplot\n'), ((4666, 4681), 'bokeh.models.CrosshairTool', 'CrosshairTool', ([], {}), '()\n', (4679, 4681), False, 'from bokeh.models import HoverTool, CrosshairTool, PanTool, WheelZoomTool, ResetTool, SaveTool, CustomJS\n'), ((4683, 4692), 'bokeh.models.PanTool', 'PanTool', ([], {}), '()\n', (4690, 4692), False, 'from bokeh.models import HoverTool, CrosshairTool, PanTool, WheelZoomTool, ResetTool, SaveTool, CustomJS\n'), ((4694, 4709), 
'bokeh.models.WheelZoomTool', 'WheelZoomTool', ([], {}), '()\n', (4707, 4709), False, 'from bokeh.models import HoverTool, CrosshairTool, PanTool, WheelZoomTool, ResetTool, SaveTool, CustomJS\n'), ((4711, 4722), 'bokeh.models.ResetTool', 'ResetTool', ([], {}), '()\n', (4720, 4722), False, 'from bokeh.models import HoverTool, CrosshairTool, PanTool, WheelZoomTool, ResetTool, SaveTool, CustomJS\n'), ((4724, 4734), 'bokeh.models.SaveTool', 'SaveTool', ([], {}), '()\n', (4732, 4734), False, 'from bokeh.models import HoverTool, CrosshairTool, PanTool, WheelZoomTool, ResetTool, SaveTool, CustomJS\n'), ((6626, 6654), 'math.floor', 'floor', (['((pos[1] - 1) / 10 * h)'], {}), '((pos[1] - 1) / 10 * h)\n', (6631, 6654), False, 'from math import floor\n'), ((6671, 6693), 'math.floor', 'floor', (['(pos[0] / 50 * w)'], {}), '(pos[0] / 50 * w)\n', (6676, 6693), False, 'from math import floor\n'), ((7160, 7181), 'matplotlib.colors.rgb2hex', 'plt.colors.rgb2hex', (['m'], {}), '(m)\n', (7178, 7181), True, 'import matplotlib as plt\n'), ((8215, 8234), 'bokeh.plotting.show', 'show', (['output_layout'], {}), '(output_layout)\n', (8219, 8234), False, 'from bokeh.plotting import figure, show, output_file, save, ColumnDataSource\n'), ((8253, 8272), 'bokeh.plotting.save', 'save', (['output_layout'], {}), '(output_layout)\n', (8257, 8272), False, 'from bokeh.plotting import figure, show, output_file, save, ColumnDataSource\n'), ((9547, 9566), 'bokeh.plotting.show', 'show', (['output_layout'], {}), '(output_layout)\n', (9551, 9566), False, 'from bokeh.plotting import figure, show, output_file, save, ColumnDataSource\n'), ((9585, 9604), 'bokeh.plotting.save', 'save', (['output_layout'], {}), '(output_layout)\n', (9589, 9604), False, 'from bokeh.plotting import figure, show, output_file, save, ColumnDataSource\n'), ((4026, 4037), 'math.floor', 'floor', (['i[1]'], {}), '(i[1])\n', (4031, 4037), False, 'from math import floor\n'), ((5733, 5768), 'bokeh.models.widgets.Button', 'Button', ([], 
{'label': '"""Toggle Scatter Plot"""'}), "(label='Toggle Scatter Plot')\n", (5739, 5768), False, 'from bokeh.models.widgets import Button\n'), ((6060, 6090), 'bokeh.models.widgets.Button', 'Button', ([], {'label': '"""Toggle Heatmap"""'}), "(label='Toggle Heatmap')\n", (6066, 6090), False, 'from bokeh.models.widgets import Button\n'), ((7200, 7221), 'numpy.arange', 'np.arange', (['colormap.N'], {}), '(colormap.N)\n', (7209, 7221), True, 'import numpy as np\n'), ((8038, 8077), 'bokeh.layouts.widgetbox', 'widgetbox', (['[plot[1], plot[2]]'], {'width': '(10)'}), '([plot[1], plot[2]], width=10)\n', (8047, 8077), False, 'from bokeh.layouts import widgetbox, row, column, gridplot\n')] |
# Local imports
import datetime
# Third party
import pandas as pd
import pymysql
from sqlalchemy import create_engine
class PartsPalImport():
def __init__(self,name,user,host,port,password):
self.user = user
self.password = password
self.host = host
self.port = port
self.name = name
self.connection_results = self.connection_to_db(self.name,self.user,self.host,self.port,self.password)
def connection_to_db(self,name,user,host,port,password):
connection_string = 'mysql+pymysql://{}:{}@{}:{}/{}?charset=utf8mb4'.format(user, password, host, port, name)
try:
print('establishing connection...')
connection = create_engine(connection_string)
connection.connect()
except Exception:
raise("Error, can't establishing connection...")
else:
print ('No exception occurred')
return connection
def read_input_file(self,filename):
data=pd.read_csv(filename)
return data
def insert_date_to_column(self,data,day):
data['date'] = datetime.date.today()+datetime.timedelta(days=day)
return data
def import_to_sql(self,data,connection):
data.to_sql(name='import_excel_partsauthority', con=connection, if_exists = 'append', index=False)
# def main():
# # Database credentials
# NAME='excel_comparison'
# USER = 'root'
# PASSWORD = '<PASSWORD>'
# HOST = 'localhost'
# PORT = 3306
# IMPORTING CSV FILE
# data = read_input_file('prueba.csv')
# add_future_date(data,1)
# # IMPORTING DATA TO SQL
# connection=connection_to_db(USER,PASSWORD,HOST,PORT,NAME)
# import_to_sql(data,connection)
# if __name__ == "__main__":
# main() | [
"sqlalchemy.create_engine",
"datetime.date.today",
"pandas.read_csv",
"datetime.timedelta"
] | [((888, 909), 'pandas.read_csv', 'pd.read_csv', (['filename'], {}), '(filename)\n', (899, 909), True, 'import pandas as pd\n'), ((648, 680), 'sqlalchemy.create_engine', 'create_engine', (['connection_string'], {}), '(connection_string)\n', (661, 680), False, 'from sqlalchemy import create_engine\n'), ((986, 1007), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (1005, 1007), False, 'import datetime\n'), ((1008, 1036), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': 'day'}), '(days=day)\n', (1026, 1036), False, 'import datetime\n')] |
import utils
import vss
from crypto import G1, G2, add, neg, multiply, random_scalar, check_pairing
w3 = utils.connect()
contract = utils.deploy_contract('DKG')
def test_add():
a = multiply(G1, 5)
b = multiply(G1, 10)
s = add(a, b)
assert contract.bn128_add([a[0], a[1], b[0], b[1]]) == list(s)
def test_multiply():
assert G1 == (1, 2)
assert contract.bn128_multiply([1, 2, 5]) == list(multiply(G1, 5))
def test_check_pairing():
P1 = multiply(G1, 5)
Q1 = G2
Q2 = multiply(neg(G2), 5)
P2 = G1
assert check_pairing(P1, Q1, P2, Q2)
def test_verify_decryption_key():
sk1, sk2 = random_scalar(), random_scalar()
pk1, pk2 = multiply(G1, sk1), multiply(G1, sk2)
shared_key = vss.shared_key(sk1, pk2)
chal, resp = vss.shared_key_proof(sk1, pk2)
assert vss.dleq_verify(G1, pk1, pk2, shared_key, chal, resp)
assert contract.verify_decryption_key(
shared_key,
[chal, resp],
pk1,
pk2
)
def test_verify_sk_knowledge():
sk = random_scalar()
pk = multiply(G1, sk)
addr = w3.eth.accounts[0]
proof = vss.prove_sk_knowledge(sk, pk, addr)
assert vss.verify_sk_knowledge(pk, proof[0], proof[1], addr)
print("sk", sk)
print("pk", pk)
print("account", addr)
print("proof", proof)
assert contract.verify_sk_knowledge(pk, proof)
| [
"vss.verify_sk_knowledge",
"crypto.random_scalar",
"vss.prove_sk_knowledge",
"vss.dleq_verify",
"crypto.multiply",
"crypto.check_pairing",
"utils.deploy_contract",
"vss.shared_key_proof",
"crypto.neg",
"utils.connect",
"crypto.add",
"vss.shared_key"
] | [((107, 122), 'utils.connect', 'utils.connect', ([], {}), '()\n', (120, 122), False, 'import utils\n'), ((134, 162), 'utils.deploy_contract', 'utils.deploy_contract', (['"""DKG"""'], {}), "('DKG')\n", (155, 162), False, 'import utils\n'), ((189, 204), 'crypto.multiply', 'multiply', (['G1', '(5)'], {}), '(G1, 5)\n', (197, 204), False, 'from crypto import G1, G2, add, neg, multiply, random_scalar, check_pairing\n'), ((213, 229), 'crypto.multiply', 'multiply', (['G1', '(10)'], {}), '(G1, 10)\n', (221, 229), False, 'from crypto import G1, G2, add, neg, multiply, random_scalar, check_pairing\n'), ((238, 247), 'crypto.add', 'add', (['a', 'b'], {}), '(a, b)\n', (241, 247), False, 'from crypto import G1, G2, add, neg, multiply, random_scalar, check_pairing\n'), ((470, 485), 'crypto.multiply', 'multiply', (['G1', '(5)'], {}), '(G1, 5)\n', (478, 485), False, 'from crypto import G1, G2, add, neg, multiply, random_scalar, check_pairing\n'), ((551, 580), 'crypto.check_pairing', 'check_pairing', (['P1', 'Q1', 'P2', 'Q2'], {}), '(P1, Q1, P2, Q2)\n', (564, 580), False, 'from crypto import G1, G2, add, neg, multiply, random_scalar, check_pairing\n'), ((735, 759), 'vss.shared_key', 'vss.shared_key', (['sk1', 'pk2'], {}), '(sk1, pk2)\n', (749, 759), False, 'import vss\n'), ((777, 807), 'vss.shared_key_proof', 'vss.shared_key_proof', (['sk1', 'pk2'], {}), '(sk1, pk2)\n', (797, 807), False, 'import vss\n'), ((820, 873), 'vss.dleq_verify', 'vss.dleq_verify', (['G1', 'pk1', 'pk2', 'shared_key', 'chal', 'resp'], {}), '(G1, pk1, pk2, shared_key, chal, resp)\n', (835, 873), False, 'import vss\n'), ((1033, 1048), 'crypto.random_scalar', 'random_scalar', ([], {}), '()\n', (1046, 1048), False, 'from crypto import G1, G2, add, neg, multiply, random_scalar, check_pairing\n'), ((1058, 1074), 'crypto.multiply', 'multiply', (['G1', 'sk'], {}), '(G1, sk)\n', (1066, 1074), False, 'from crypto import G1, G2, add, neg, multiply, random_scalar, check_pairing\n'), ((1118, 1154), 
'vss.prove_sk_knowledge', 'vss.prove_sk_knowledge', (['sk', 'pk', 'addr'], {}), '(sk, pk, addr)\n', (1140, 1154), False, 'import vss\n'), ((1166, 1219), 'vss.verify_sk_knowledge', 'vss.verify_sk_knowledge', (['pk', 'proof[0]', 'proof[1]', 'addr'], {}), '(pk, proof[0], proof[1], addr)\n', (1189, 1219), False, 'import vss\n'), ((516, 523), 'crypto.neg', 'neg', (['G2'], {}), '(G2)\n', (519, 523), False, 'from crypto import G1, G2, add, neg, multiply, random_scalar, check_pairing\n'), ((632, 647), 'crypto.random_scalar', 'random_scalar', ([], {}), '()\n', (645, 647), False, 'from crypto import G1, G2, add, neg, multiply, random_scalar, check_pairing\n'), ((649, 664), 'crypto.random_scalar', 'random_scalar', ([], {}), '()\n', (662, 664), False, 'from crypto import G1, G2, add, neg, multiply, random_scalar, check_pairing\n'), ((680, 697), 'crypto.multiply', 'multiply', (['G1', 'sk1'], {}), '(G1, sk1)\n', (688, 697), False, 'from crypto import G1, G2, add, neg, multiply, random_scalar, check_pairing\n'), ((699, 716), 'crypto.multiply', 'multiply', (['G1', 'sk2'], {}), '(G1, sk2)\n', (707, 716), False, 'from crypto import G1, G2, add, neg, multiply, random_scalar, check_pairing\n'), ((416, 431), 'crypto.multiply', 'multiply', (['G1', '(5)'], {}), '(G1, 5)\n', (424, 431), False, 'from crypto import G1, G2, add, neg, multiply, random_scalar, check_pairing\n')] |
import signal
from unittest.mock import MagicMock
from klutch.exit_handler import ExitHandler
def test_initial_state():
handler = ExitHandler()
assert not handler.safe_to_exit
assert not handler.should_exit
def test_traps_signals(monkeypatch):
mock_signal = MagicMock()
monkeypatch.setattr("signal.signal", mock_signal)
handler = ExitHandler()
assert len(mock_signal.call_args_list) == 2
assert {c[0][0] for c in mock_signal.call_args_list} == {
signal.SIGTERM,
signal.SIGINT,
}
assert all(c[0][1] == handler.handle_signal for c in mock_signal.call_args_list)
def test_delays_exit(monkeypatch):
mock_exit = MagicMock()
monkeypatch.setattr("sys.exit", mock_exit)
handler = ExitHandler()
handler.handle_signal(signal.SIGTERM, 0)
# should not exit outside safe_exit context
assert handler.should_exit
assert not handler.safe_to_exit
mock_exit.assert_not_called()
# should immediately exit when entering safe_exit context
with handler.safe_exit():
assert handler.safe_to_exit
mock_exit.assert_called_once_with(0)
def test_immediately_exits_if_safe(monkeypatch):
mock_exit = MagicMock()
monkeypatch.setattr("sys.exit", mock_exit)
handler = ExitHandler()
# when in safe_exit context, should immediately exit when receiving signal
with handler.safe_exit():
handler.handle_signal(signal.SIGTERM, 0)
mock_exit.assert_called_once_with(0)
| [
"klutch.exit_handler.ExitHandler",
"unittest.mock.MagicMock"
] | [((137, 150), 'klutch.exit_handler.ExitHandler', 'ExitHandler', ([], {}), '()\n', (148, 150), False, 'from klutch.exit_handler import ExitHandler\n'), ((279, 290), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (288, 290), False, 'from unittest.mock import MagicMock\n'), ((359, 372), 'klutch.exit_handler.ExitHandler', 'ExitHandler', ([], {}), '()\n', (370, 372), False, 'from klutch.exit_handler import ExitHandler\n'), ((674, 685), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (683, 685), False, 'from unittest.mock import MagicMock\n'), ((747, 760), 'klutch.exit_handler.ExitHandler', 'ExitHandler', ([], {}), '()\n', (758, 760), False, 'from klutch.exit_handler import ExitHandler\n'), ((1195, 1206), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1204, 1206), False, 'from unittest.mock import MagicMock\n'), ((1268, 1281), 'klutch.exit_handler.ExitHandler', 'ExitHandler', ([], {}), '()\n', (1279, 1281), False, 'from klutch.exit_handler import ExitHandler\n')] |
from django.contrib import admin
from safe.models import PublicKey, Credential, UserSecret
class PublicKeyAdmin(admin.ModelAdmin):
raw_id_fields = ['user']
readonly_fields = ['created', 'modified']
list_display = ['user', 'created', 'modified']
class UserSecretInline(admin.StackedInline):
model = UserSecret
extra = 0
raw_id_fields = ['user']
readonly_fields = ['encrypted_secret', 'created', 'modified']
class CredentialAdmin(admin.ModelAdmin):
inlines = [UserSecretInline]
list_display = ['title', 'slug', 'tags', 'login_name', 'created', 'modified']
readonly_fields = ['created', 'modified']
admin.site.register(PublicKey, PublicKeyAdmin)
admin.site.register(Credential, CredentialAdmin)
| [
"django.contrib.admin.site.register"
] | [((646, 692), 'django.contrib.admin.site.register', 'admin.site.register', (['PublicKey', 'PublicKeyAdmin'], {}), '(PublicKey, PublicKeyAdmin)\n', (665, 692), False, 'from django.contrib import admin\n'), ((693, 741), 'django.contrib.admin.site.register', 'admin.site.register', (['Credential', 'CredentialAdmin'], {}), '(Credential, CredentialAdmin)\n', (712, 741), False, 'from django.contrib import admin\n')] |
# -*- coding: utf-8 -*-
import pytest
from dongsam.basic import *
from dongsam import basic
word_type = {
0: 'eng',
1: 'kor',
2: 'chi'
}
class TestBasic:
def test_get_key_from_value(self):
assert get_key_from_value(word_type, 'kor') == 1
def test_get_key_from_value_not_exist(self):
assert get_key_from_value(word_type, 'kor2') == None
def test_basic_get_key_from_value(self):
assert basic.get_key_from_value(word_type, 'kor') == 1
def test_basic_list_get_key_from_value(self):
assert basic.struct.get_key_from_value(word_type, 'kor') == 1 | [
"dongsam.basic.struct.get_key_from_value",
"dongsam.basic.get_key_from_value"
] | [((440, 482), 'dongsam.basic.get_key_from_value', 'basic.get_key_from_value', (['word_type', '"""kor"""'], {}), "(word_type, 'kor')\n", (464, 482), False, 'from dongsam import basic\n'), ((554, 603), 'dongsam.basic.struct.get_key_from_value', 'basic.struct.get_key_from_value', (['word_type', '"""kor"""'], {}), "(word_type, 'kor')\n", (585, 603), False, 'from dongsam import basic\n')] |
from maya import cmds
import pyblish.api
class CollectMayaAscii(pyblish.api.InstancePlugin):
"""Collect May Ascii Data
"""
order = pyblish.api.CollectorOrder + 0.2
label = 'Collect Model Data'
families = ["mayaAscii"]
def process(self, instance):
# Extract only current frame (override)
frame = cmds.currentTime(query=True)
instance.data["frameStart"] = frame
instance.data["frameEnd"] = frame
# make ftrack publishable
if instance.data.get('families'):
instance.data['families'].append('ftrack')
else:
instance.data['families'] = ['ftrack']
| [
"maya.cmds.currentTime"
] | [((341, 369), 'maya.cmds.currentTime', 'cmds.currentTime', ([], {'query': '(True)'}), '(query=True)\n', (357, 369), False, 'from maya import cmds\n')] |
#!/usr/bin/env python
import os, logging
from alignclf import set_log_clf
from alignclf.utils import setup_logging
logger = logging.getLogger(__name__)
if __name__ == '__main__':
setup_logging(default_path='')
datadir = os.path.join('.', 'data', 'synthetic', 'BPI2018', 'net5')
dirty_datadir = os.path.join(datadir, 'dirty')
clf_type = 'concept-name'
memory = 6
prom_dir = os.path.join('.', 'prom-nightly')
prom_pkg = os.path.join(prom_dir, 'ProM651_lib')
jar_name = 'set-log-classifier.jar'
jar_fpath = os.path.join(prom_dir, jar_name)
main_class = 'org.processmining.experiments.log.SetLogClassifier'
log_folders = [
'l1000',
'l2000',
'l5000'
]
for folder in log_folders:
dirpath = os.path.join(dirty_datadir, folder)
for f in os.listdir(dirpath):
log_fpath = os.path.join(dirpath, f)
log_name = f.strip('.xes.gz')
log_name_modified = log_name + '-' + folder
log_outpath = os.path.join(datadir, log_name_modified + '.xes.gz')
logger.info('Modified log name: {}'.format(log_name_modified))
logger.info('Log outpath: {}'.format(log_outpath))
set_log_clf.set_log_clf(log_fpath, clf_type, log_outpath, memory,
jar_fpath, main_class, prom_dir, prom_pkg)
| [
"logging.getLogger",
"os.listdir",
"os.path.join",
"alignclf.utils.setup_logging",
"alignclf.set_log_clf.set_log_clf"
] | [((128, 155), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (145, 155), False, 'import os, logging\n'), ((189, 219), 'alignclf.utils.setup_logging', 'setup_logging', ([], {'default_path': '""""""'}), "(default_path='')\n", (202, 219), False, 'from alignclf.utils import setup_logging\n'), ((235, 292), 'os.path.join', 'os.path.join', (['"""."""', '"""data"""', '"""synthetic"""', '"""BPI2018"""', '"""net5"""'], {}), "('.', 'data', 'synthetic', 'BPI2018', 'net5')\n", (247, 292), False, 'import os, logging\n'), ((313, 343), 'os.path.join', 'os.path.join', (['datadir', '"""dirty"""'], {}), "(datadir, 'dirty')\n", (325, 343), False, 'import os, logging\n'), ((405, 438), 'os.path.join', 'os.path.join', (['"""."""', '"""prom-nightly"""'], {}), "('.', 'prom-nightly')\n", (417, 438), False, 'import os, logging\n'), ((454, 491), 'os.path.join', 'os.path.join', (['prom_dir', '"""ProM651_lib"""'], {}), "(prom_dir, 'ProM651_lib')\n", (466, 491), False, 'import os, logging\n'), ((548, 580), 'os.path.join', 'os.path.join', (['prom_dir', 'jar_name'], {}), '(prom_dir, jar_name)\n', (560, 580), False, 'import os, logging\n'), ((778, 813), 'os.path.join', 'os.path.join', (['dirty_datadir', 'folder'], {}), '(dirty_datadir, folder)\n', (790, 813), False, 'import os, logging\n'), ((832, 851), 'os.listdir', 'os.listdir', (['dirpath'], {}), '(dirpath)\n', (842, 851), False, 'import os, logging\n'), ((877, 901), 'os.path.join', 'os.path.join', (['dirpath', 'f'], {}), '(dirpath, f)\n', (889, 901), False, 'import os, logging\n'), ((1026, 1078), 'os.path.join', 'os.path.join', (['datadir', "(log_name_modified + '.xes.gz')"], {}), "(datadir, log_name_modified + '.xes.gz')\n", (1038, 1078), False, 'import os, logging\n'), ((1231, 1343), 'alignclf.set_log_clf.set_log_clf', 'set_log_clf.set_log_clf', (['log_fpath', 'clf_type', 'log_outpath', 'memory', 'jar_fpath', 'main_class', 'prom_dir', 'prom_pkg'], {}), '(log_fpath, clf_type, log_outpath, memory, jar_fpath,\n 
main_class, prom_dir, prom_pkg)\n', (1254, 1343), False, 'from alignclf import set_log_clf\n')] |
import gpu
import numpy
from bgl import *
from . rectangle import Rectangle
from gpu_extras.batch import batch_for_shader
shader = gpu.shader.from_builtin('2D_UNIFORM_COLOR')
class InterpolationPreview:
def __init__(self, interpolation, position, width, resolution):
self.interpolation = interpolation
self.position = position
self.width = width
self.normalHeight = width
self.resolution = resolution
self.padding = 5
self.boundary = Rectangle()
self.samples = interpolation.sample(amount = resolution)
def calculateBoundaries(self):
minSample = self.samples.getMinValue()
maxSample = self.samples.getMaxValue()
bottomOvershoot = abs(min(0, minSample) * self.normalHeight)
topOvershoot = abs(max(0, maxSample - 1) * self.normalHeight)
x1 = self.position.x
x2 = x1 + self.width
y1 = self.position.y
y2 = y1 - self.normalHeight - bottomOvershoot - topOvershoot
self.boundary.resetPosition(x1, y1, x2, y2)
self.interpolationLeft = x1
self.interpolationRight = x2
self.interpolationTop = y1 - topOvershoot - self.padding
self.interpolationBottom = y2 + bottomOvershoot + self.padding
def getHeight(self):
return self.boundary.height
def draw(self, backgroundColor = (0.9, 0.9, 0.9, 0.6),
borderColor = (0.9, 0.76, 0.4, 1.0),
borderThickness = -1):
self.boundary.draw(
color = backgroundColor,
borderColor = borderColor,
borderThickness = borderThickness
)
self.drawInterpolationCurve()
self.drawRangeLines()
def drawInterpolationCurve(self):
left, right = self.interpolationLeft, self.interpolationRight
top, bottom = self.interpolationTop, self.interpolationBottom
x = numpy.linspace(left, right, self.resolution)
y = top + (self.samples.asNumpyArray() - 1) * (top - bottom)
points = numpy.stack((x, y), axis = -1).astype(numpy.float32)
batch = batch_for_shader(shader, 'LINE_STRIP', {"pos": points})
shader.bind()
shader.uniform_float("color", (0.2, 0.2, 0.2, 0.8))
glLineWidth(2)
glEnable(GL_BLEND)
glEnable(GL_LINE_SMOOTH)
batch.draw(shader)
glDisable(GL_LINE_SMOOTH)
glDisable(GL_BLEND)
glLineWidth(1)
def drawRangeLines(self):
points = (
(self.boundary.left, self.interpolationTop),
(self.boundary.right, self.interpolationTop),
(self.boundary.left, self.interpolationBottom),
(self.boundary.right, self.interpolationBottom))
batch = batch_for_shader(shader, 'LINES', {"pos": points})
shader.bind()
shader.uniform_float("color", (0.2, 0.2, 0.2, 0.5))
glLineWidth(1)
glEnable(GL_BLEND)
batch.draw(shader)
glDisable(GL_BLEND)
| [
"numpy.stack",
"gpu.shader.from_builtin",
"gpu_extras.batch.batch_for_shader",
"numpy.linspace"
] | [((132, 175), 'gpu.shader.from_builtin', 'gpu.shader.from_builtin', (['"""2D_UNIFORM_COLOR"""'], {}), "('2D_UNIFORM_COLOR')\n", (155, 175), False, 'import gpu\n'), ((1903, 1947), 'numpy.linspace', 'numpy.linspace', (['left', 'right', 'self.resolution'], {}), '(left, right, self.resolution)\n', (1917, 1947), False, 'import numpy\n'), ((2103, 2158), 'gpu_extras.batch.batch_for_shader', 'batch_for_shader', (['shader', '"""LINE_STRIP"""', "{'pos': points}"], {}), "(shader, 'LINE_STRIP', {'pos': points})\n", (2119, 2158), False, 'from gpu_extras.batch import batch_for_shader\n'), ((2740, 2790), 'gpu_extras.batch.batch_for_shader', 'batch_for_shader', (['shader', '"""LINES"""', "{'pos': points}"], {}), "(shader, 'LINES', {'pos': points})\n", (2756, 2790), False, 'from gpu_extras.batch import batch_for_shader\n'), ((2034, 2062), 'numpy.stack', 'numpy.stack', (['(x, y)'], {'axis': '(-1)'}), '((x, y), axis=-1)\n', (2045, 2062), False, 'import numpy\n')] |
from django.db import models
from django.db.models import Q
class PaperQuerySet(models.QuerySet):
def published(self):
"""Get all published items."""
return self.filter(Q(publish=True))
| [
"django.db.models.Q"
] | [((191, 206), 'django.db.models.Q', 'Q', ([], {'publish': '(True)'}), '(publish=True)\n', (192, 206), False, 'from django.db.models import Q\n')] |
#
# Copyright (c) 2020 Bitdefender
# SPDX-License-Identifier: Apache-2.0
#
import yaml
import struct
import os
import crc32
from options import get_options_for_os_version
from objects import CamiYAMLObject, CamiObject, CamiAtom, CamiDataTable, FilePointerException, get_all_objects
from common import IntrocoreVersion
from intro_defines import section_hints, defines, detour_args, version_any
class WinSupportedOs(CamiYAMLObject, CamiAtom):
min_intro_ver = IntrocoreVersion.min_version()
max_intro_ver = IntrocoreVersion.max_version()
yaml_tag = "!intro_update_win_supported_os"
"""
struct _CAMI_WIN_DESCRIPTOR
{
DWORD BuildNumber; // Buildnumber for this Windows OS
BOOLEAN Kpti; // If this OS has Kpti support.
BOOLEAN Is64; // If this OS is 64 bits.
WORD _Reserved1; // Alignment mostly, but may become useful.
QWORD MinIntroVersion; // Minimum introcore version which supports this OS
QWORD MaxIntroVersion; // Maximum introcore version which supports this OS
DWORD KmStructuresCount; // KM opaque fields count
DWORD KmStructuresTable; // KM opaque fields file pointer. (pointer to a CAMI_OPAQUE_STRUCTURE[] array
DWORD UmStructuresCount; // UM opaque fields count
DWORD UmStructuresTable; // UM opaque fields file pointer (pointer to a CAMI_OPAQUE_STRUCTURE[] array
DWORD FunctionCount; // Functions count
DWORD FunctionTable; // Functions file pointer. (pointer to a CAMI_WIN_FUNCTION[] array.
DWORD CustomProtectionOffset; // Protection flags for this os. (pointer to a CAMI_CUSTOM_PROTECTION struct)
DWORD VersionStringOffset;
DWORD _Reserved3;
DWORD _Reserved4;
}
"""
descriptor_layout = "<IBBHQQIIIIIIIIII"
def post_create(self, state):
if hasattr(self, "functions"):
self.functions = WinOsFunctionsTable(state["functions"])
def set_um_fields(self, um_fields_list):
""" Set the UM fields for this OS
We have to do this by hand because a set of um fields apply to a lot of supported OS versions.
This method will iterate the um_fields_list and find the suitable one for this OS.
Args:
um_fields_list: A list of WinOsUmFields.
Raises:
Exception: If multiple or none um_fields match this OS version.
"""
if hasattr(self, "um_fields"):
return
found = None
for um in um_fields_list:
if self.is_64 == um.is64 and self.build_number >= um.min_ver and self.build_number <= um.max_ver:
if found is not None:
raise Exception(
"Found duplicated UM fields for build_number %d, is_64: %r" % (self.build_number, self.is_64)
)
found = um.fields
if found is None:
raise Exception("Could not find um for build_number %d, is_64: %d" % (self.build_number, self.is_64))
self.um_fields = found
def set_functions(self, functions):
""" Set the functions for this OS
Given the list of functions, this method will filter it and will keep only the function with patterns and
arguments needed for this OS and will create the final form of the functions attribute a.k.a. a CamiDataTable instead
of a python list.
Args:
functions: A list of WinFunction
"""
if hasattr(self, "functions"):
return
funcs = WinOsFunctionsTable()
print("Functions for Windows OS {} (is 64: {})".format(self.build_number, self.is_64))
for function in functions:
new_func = function.get_function_for_os(self)
if new_func is not None:
funcs.add_entry(new_func)
print(
"\t- {} with {} patterns and arguments: {}".format(
new_func.name.ljust(30),
str(new_func.patterns.get_entry_count()).rjust(2),
new_func.arguments.args,
).expandtabs()
)
self.functions = funcs
def get_descriptor(self):
""" Generate the CamiDataTable entry for this OS version
Returns:
bytes: the CamiDataTable entry (a _CAMI_WIN_DESCRIPTOR structure)
Raises:
FilePointerException: If this method is called before generating its body with serialize()
"""
print(
"Windows OS {} (kpti: {}, 64: {})".format(
str(self.build_number).ljust(5), str(self.kpti_installed).ljust(5), str(self.is_64).ljust(5),
)
)
print("\t- Options: ", self.intro_options)
print("\t- Min intro version: ", self.min_intro_ver)
print("\t- Max intro version: ", self.max_intro_ver)
return struct.pack(
self.descriptor_layout,
self.build_number,
self.kpti_installed,
self.is_64,
0,
self.min_intro_ver.get_raw(),
self.max_intro_ver.get_raw(),
self.km_fields.get_entry_count(),
self.km_fields.get_file_pointer(),
self.um_fields.get_entry_count(),
self.um_fields.get_file_pointer(),
self.functions.get_entry_count(),
self.functions.get_file_pointer(),
self.intro_options.get_file_pointer(),
self.version_string.get_file_pointer(),
0,
0,
) # reserved
def serialize(self, start):
""" Generate the body of this OS in it's binary form.
Here we are also setting the functions and usermode fields if they are empty.
Args:
start: The offset in the file where this os body will be placed
Returns:
bytes: The body of this OS: um and km fields + functions
"""
self.intro_options = get_options_for_os_version((self.build_number, self.kpti_installed, self.is_64))
self.set_functions(get_all_objects(WinFunction))
self.set_um_fields(get_all_objects(WinOsUmFields))
data = self.km_fields.serialize(start)
data += self.um_fields.serialize(start + len(data))
data += self.functions.serialize(start + len(data))
data += self.intro_options.serialize(start + len(data))
data += self.version_string.serialize(start + len(data))
return data
class WinVersionString(CamiYAMLObject, CamiObject):
yaml_tag = "!intro_update_win_version_string"
descriptor_layout = "<Q{}sQ{}s".format(defines["MAX_VERSION_STRING_SIZE"], defines["MAX_VERSION_STRING_SIZE"])
def serialize(self, start):
self.set_file_pointer(start)
size = len(self.version_string) + 1
if size > (defines["MAX_VERSION_STRING_SIZE"] - 1):
raise Exception("String is too big!")
size_server = len(self.server_version_string) + 1
if size_server > (defines["MAX_VERSION_STRING_SIZE"] - 1):
raise Exception("String for server is too big!")
return struct.pack(
self.descriptor_layout,
size,
bytes(self.version_string, "utf-8"),
size_server,
bytes(self.server_version_string, "utf-8"),
)
class WinOsUmFields(CamiYAMLObject):
yaml_tag = "!intro_update_win_um_fields"
class WinFunction(CamiYAMLObject, CamiAtom):
yaml_tag = "!intro_update_win_function"
"""
struct _CAMI_WIN_FUNCTION
{
DWORD NameHash;
DWORD PatternsCount;
DWORD PatternsTable;
DWORD ArgumentsCount;
DWORD ArgumentsTable;
QWORD _Reserved1;
DWORD _Reserved2;
DWORD _Reserved3;
}
"""
g_patterns_list = []
descriptor_layout = "<IIIIIQII"
def __init__(self, other):
""" This is basically a copy constructor.
We don't use deepcopy because we don't want to duplicate the patterns or arguments
Args:
other: Another WinFunction object
Attributes:
name: The function name
patterns: A table* with the patterns for this function.
arguments: A WinFunctionArgument with the arguments for this function.
Notes:
* Depending on how the object was created, table could mean:
- A python list, if the object was created by the YAML loader. This is an intermediate form and should
be transformed in a CamiDataTable.
- A CamiDataTable, if the object was created by get_function_for_os()
"""
if type(self) != type(other):
raise Exception("Invalid object type sent to {} copy constructor: {}".format(type(self), type(other)))
self.__dict__.update(other.__dict__)
def post_create(self, state):
""" This is the YAML constructor
Args:
state: The YAML file in a dictionary form
"""
# We are doing this because some functions don't have custom arguments
if not hasattr(self, "arguments"):
self.arguments = []
def __eq__(self, other):
if type(self) != type(other):
raise Exception("Invalid comparison between %s and %s" % (type(self), type(other)))
# this is a rudimentary comparison but it's enough for our needs
return self.__dict__ == other.__dict__
def get_function_for_os(self, os):
""" Create another instance of this object which only contains patterns and arguments suitable for the given OS
This method will filter the attributes of this function and will create another object which
will contain only the patterns & arguments which are suitable for the given OS. This method should
be called for object which are in the intermediate form described above.
Args:
os: A SupportedOsWin object
Returns:
- Another instance of this object containing only the patterns and arguments needed by the given OS.
- None, if the functions has no patterns for the given OS or the function is for 64bits OSs and the OS is
a 32bits one (or vice versa).
Raises:
Exception: If there are multiple arguments for this OS. (Maybe we can shall our own exception class ?? )
"""
if self.guest64 != os.is_64:
return None
new_patterns = []
new_arguments = None
for pattern in self.patterns:
if os.build_number >= pattern.min_ver and os.build_number <= pattern.max_ver:
new_patterns.append(pattern)
for arguments in self.arguments:
if os.build_number >= arguments.min_ver and os.build_number <= arguments.max_ver:
if new_arguments is None:
new_arguments = arguments
else:
raise Exception("Found more arguments for function {}, 64: {}".format(self.name, self.guest64))
if len(new_patterns) == 0:
return None
new_patterns = sorted(new_patterns, key=lambda x: x.max_ver - x.min_ver)
new_function = WinFunction(self)
if new_arguments is None:
new_function.arguments = WinFunctionArgument()
else:
new_function.arguments = new_arguments
new_function.patterns = WinFunctionsPatternsTable()
new_function.patterns.set_entries(new_patterns)
try:
idx = self.g_patterns_list.index(new_function.patterns)
new_function.patterns = self.g_patterns_list[idx]
except ValueError:
self.g_patterns_list.append(new_function.patterns)
return new_function
def get_descriptor(self):
""" Generate the CamiDataTable entry for this function
Returns:
bytes: the CamiDataTable entry (a _CAMI_WIN_FUNCTION structure)
Raises:
FilePointerException: If this method is called before generating the binary form of its
code (with serialize)
"""
return struct.pack(
self.descriptor_layout,
crc32.crc32(self.name),
self.patterns.get_entry_count(),
self.patterns.get_file_pointer(),
self.arguments.get_count(),
self.arguments.get_file_pointer(),
0,
0,
0,
)
def serialize(self, start):
""" Generate the body of this function in it's binary form.
Get the binary form of this function's body by packing it's arguments and patterns.
Args:
start: The offset in the file where this function will be placed
Returns:
bytes: The body of this function containing the arguments and patterns
"""
data = self.arguments.serialize(start)
return data + self.patterns.serialize(start + len(data))
class WinFunctionPattern(CamiYAMLObject, CamiAtom):
yaml_tag = "!intro_update_win_pattern"
"""
struct _CAMI_WIN_PATTERN
{
CHAR SectionHint[8];
DWORD HashLength;
DWORD HashOffset;
DWORD _Reserved1;
DWORD _Reserved2;
}
"""
descriptor_layout = "<8sIIII"
def post_create(self, state):
""" The YAML constructor for this object
Args:
state: The YAML file in a dictionary form
"""
if self.min_ver in version_any.keys():
self.min_ver = version_any[self.min_ver]
if self.max_ver in version_any.keys():
self.max_ver = version_any[self.max_ver]
if self.section_hint is None:
self.section_hint = ""
def __eq__(self, other):
if type(self) != type(other):
raise Exception("Invalid comparison between %s and %s" % (type(self), type(other)))
# this is a rudimentary comparison but it's enough for our needs
return self.__dict__ == other.__dict__
def get_descriptor(self):
""" Generate the CamiDataTable entry for this pattern
Returns:
bytes: the CamiDataTable entry (a _CAMI_WIN_PATTERN structure)
Raises:
FilePointerException: If this method is called before generating the binary form
of the pattern code. (with serialize)
"""
return struct.pack(
self.descriptor_layout,
bytes(self.section_hint, "utf-8"),
self.pattern.get_count(),
self.pattern.get_file_pointer(),
0,
0,
)
def serialize(self, start):
""" Genereate the body of this pattern in it's binary form
Get the binary form of this pattern's body by packing it's code.
Args:
start: The offset in the file where this pattern will be placed
Returns:
bytes: The body of this pattern (the code)
"""
return self.pattern.serialize(start)
class WinFunctionArgument(CamiYAMLObject, CamiObject):
    """A list of detour arguments for a hooked Windows function."""

    yaml_tag = "!intro_update_win_args"

    def post_create(self, state):
        """YAML constructor hook: normalize the version bounds."""
        for attr in ("min_ver", "max_ver"):
            value = getattr(self, attr)
            if value in version_any:
                setattr(self, attr, version_any[value])

    def __init__(self):
        """Create an empty argument list.

        Needed for functions without custom arguments, in order to
        simplify the code.

        Attributes:
            min_ver: Minimum build_number required for this list of arguments.
            max_ver: Maximum build_number supported by this list of arguments.
        """
        self.args = []

    def get_count(self):
        """Return the length of the arguments list."""
        return len(self.args)

    def get_binary(self):
        """Pack the arguments into a bytes object.

        We do this here (not in serialize) in order to simplify the code.

        Returns:
            bytes: The arguments in binary form (may be empty).

        Raises:
            KeyError: If the YAML file contains unknown arguments.
        """
        # Never emit more arguments than introcore can consume.
        assert len(self.args) <= detour_args["DET_ARGS_MAX"]
        packed = bytes()
        for arg in self.args:
            packed += struct.pack("<I", detour_args[arg])
        return packed

    def serialize(self, start):
        """Return the packed argument list.

        The result can be an empty bytes() object if this list of
        arguments is already present in the file.
        """
        try:
            self.set_file_pointer(start)
        except FilePointerException:
            return bytes()
        return self.get_binary()
class WinSupportedOsTable(CamiDataTable):
    """CamiDataTable of supported Windows OS versions."""

    section_hint = section_hints["supported_os"] | section_hints["windows"]
    entry_type = WinSupportedOs

    def process_list(self):
        # Keep the entries ordered by OS build number.
        self._entries.sort(key=lambda entry: entry.build_number)
class WinOsFunctionsTable(CamiDataTable):
    """CamiDataTable of WinFunction entries (no section hint needed)."""

    entry_type = WinFunction
class WinFunctionsPatternsTable(CamiDataTable):
    """CamiDataTable of WinFunctionPattern entries (no section hint needed)."""

    entry_type = WinFunctionPattern
| [
"common.IntrocoreVersion.max_version",
"objects.get_all_objects",
"intro_defines.version_any.keys",
"crc32.crc32",
"struct.pack",
"common.IntrocoreVersion.min_version",
"options.get_options_for_os_version"
] | [((463, 493), 'common.IntrocoreVersion.min_version', 'IntrocoreVersion.min_version', ([], {}), '()\n', (491, 493), False, 'from common import IntrocoreVersion\n'), ((514, 544), 'common.IntrocoreVersion.max_version', 'IntrocoreVersion.max_version', ([], {}), '()\n', (542, 544), False, 'from common import IntrocoreVersion\n'), ((6397, 6482), 'options.get_options_for_os_version', 'get_options_for_os_version', (['(self.build_number, self.kpti_installed, self.is_64)'], {}), '((self.build_number, self.kpti_installed, self.is_64)\n )\n', (6423, 6482), False, 'from options import get_options_for_os_version\n'), ((6505, 6533), 'objects.get_all_objects', 'get_all_objects', (['WinFunction'], {}), '(WinFunction)\n', (6520, 6533), False, 'from objects import CamiYAMLObject, CamiObject, CamiAtom, CamiDataTable, FilePointerException, get_all_objects\n'), ((6562, 6592), 'objects.get_all_objects', 'get_all_objects', (['WinOsUmFields'], {}), '(WinOsUmFields)\n', (6577, 6592), False, 'from objects import CamiYAMLObject, CamiObject, CamiAtom, CamiDataTable, FilePointerException, get_all_objects\n'), ((12690, 12712), 'crc32.crc32', 'crc32.crc32', (['self.name'], {}), '(self.name)\n', (12701, 12712), False, 'import crc32\n'), ((14018, 14036), 'intro_defines.version_any.keys', 'version_any.keys', ([], {}), '()\n', (14034, 14036), False, 'from intro_defines import section_hints, defines, detour_args, version_any\n'), ((14119, 14137), 'intro_defines.version_any.keys', 'version_any.keys', ([], {}), '()\n', (14135, 14137), False, 'from intro_defines import section_hints, defines, detour_args, version_any\n'), ((15701, 15719), 'intro_defines.version_any.keys', 'version_any.keys', ([], {}), '()\n', (15717, 15719), False, 'from intro_defines import section_hints, defines, detour_args, version_any\n'), ((15802, 15820), 'intro_defines.version_any.keys', 'version_any.keys', ([], {}), '()\n', (15818, 15820), False, 'from intro_defines import section_hints, defines, detour_args, version_any\n'), 
((16907, 16942), 'struct.pack', 'struct.pack', (['"""<I"""', 'detour_args[arg]'], {}), "('<I', detour_args[arg])\n", (16918, 16942), False, 'import struct\n')] |
#!/usr/bin/env python2
from bs4 import BeautifulSoup
import urllib2
import json
import re
import time
config_siblings = []
# Load the scraper configuration from config-siblings.json.
with open('../../../data/links/config-siblings.json', 'r') as f1:
    try:
        config_siblings = json.load(f1)
    except ValueError:
        # The file is empty (or not valid JSON): fall back to an empty config.
        config_siblings = {}
def get_urls(page_url):
    """Collect sibling URLs from *page_url* into config_siblings['pages'].

    Every page entry with a non-empty 'pattern' gets each matching link
    appended to its 'siblings_urls' list (already-known URLs are skipped).
    """
    def customize_links(link_list):
        """Absolutize relative hrefs and return the unique URLs, in order."""
        url_list = []
        for link in link_list:
            href = link['href']
            if "http" not in href:
                url_list.append(config_siblings['website_url'] + href)
            else:
                url_list.append(href)
        # The original comment promised unique items but returned the raw
        # list; deduplicate here while preserving the original order.
        seen = set()
        unique = []
        for url in url_list:
            if url not in seen:
                seen.add(url)
                unique.append(url)
        return unique

    p = urllib2.urlopen(page_url).read()
    soup = BeautifulSoup(p)
    soup.prettify()
    # return customize_links(soup.findAll('a', href=True))
    for pa in config_siblings['pages']:
        if pa['pattern'] != '':
            pa['siblings_urls'].extend(
                url for url in customize_links(soup.findAll(href=re.compile(pa['pattern'])))
                if url not in pa['siblings_urls'])
# def find_siblings(urls):
#     for pa in config_siblings['pages']:
#         [pa['siblings_urls'].append(u) for u in urls if pa['pattern'] != '' and u == pa['pattern'] and u not in pa['siblings_urls']]
# Step 1: learn the urls from the first page
# get_urls(config_siblings['pages'][0]['url'])
# Step 2: loop over the sibling urls learned in step 1 (run in batches of 10)
# runTime = 1
# minNumber = 10 * (runTime - 1)
# maxNumber = 10 * runTime
# for chance in range(minNumber, maxNumber):
#     time.sleep(3)
#     get_urls(config_siblings['pages'][1]['siblings_urls'][chance])
# Step 3: attach a 'details/' url to every company url and persist the config.
for idx, url in enumerate(config_siblings['pages'][2]['siblings_urls']):
    detail_obj = {'detail': url + 'details/'}
    config_siblings['pages'][2]['siblings_urls'][idx] = {
        'url': url,
        'objects': [detail_obj],
    }
with open('../../../data/links/config-siblings.json', 'w') as f2:
    json.dump(config_siblings, f2)
"urllib2.urlopen",
"re.compile",
"bs4.BeautifulSoup",
"json.load",
"json.dump"
] | [((845, 861), 'bs4.BeautifulSoup', 'BeautifulSoup', (['p'], {}), '(p)\n', (858, 861), False, 'from bs4 import BeautifulSoup\n'), ((2154, 2184), 'json.dump', 'json.dump', (['config_siblings', 'f2'], {}), '(config_siblings, f2)\n', (2163, 2184), False, 'import json\n'), ((254, 267), 'json.load', 'json.load', (['f1'], {}), '(f1)\n', (263, 267), False, 'import json\n'), ((801, 826), 'urllib2.urlopen', 'urllib2.urlopen', (['page_url'], {}), '(page_url)\n', (816, 826), False, 'import urllib2\n'), ((1108, 1133), 're.compile', 're.compile', (["pa['pattern']"], {}), "(pa['pattern'])\n", (1118, 1133), False, 'import re\n')] |
# Maps class labels to flower names, read from a JSON file.
import json


def label_mapping(filename):
    """Read a JSON file mapping labels to flower names.

    Args:
        filename: Path to a JSON file whose keys are label indices and
            whose values are flower names.

    Returns:
        dict: Labels as keys and flower names as values, so that it can
        be used by the main function.
    """
    # The import now lives at module level (PEP 8) instead of inside the
    # function body.
    with open(filename, 'r') as f:
        label_to_name = json.load(f)
    return label_to_name
| [
"json.load"
] | [((330, 342), 'json.load', 'json.load', (['f'], {}), '(f)\n', (339, 342), False, 'import json\n')] |
# -*- coding: utf-8 -*-
"""
Data Definition File
"""
import astropy.units as u
from astropy.coordinates import SkyCoord
from .coords import OsirisInstrumentFrame
from ..coords import AstrometricFrame
class SpectrographParameters(object):
    """Spectrograph parameters for one dataset."""

    def __init__(self, filter, scale, itime, coadds):
        super(SpectrographParameters, self).__init__()
        self.filter = filter
        # Plate scale, in arcsec per pixel.
        self.scale = u.Quantity(scale, unit=u.arcsec / u.pix)
        # Integration time, in seconds.
        self.itime = u.Quantity(itime, unit=u.s)
        self.coadds = int(coadds)

    @classmethod
    def parse(cls, xml):
        """Build the parameters from a 'spec' XML element."""
        # The scale attribute may carry a trailing '"' marker; keep only
        # the numeric part before it.
        scale_value = float(xml.get('scale').split('"')[0])
        return cls(filter=xml.get('filter'),
                   scale=scale_value,
                   itime=float(xml.get('itime')),
                   coadds=int(xml.get('coadds')))
class ImagerFrames(object):
    """A single imager frame specification."""

    def __init__(self, filter, itime, coadds, repeats):
        super(ImagerFrames, self).__init__()
        self.filter = filter
        # Integration time, in seconds.
        self.itime = u.Quantity(itime, unit=u.s)
        self.coadds = int(coadds)
        self.repeats = int(repeats)

    @classmethod
    def parse(cls, xml):
        """Build an ImagerFrames from an 'imagFrame' XML element."""
        return cls(filter=xml.get('filter'),
                   itime=float(xml.get('itime')),
                   coadds=int(xml.get('coadds')),
                   repeats=int(xml.get('repeats')))
class ImagerParameters(object):
    """Imager parameters for one dataset."""

    def __init__(self, frames, mode):
        super(ImagerParameters, self).__init__()
        self.frames = frames
        self.mode = mode

    @property
    def enabled(self):
        """True unless the imager is disabled (spectrograph-only mode)."""
        return self.mode != "Disabled (Spec only)"

    @classmethod
    def parse(cls, xml):
        """Build ImagerParameters from an 'imag' XML element."""
        frame_list = [ImagerFrames.parse(frame) for frame in xml.findall('imagFrame')]
        return cls(frame_list, mode=xml.get('mode'))
class DitherPosition(object):
    """A single dither position, optionally flagged as a sky position."""

    def __init__(self, position, sky=False):
        super(DitherPosition, self).__init__()
        self.position = position
        self.sky = sky
class DitherPattern(object):
    """A collection of dither positions expressed in a common frame."""

    def __init__(self, frame, positions, position_angle):
        super(DitherPattern, self).__init__()
        self.frame = frame
        self.positions = positions
        self.position_angle = position_angle

    @property
    def imager(self):
        """The dither positions offset to the imager pointing origin."""
        coords = SkyCoord([pos.position for pos in self.positions], frame=self.frame)
        return coords + self.frame.imager

    @property
    def spectrograph(self):
        """The dither positions offset to the spectrograph pointing origin."""
        coords = SkyCoord([pos.position for pos in self.positions], frame=self.frame)
        return coords + self.frame.spectrograph

    @classmethod
    def parse(cls, xml):
        """Parse a 'ditherPattern' XML element into a DitherPattern."""
        coords = xml.get('coords')
        position_angle = float(xml.get('skyPA')) * u.degree
        units = u.Unit(xml.get('units'))
        # "sky" patterns are astrometric; everything else is expressed in
        # the instrument frame rotated by the position angle.
        if coords == "sky":
            frame = AstrometricFrame()
        else:
            frame = OsirisInstrumentFrame(pointing_origin="spec",
                                          rotation=position_angle)
        positions = []
        for position in xml.findall('ditherPosition'):
            x_off = float(position.get('xOff')) * units
            y_off = float(position.get('yOff')) * units
            on_sky = position.get('sky') == 'true'
            positions.append(DitherPosition(SkyCoord(X=x_off, Y=y_off, frame=frame),
                                            sky=on_sky))
        return cls(frame, positions, position_angle)
class DatasetParameters(object):
    """Parameters describing one observation dataset."""

    def __init__(self, name, number, aomode, object, spec, imag, dithers):
        super(DatasetParameters, self).__init__()
        self.name = name
        self.number = int(number)
        self.aomode = aomode
        self.object = object
        self.spec = spec
        self.imag = imag
        self.dithers = dithers

    @property
    def laser(self):
        """True when the AO mode uses the laser guide star (LGS)."""
        return "LGS" in self.aomode

    @classmethod
    def parse(cls, xml):
        """Build DatasetParameters from a 'dataset' XML element."""
        return cls(xml.get('name'),
                   int(xml.get('setnum')),
                   xml.get('aomode'),
                   Object.parse(xml.find('object')),
                   SpectrographParameters.parse(xml.find('spec')),
                   ImagerParameters.parse(xml.find('imag')),
                   DitherPattern.parse(xml.find('ditherPattern')))
class Object(object):
    """The observed object (target name)."""

    def __init__(self, name):
        super(Object, self).__init__()
        self.name = name

    @classmethod
    def parse(cls, xml):
        """Build an Object from the element's full text content."""
        return cls("".join(xml.itertext()))
class DataDefinitionFile(object):
    """Container for the data definition file (DDF) format."""

    def __init__(self, dataset):
        super(DataDefinitionFile, self).__init__()
        self.dataset = dataset

    @classmethod
    def from_file(cls, file):
        """Read and parse a DDF from *file*."""
        import xml.etree.ElementTree as ET
        tree = ET.parse(file)
        return cls.parse(tree)

    @classmethod
    def parse(cls, xml):
        """Parse the XML tree into a DataDefinitionFile."""
        return cls(DatasetParameters.parse(xml.find('dataset')))
| [
"xml.etree.ElementTree.parse",
"astropy.coordinates.SkyCoord",
"astropy.units.Quantity"
] | [((434, 474), 'astropy.units.Quantity', 'u.Quantity', (['scale'], {'unit': '(u.arcsec / u.pix)'}), '(scale, unit=u.arcsec / u.pix)\n', (444, 474), True, 'import astropy.units as u\n'), ((494, 521), 'astropy.units.Quantity', 'u.Quantity', (['itime'], {'unit': 'u.s'}), '(itime, unit=u.s)\n', (504, 521), True, 'import astropy.units as u\n'), ((1067, 1094), 'astropy.units.Quantity', 'u.Quantity', (['itime'], {'unit': 'u.s'}), '(itime, unit=u.s)\n', (1077, 1094), True, 'import astropy.units as u\n'), ((2607, 2675), 'astropy.coordinates.SkyCoord', 'SkyCoord', (['[pos.position for pos in self.positions]'], {'frame': 'self.frame'}), '([pos.position for pos in self.positions], frame=self.frame)\n', (2615, 2675), False, 'from astropy.coordinates import SkyCoord\n'), ((2808, 2876), 'astropy.coordinates.SkyCoord', 'SkyCoord', (['[pos.position for pos in self.positions]'], {'frame': 'self.frame'}), '([pos.position for pos in self.positions], frame=self.frame)\n', (2816, 2876), False, 'from astropy.coordinates import SkyCoord\n'), ((5380, 5394), 'xml.etree.ElementTree.parse', 'ET.parse', (['file'], {}), '(file)\n', (5388, 5394), True, 'import xml.etree.ElementTree as ET\n'), ((3598, 3629), 'astropy.coordinates.SkyCoord', 'SkyCoord', ([], {'X': 'X', 'Y': 'Y', 'frame': 'frame'}), '(X=X, Y=Y, frame=frame)\n', (3606, 3629), False, 'from astropy.coordinates import SkyCoord\n')] |
# -*- coding: utf-8 -*-
# from django.contrib.auth.models import User
from django import forms
from models.models import AdvancedUser, CommentModel
import django.forms.widgets as widgets
class UserForm(forms.ModelForm):
    """Registration form for AdvancedUser with password confirmation."""

    username = forms.CharField(min_length=3, max_length=40)
    password = forms.CharField(widget=forms.PasswordInput, min_length=5,
                               max_length=40)
    repeat_password = forms.CharField(
        widget=widgets.PasswordInput(attrs={
            'placeholder': 'repeat password',
        }),
        min_length=5,
        max_length=40,
        required=True,
        error_messages={
            'required': 'please repeat password'
        }
    )

    class Meta:
        model = AdvancedUser
        fields = ['username', 'password', 'repeat_password', 'email', 'avatar']

    def clean(self):
        """Validate that both passwords match and the username is free."""
        password = self.cleaned_data.get("password")
        repeat_password = self.cleaned_data.get("repeat_password")
        if password and repeat_password and password != repeat_password:
            raise forms.ValidationError("Passwords don't match")
        username = self.cleaned_data.get("username")
        user = AdvancedUser.objects.filter(username=username)
        if user.exists():
            raise forms.ValidationError("username already taken")
        # Django's Form.clean() contract: return the cleaned data so
        # subsequent processing sees the validated values (the original
        # implicitly returned None).
        return self.cleaned_data
class Comment_form(forms.ModelForm):
    """Form for posting a comment (text field only)."""

    class Meta:
        model = CommentModel
        fields = ['text']
| [
"models.models.AdvancedUser.objects.filter",
"django.forms.ValidationError",
"django.forms.CharField",
"django.forms.widgets.PasswordInput"
] | [((237, 281), 'django.forms.CharField', 'forms.CharField', ([], {'min_length': '(3)', 'max_length': '(40)'}), '(min_length=3, max_length=40)\n', (252, 281), False, 'from django import forms\n'), ((297, 369), 'django.forms.CharField', 'forms.CharField', ([], {'widget': 'forms.PasswordInput', 'min_length': '(5)', 'max_length': '(40)'}), '(widget=forms.PasswordInput, min_length=5, max_length=40)\n', (312, 369), False, 'from django import forms\n'), ((1176, 1222), 'models.models.AdvancedUser.objects.filter', 'AdvancedUser.objects.filter', ([], {'username': 'username'}), '(username=username)\n', (1203, 1222), False, 'from models.models import AdvancedUser, CommentModel\n'), ((455, 518), 'django.forms.widgets.PasswordInput', 'widgets.PasswordInput', ([], {'attrs': "{'placeholder': 'repeat password'}"}), "(attrs={'placeholder': 'repeat password'})\n", (476, 518), True, 'import django.forms.widgets as widgets\n'), ((1060, 1106), 'django.forms.ValidationError', 'forms.ValidationError', (['"""Passwords don\'t match"""'], {}), '("Passwords don\'t match")\n', (1081, 1106), False, 'from django import forms\n'), ((1267, 1314), 'django.forms.ValidationError', 'forms.ValidationError', (['"""username already taken"""'], {}), "('username already taken')\n", (1288, 1314), False, 'from django import forms\n')] |
from tkinter import ttk
from tkinter import *
import cv2
from tkinter.filedialog import *

root = Tk()
# Hide the empty root window so only the file dialog is visible.
# (The original used `root.withdraw` without parentheses, a no-op.)
root.withdraw()
o_file = askopenfilename(initialdir=os.getcwd(), title="Select Image File",
                        filetypes=(("jpg file", "*.jpg"), ("png file", "*.png"),
                                   ("jpeg file", "*.jpeg"), ("All file", "*.*")))

# Pencil-sketch effect: grayscale -> invert -> blur -> invert -> divide.
image = cv2.imread(o_file)
grey_img = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
invert = cv2.bitwise_not(grey_img)
blur = cv2.GaussianBlur(invert, (21, 21), 0)
invertedblur = cv2.bitwise_not(blur)
sketch = cv2.divide(grey_img, invertedblur, scale=256.0)

cv2.imwrite("sketch1.jpg", sketch)
# Show the sketch image itself; the original passed imwrite's boolean
# return value to imshow, which is not an image.
cv2.imshow('sketch1.jpg', sketch)
cv2.waitKey(0)
cv2.destroyAllWindows()
| [
"cv2.imwrite",
"cv2.divide",
"cv2.imshow",
"cv2.waitKey",
"cv2.destroyAllWindows",
"cv2.cvtColor",
"cv2.bitwise_not",
"cv2.GaussianBlur",
"cv2.imread"
] | [((310, 328), 'cv2.imread', 'cv2.imread', (['o_file'], {}), '(o_file)\n', (320, 328), False, 'import cv2\n'), ((340, 379), 'cv2.cvtColor', 'cv2.cvtColor', (['image', 'cv2.COLOR_BGR2GRAY'], {}), '(image, cv2.COLOR_BGR2GRAY)\n', (352, 379), False, 'import cv2\n'), ((389, 414), 'cv2.bitwise_not', 'cv2.bitwise_not', (['grey_img'], {}), '(grey_img)\n', (404, 414), False, 'import cv2\n'), ((422, 459), 'cv2.GaussianBlur', 'cv2.GaussianBlur', (['invert', '(21, 21)', '(0)'], {}), '(invert, (21, 21), 0)\n', (438, 459), False, 'import cv2\n'), ((473, 494), 'cv2.bitwise_not', 'cv2.bitwise_not', (['blur'], {}), '(blur)\n', (488, 494), False, 'import cv2\n'), ((504, 551), 'cv2.divide', 'cv2.divide', (['grey_img', 'invertedblur'], {'scale': '(256.0)'}), '(grey_img, invertedblur, scale=256.0)\n', (514, 551), False, 'import cv2\n'), ((559, 593), 'cv2.imwrite', 'cv2.imwrite', (['"""sketch1.jpg"""', 'sketch'], {}), "('sketch1.jpg', sketch)\n", (570, 593), False, 'import cv2\n'), ((594, 625), 'cv2.imshow', 'cv2.imshow', (['"""sketch1.jpg"""', 'keep'], {}), "('sketch1.jpg', keep)\n", (604, 625), False, 'import cv2\n'), ((625, 639), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (636, 639), False, 'import cv2\n'), ((640, 663), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (661, 663), False, 'import cv2\n')] |
import pytest
import os
import re
import json
from renamerename.executor.app import run, parse_args
class TestApp:
    """End-to-end test of the rename CLI against a temporary directory."""

    @pytest.fixture
    def dir_files(self):
        """Filenames created in the temporary directory before renaming."""
        return set(['wintercourse_doc.tar.gz', 'img.jpeg',
                    'summercourse_doc.tar.gz', 'icon.png',
                    'fallcourse_doc.tar.gz', 'file.py',
                    'script.sh', '.gitignore'])

    def test_basic_actions(self, dir_files, tmp_path):
        # Create the input files.
        for name in dir_files:
            with open(tmp_path / name, 'w') as fh:
                fh.write("content...")
        # Add an unrelated directory that must be left untouched.
        os.makedirs(tmp_path / "dir1")
        with open(tmp_path / "dir1/something.txt", 'w') as fh:
            fh.write("something_else")
        # Sanity check: everything was created.
        all_items = dir_files.union(set(["dir1"]))
        assert set(os.listdir(tmp_path)) == all_items
        assert set(os.listdir(tmp_path / "dir1")) == set(['something.txt'])
        # Run the renaming.
        cli = f"--dir={tmp_path} --filter=*.tar.gz --prefix=foo_ --suffix=_bar --change-extension=.zip --save-renaming".split(" ")
        args = parse_args(args=cli)
        res = run(args=args)
        assert res == 0
        expected_files = set([
            'foo_wintercourse_doc_bar.zip', 'img.jpeg',
            'foo_summercourse_doc_bar.zip', 'icon.png',
            'foo_fallcourse_doc_bar.zip', 'file.py',
            'script.sh', '.gitignore'
        ])
        expected_items = set(['dir1'])
        expected_items.update(expected_files)
        current_dir = set(os.listdir(tmp_path))
        # All expected names must be present after the rename...
        assert all([name in current_dir for name in expected_items])
        # ...and the only extra entry is the renaming JSON file.
        assert len(current_dir - expected_items) == 1
        renaming_file = list(current_dir - expected_items)[0]
        assert re.fullmatch(re.compile(r"renaming_\d{8}_\d{6}.json"), renaming_file) is not None
        with open(tmp_path / renaming_file, 'r') as fh:
            renaming_content = json.loads(fh.read())
        assert renaming_content == {
            "wintercourse_doc.tar.gz": "foo_wintercourse_doc_bar.zip",
            "summercourse_doc.tar.gz": "foo_summercourse_doc_bar.zip",
            "fallcourse_doc.tar.gz": "foo_fallcourse_doc_bar.zip"
        }
        # File contents must be preserved by the rename.
        for name in expected_files:
            with open(tmp_path / name, 'r') as fh:
                assert fh.read() == "content..."
        with open(tmp_path / "dir1/something.txt", 'r') as fh:
            assert fh.read() == "something_else"
| [
"renamerename.executor.app.parse_args",
"os.listdir",
"os.makedirs",
"re.compile",
"renamerename.executor.app.run"
] | [((634, 664), 'os.makedirs', 'os.makedirs', (["(tmp_path / 'dir1')"], {}), "(tmp_path / 'dir1')\n", (645, 664), False, 'import os\n'), ((1156, 1176), 'renamerename.executor.app.parse_args', 'parse_args', ([], {'args': 'cli'}), '(args=cli)\n', (1166, 1176), False, 'from renamerename.executor.app import run, parse_args\n'), ((1191, 1205), 'renamerename.executor.app.run', 'run', ([], {'args': 'args'}), '(args=args)\n', (1194, 1205), False, 'from renamerename.executor.app import run, parse_args\n'), ((1589, 1609), 'os.listdir', 'os.listdir', (['tmp_path'], {}), '(tmp_path)\n', (1599, 1609), False, 'import os\n'), ((875, 895), 'os.listdir', 'os.listdir', (['tmp_path'], {}), '(tmp_path)\n', (885, 895), False, 'import os\n'), ((929, 958), 'os.listdir', 'os.listdir', (["(tmp_path / 'dir1')"], {}), "(tmp_path / 'dir1')\n", (939, 958), False, 'import os\n'), ((2031, 2072), 're.compile', 're.compile', (['"""renaming_\\\\d{8}_\\\\d{6}.json"""'], {}), "('renaming_\\\\d{8}_\\\\d{6}.json')\n", (2041, 2072), False, 'import re\n')] |
import base64
import secrets
import cryptography.hazmat.primitives.ciphers
import cryptography.hazmat.primitives.ciphers.algorithms
import cryptography.hazmat.primitives.ciphers.modes
import cryptography.hazmat.backends
'''
This module provides AES GCM based payload protection.
Flow:
1. aes_gcm_generate_key() to get JWT AES 256 GCM key
2. Deliver the key to a Javascript app, also store AES key on the server
3. Import the key in the Javascript
window.crypto.subtle.importKey(
"jwk",
jwt_aes_key,
{ name: "AES-GCM" },
false,
["encrypt", "decrypt"]
)
.then(function(key){
... use_the_key_here ...
})
.catch(function(err){
console.error(err);
});
4. Encrypt the payload in Javascript
const iv = window.crypto.getRandomValues(new Uint8Array(12));
window.crypto.subtle.encrypt(
{
name: "AES-GCM",
iv: iv,
// Optionally provide additionalData: ,
tagLength: 128,
},
key, //from importKey above
plaintext
)
.then(function(ciphertext){
axios.post('/api/...',
concatUint8ArrayAndArrayBuffer(iv, ciphertext),
)
}
5. Submit the payload to the server, don't send AES key with it!
5. Decrypt the payload on the server side
plaintext = aes_gcm_decrypt(key = key_from_generate, ciphertext)
'''
def aes_gcm_decrypt(key: bytes, ciphertext: bytes, associated_data: bytes = None) -> bytes:
    '''
    Decrypt a ciphertext that was encrypted with AES GCM.

    The ciphertext layout is: 12-byte IV || message || 16-byte GCM tag.
    If the tag does not match, an InvalidTag exception is raised.
    '''
    iv = ciphertext[:12]
    message = ciphertext[12:-16]
    tag = ciphertext[-16:]
    # Build the cipher with the key, the IV, and the GCM tag used for
    # authenticating the message.
    cipher = cryptography.hazmat.primitives.ciphers.Cipher(
        cryptography.hazmat.primitives.ciphers.algorithms.AES(key),
        cryptography.hazmat.primitives.ciphers.modes.GCM(iv, tag),
        backend=cryptography.hazmat.backends.default_backend()
    )
    decryptor = cipher.decryptor()
    # The associated data must be replayed for the tag to verify.
    if associated_data is not None:
        decryptor.authenticate_additional_data(associated_data)
    # Finalizing verifies the tag and yields the authenticated plaintext.
    return decryptor.update(message) + decryptor.finalize()
def aes_gcm_generate_key(key: bytes = None) -> dict:
    '''
    Generate a JWK dictionary for a 256-bit AES GCM key.

    A fresh random key is generated when *key* is not supplied.
    '''
    if key is None:
        key = secrets.token_bytes(32)  # 256 bits
    # JWK uses unpadded base64url encoding for the key material.
    encoded = base64.urlsafe_b64encode(key).decode('ascii').rstrip('=')
    return {
        "kty": "oct",
        "k": encoded,
        "alg": "A256GCM",
        "ext": True,
    }
| [
"secrets.token_bytes",
"base64.urlsafe_b64encode"
] | [((2345, 2374), 'secrets.token_bytes', 'secrets.token_bytes', (['(256 // 8)'], {}), '(256 // 8)\n', (2364, 2374), False, 'import secrets\n'), ((2420, 2449), 'base64.urlsafe_b64encode', 'base64.urlsafe_b64encode', (['key'], {}), '(key)\n', (2444, 2449), False, 'import base64\n')] |
from PIL import Image
import sys
import torch
import preprocessing as pp
import vggTransfer

f_name = str(sys.argv[1])

# Use the GPU when available, otherwise fall back to the CPU.  The original
# hard-coded "cuda" and would fail on a CPU-only machine.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

vgg = vggTransfer.loadVgg(n_classes=10)
vgg = vgg.to(device)
# map_location lets a GPU-trained checkpoint load on a CPU-only machine.
vgg.load_state_dict(torch.load('model_11.pt', map_location=device))
vgg.eval()

tform = pp.createDataTransforms()['val']
img = tform(Image.open(f_name))
img = img.to(device)
img.unsqueeze_(0)

classes = ['gossiping',
           'isolation',
           'laughing',
           'nonbullying',
           'pullinghair',
           'punching',
           'quarrel',
           'slapping',
           'stabbing',
           'strangle']

_, pred = torch.max(vgg(img), 1)
print(classes[pred])
print(classes[pred]) | [
"vggTransfer.loadVgg",
"PIL.Image.open",
"preprocessing.createDataTransforms",
"torch.load",
"torch.cuda.is_available",
"torch.device"
] | [((131, 151), 'torch.device', 'torch.device', (['"""cuda"""'], {}), "('cuda')\n", (143, 151), False, 'import torch\n'), ((159, 192), 'vggTransfer.loadVgg', 'vggTransfer.loadVgg', ([], {'n_classes': '(10)'}), '(n_classes=10)\n', (178, 192), False, 'import vggTransfer\n'), ((198, 223), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (221, 223), False, 'import torch\n'), ((386, 411), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (409, 411), False, 'import torch\n'), ((271, 296), 'torch.load', 'torch.load', (['"""model_11.pt"""'], {}), "('model_11.pt')\n", (281, 296), False, 'import torch\n'), ((318, 343), 'preprocessing.createDataTransforms', 'pp.createDataTransforms', ([], {}), '()\n', (341, 343), True, 'import preprocessing as pp\n'), ((363, 381), 'PIL.Image.open', 'Image.open', (['f_name'], {}), '(f_name)\n', (373, 381), False, 'from PIL import Image\n')] |
import gmaneLegacy as g, importlib
# Maintain a local cache of Gmane mailing-list data under ~/.gmane2/.
dl=g.DownloadGmaneData('~/.gmane2/')
# Presumably fetches the set of available list IDs — confirm in gmaneLegacy.
dl.downloadListIDS()
#dl.getDownloadedLists()
#dl.correctFilenames()
# Clean up the downloaded list data in the cache directory.
dl.cleanDownloadedLists()
| [
"gmaneLegacy.DownloadGmaneData"
] | [((38, 71), 'gmaneLegacy.DownloadGmaneData', 'g.DownloadGmaneData', (['"""~/.gmane2/"""'], {}), "('~/.gmane2/')\n", (57, 71), True, 'import gmaneLegacy as g, importlib\n')] |
#################################################################################
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. #
# #
# Licensed under the Apache License, Version 2.0 (the "License"). #
# You may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
#################################################################################
"""A class for ROS Environment Side Channel."""
import abc
import numpy as np
import ude_ros_env.constants as const
from ude_ros_env.service_proxy_wrapper import ServiceProxyWrapper
from ude import (
AbstractSideChannel, SideChannelData,
BUILTIN_TYPE_TO_SIDE_CHANNEL_DATATYPE
)
from ude_ros_msgs.srv import (
UDESideChannelSrv,
UDESideChannelSrvRequest, UDESideChannelSrvResponse
)
import rospy
# Python 2 and 3 compatible Abstract class
# (equivalent to `class ABC(metaclass=abc.ABCMeta)` on Python 3 only).
ABC = abc.ABCMeta('ABC', (object,), {})
class ROSEnvironmentSideChannel(AbstractSideChannel):
    """Side channel used on the ROS environment side.

    Receives side-channel messages from UDE through the to_env ROS
    service and forwards outgoing messages through the to_ude client.
    """

    def __init__(self):
        """Set up the to_env service and the to_ude service client."""
        super().__init__()
        rospy.loginfo("[ROSEnvironmentSideChannel] initiating...")
        # Service through which UDE pushes messages to the environment.
        self._to_env_service = rospy.Service(const.SideChannelServiceType.TO_ENV.value,
                                             UDESideChannelSrv,
                                             self.on_message_received)
        # Client used to push messages back to UDE.
        rospy.loginfo("[ROSEnvironmentSideChannel] wait for to_ude service...")
        self._send_msg_to_ude_cli = ServiceProxyWrapper(const.SideChannelServiceType.TO_UDE.value,
                                                      UDESideChannelSrv)
        rospy.loginfo("[ROSEnvironmentSideChannel] to_ude service available...")

    def on_message_received(self, request: UDESideChannelSrvRequest) -> UDESideChannelSrvResponse:
        """Handle a side channel message received from UDE.

        Args:
            request (UDESideChannelSrvRequest): side channel message
        """
        key = request.key
        data_type = request.data_type
        try:
            value = getattr(request.data, const.SIDE_CHANNEL_DATATYPE_TO_ROS_MSG_ATTR_MAP[data_type])
        except KeyError:
            raise TypeError("Not supported type: {}".format(data_type))
        # Normalize ROS message containers into native Python types.
        if data_type == const.SideChannelDataType.FLOAT_LIST.value:
            value = list(value)
        elif data_type == const.SideChannelDataType.BYTES.value:
            value = bytes(value)
        rospy.loginfo("[ROSEnvironmentSideChannel] on_message_received (key={}, value={})...".format(key,
                                                                                                    str(value)))
        self.store(key=key, value=value)
        self.notify(key=key, value=value)
        return UDESideChannelSrvResponse()

    def _send(self, key: str, value: SideChannelData, store_local: bool = False) -> None:
        """Send the side channel message to the ROS server.

        Args:
            key (str): The string identifier of message
            value (SideChannelData): The data of the message.
            store_local (bool, optional): The flag whether to store locally or not.
        """
        req = UDESideChannelSrvRequest()
        req.key = key
        # Convert numpy scalars to plain Python scalars before packing.
        if type(value).__module__ == np.__name__:
            value = value.item()
        try:
            req.data_type = BUILTIN_TYPE_TO_SIDE_CHANNEL_DATATYPE[type(value)]
            setattr(req.data, const.BUILTIN_TYPE_TO_ROS_MSG_ATTR_MAP[type(value)], value)
        except KeyError:
            raise TypeError("Not supported type: {}".format(type(value)))
        self._send_msg_to_ude_cli(req)
| [
"ude_ros_msgs.srv.UDESideChannelSrvResponse",
"abc.ABCMeta",
"rospy.Service",
"ude_ros_env.service_proxy_wrapper.ServiceProxyWrapper",
"ude_ros_msgs.srv.UDESideChannelSrvRequest",
"rospy.loginfo"
] | [((1695, 1728), 'abc.ABCMeta', 'abc.ABCMeta', (['"""ABC"""', '(object,)', '{}'], {}), "('ABC', (object,), {})\n", (1706, 1728), False, 'import abc\n'), ((1992, 2050), 'rospy.loginfo', 'rospy.loginfo', (['"""[ROSEnvironmentSideChannel] initiating..."""'], {}), "('[ROSEnvironmentSideChannel] initiating...')\n", (2005, 2050), False, 'import rospy\n'), ((2115, 2220), 'rospy.Service', 'rospy.Service', (['const.SideChannelServiceType.TO_ENV.value', 'UDESideChannelSrv', 'self.on_message_received'], {}), '(const.SideChannelServiceType.TO_ENV.value, UDESideChannelSrv,\n self.on_message_received)\n', (2128, 2220), False, 'import rospy\n'), ((2348, 2419), 'rospy.loginfo', 'rospy.loginfo', (['"""[ROSEnvironmentSideChannel] wait for to_ude service..."""'], {}), "('[ROSEnvironmentSideChannel] wait for to_ude service...')\n", (2361, 2419), False, 'import rospy\n'), ((2456, 2541), 'ude_ros_env.service_proxy_wrapper.ServiceProxyWrapper', 'ServiceProxyWrapper', (['const.SideChannelServiceType.TO_UDE.value', 'UDESideChannelSrv'], {}), '(const.SideChannelServiceType.TO_UDE.value,\n UDESideChannelSrv)\n', (2475, 2541), False, 'from ude_ros_env.service_proxy_wrapper import ServiceProxyWrapper\n'), ((2602, 2674), 'rospy.loginfo', 'rospy.loginfo', (['"""[ROSEnvironmentSideChannel] to_ude service available..."""'], {}), "('[ROSEnvironmentSideChannel] to_ude service available...')\n", (2615, 2674), False, 'import rospy\n'), ((3737, 3764), 'ude_ros_msgs.srv.UDESideChannelSrvResponse', 'UDESideChannelSrvResponse', ([], {}), '()\n', (3762, 3764), False, 'from ude_ros_msgs.srv import UDESideChannelSrv, UDESideChannelSrvRequest, UDESideChannelSrvResponse\n'), ((4187, 4213), 'ude_ros_msgs.srv.UDESideChannelSrvRequest', 'UDESideChannelSrvRequest', ([], {}), '()\n', (4211, 4213), False, 'from ude_ros_msgs.srv import UDESideChannelSrv, UDESideChannelSrvRequest, UDESideChannelSrvResponse\n')] |
#!/usr/bin/env python3.5
import json
from json import JSONDecodeError
import sys
from collections import Counter
from urllib.parse import urlsplit
import argparse

parser = argparse.ArgumentParser(description='Dump the raw web page content given a URL from the JSON on stdin')
parser.add_argument('url', help='URL to look for in the url property')
args = parser.parse_args()

lineno = 0
for line in sys.stdin:
    try:
        o = json.loads(line)
    except JSONDecodeError as err:
        print('{0}: JSON parse error: {1}'.format(lineno, err), file=sys.stderr)
        lineno += 1
        # Skip malformed lines: the original fell through and tested `o`
        # from the previous iteration (NameError on the very first line,
        # a stale record afterwards).
        continue
    lineno += 1
    if 'url' in o and o['url'] == args.url:
        print(o['raw_content'])
        break
| [
"collections.Counter",
"json.loads",
"argparse.ArgumentParser"
] | [((184, 193), 'collections.Counter', 'Counter', ([], {}), '()\n', (191, 193), False, 'from collections import Counter\n'), ((204, 312), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Dump the raw web page content given a URL from the JSON on stdin"""'}), "(description=\n 'Dump the raw web page content given a URL from the JSON on stdin')\n", (227, 312), False, 'import argparse\n'), ((451, 467), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (461, 467), False, 'import json\n')] |
import matplotlib.pyplot as plt
import numpy as np
import pyrealsense2 as rs
from src.utils.plots import _plot_depth_image_live
def generate_live_images():
    """Yield depth frames captured live from a RealSense camera.

    Opens a 640x480 depth stream and yields each frame as a numpy array
    with a trailing channel axis appended. The pipeline is stopped when
    the generator is exhausted or closed.
    """
    pipe = rs.pipeline()
    cfg = rs.config()
    cfg.enable_stream(rs.stream.depth, 640, 480)
    pipe.start(cfg)
    try:
        while True:
            frameset = pipe.wait_for_frames()
            depth_frame = frameset.get_depth_frame()
            depth_image = np.array(depth_frame.get_data())
            # append a channel axis so consumers get (H, W, 1) data
            depth_image = depth_image[..., np.newaxis]
            yield depth_image
    finally:
        # release the camera even when the consumer stops iterating early
        # (GeneratorExit) or an error occurs
        pipe.stop()
def print_live_images(num=None):
    """Plot live depth frames on a single matplotlib axis.

    Args:
        num: optional number of frames to show; ``None`` plots forever.
    """
    frames = generate_live_images()
    _, axis = plt.subplots()
    shown = 0
    # when `num` is None the comparison never succeeds, so we loop forever
    while shown != num:
        shown += 1
        _plot_depth_image_live(axis, next(frames))
def intrinsic_parameters():
    """
    Retrieves camera's parameters for a depth stream of
    shape (640, 480).
    Returns
    -------
    intrinsics
        Camera intrinsics as reported by the depth video stream profile.
    max_distance
        The depth sensor's ``max_distance`` option value.
    """
    pipe = rs.pipeline()
    cfg = rs.config()
    cfg.enable_stream(rs.stream.depth, 640, 480)
    profile = pipe.start(cfg)
    # the video stream profile exposes the intrinsic calibration data
    stream = profile.get_stream(rs.stream.depth).as_video_stream_profile()
    intrinsics = stream.get_intrinsics()
    depth_sensor = profile.get_device().first_depth_sensor()
    max_distance = depth_sensor.get_option(rs.option.max_distance)
    """
    fx = fy = 476.0068054199219
    ppx, ppy = 313.6830139, 242.7547302
    depth_units = 0.00012498664727900177
    => 1 mm / depth_units = 8.00085466544
    D415
    [ 640x480 p[313.79 238.076] f[592.138 592.138] <NAME> [0 0 0 0 0] ]
    """
    pipe.stop()
    return intrinsics, max_distance
if __name__ == '__main__':
    # entry point: stream and plot depth frames until interrupted
    # intrinsic_parameters()
    print_live_images()
| [
"pyrealsense2.config",
"src.utils.plots._plot_depth_image_live",
"matplotlib.pyplot.subplots",
"pyrealsense2.pipeline"
] | [((170, 183), 'pyrealsense2.pipeline', 'rs.pipeline', ([], {}), '()\n', (181, 183), True, 'import pyrealsense2 as rs\n'), ((194, 205), 'pyrealsense2.config', 'rs.config', ([], {}), '()\n', (203, 205), True, 'import pyrealsense2 as rs\n'), ((671, 685), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (683, 685), True, 'import matplotlib.pyplot as plt\n'), ((1045, 1058), 'pyrealsense2.pipeline', 'rs.pipeline', ([], {}), '()\n', (1056, 1058), True, 'import pyrealsense2 as rs\n'), ((1069, 1080), 'pyrealsense2.config', 'rs.config', ([], {}), '()\n', (1078, 1080), True, 'import pyrealsense2 as rs\n'), ((813, 852), 'src.utils.plots._plot_depth_image_live', '_plot_depth_image_live', (['ax', 'depth_image'], {}), '(ax, depth_image)\n', (835, 852), False, 'from src.utils.plots import _plot_depth_image_live\n')] |
# Generated by Django 3.0 on 2019-12-16 19:59
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial migration for the todo app: creates List, Tag and Task."""
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='List',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=60)),
            ],
        ),
        migrations.CreateModel(
            name='Tag',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=20)),
                ('color', models.CharField(blank=True, max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='Task',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=60)),
                ('completed', models.BooleanField(default=False)),
                ('deadline', models.DateTimeField(blank=True, null=True)),
                ('reminder_before_deadline', models.DurationField(blank=True, null=True)),
                ('list', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='todo.List')),
                ('tags', models.ManyToManyField(blank=True, to='todo.Tag')),
            ],
        ),
        # List.tags is added after the fact because CreateModel('List')
        # above runs before the Tag model exists in this operation sequence
        migrations.AddField(
            model_name='list',
            name='tags',
            field=models.ManyToManyField(blank=True, to='todo.Tag'),
        ),
    ]
| [
"django.db.models.ForeignKey",
"django.db.models.DurationField",
"django.db.models.ManyToManyField",
"django.db.models.BooleanField",
"django.db.models.AutoField",
"django.db.models.DateTimeField",
"django.db.models.CharField"
] | [((1655, 1704), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'blank': '(True)', 'to': '"""todo.Tag"""'}), "(blank=True, to='todo.Tag')\n", (1677, 1704), False, 'from django.db import migrations, models\n'), ((331, 424), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (347, 424), False, 'from django.db import migrations, models\n'), ((448, 479), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(60)'}), '(max_length=60)\n', (464, 479), False, 'from django.db import migrations, models\n'), ((608, 701), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (624, 701), False, 'from django.db import migrations, models\n'), ((725, 756), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)'}), '(max_length=20)\n', (741, 756), False, 'from django.db import migrations, models\n'), ((785, 829), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(100)'}), '(blank=True, max_length=100)\n', (801, 829), False, 'from django.db import migrations, models\n'), ((959, 1052), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (975, 1052), False, 'from django.db import migrations, models\n'), ((1076, 1107), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(60)'}), '(max_length=60)\n', (1092, 1107), False, 'from django.db import migrations, models\n'), ((1140, 1174), 
'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (1159, 1174), False, 'from django.db import migrations, models\n'), ((1206, 1249), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (1226, 1249), False, 'from django.db import migrations, models\n'), ((1297, 1340), 'django.db.models.DurationField', 'models.DurationField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (1317, 1340), False, 'from django.db import migrations, models\n'), ((1368, 1446), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""todo.List"""'}), "(on_delete=django.db.models.deletion.CASCADE, to='todo.List')\n", (1385, 1446), False, 'from django.db import migrations, models\n'), ((1474, 1523), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'blank': '(True)', 'to': '"""todo.Tag"""'}), "(blank=True, to='todo.Tag')\n", (1496, 1523), False, 'from django.db import migrations, models\n')] |
#!/usr/bin/python3
'''Advent of Code 2018 Day 15 tests'''
import unittest
import os
import sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from aoc2018 import day13 # pylint: disable=wrong-import-position
class TestUM(unittest.TestCase):
    '''Unit tests for the aoc2018 day 13 solution'''
    def test_day13part1(self) -> None:
        '''Test part 1: expected answer is (7, 3)'''
        with open('tests/day13test.txt', 'r') as f:
            inputs = day13.readinputdata(f)
        self.assertEqual(day13.runpart1(inputs), (7, 3))
    def test_day13part2(self) -> None:
        '''Test part 2: expected answer is (6, 4)'''
        with open('tests/day13test2.txt', 'r') as f:
            inputs = day13.readinputdata(f)
        self.assertEqual(day13.runpart2(inputs), (6, 4))
| [
"os.path.dirname",
"aoc2018.day13.runpart2",
"aoc2018.day13.readinputdata",
"aoc2018.day13.runpart1"
] | [((143, 168), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (158, 168), False, 'import os\n'), ((437, 459), 'aoc2018.day13.readinputdata', 'day13.readinputdata', (['f'], {}), '(f)\n', (456, 459), False, 'from aoc2018 import day13\n'), ((485, 507), 'aoc2018.day13.runpart1', 'day13.runpart1', (['inputs'], {}), '(inputs)\n', (499, 507), False, 'from aoc2018 import day13\n'), ((657, 679), 'aoc2018.day13.readinputdata', 'day13.readinputdata', (['f'], {}), '(f)\n', (676, 679), False, 'from aoc2018 import day13\n'), ((705, 727), 'aoc2018.day13.runpart2', 'day13.runpart2', (['inputs'], {}), '(inputs)\n', (719, 727), False, 'from aoc2018 import day13\n')] |
# coding: utf-8
import pytest
from jinja2schema import InvalidExpression
from jinja2schema.core import infer
from jinja2schema.model import Dictionary
def test_items():
    """Infer a structure from ``values.items()`` iteration inside a macro.

    ``g.users`` must come back as a (still unknown) Dictionary because the
    macro iterates ``values.items()`` and ``values`` is bound to ``g.users``
    at the call site.
    """
    template = '''
    {% macro selectinputdict(name, values, value=0, addemptyrow=false ,extrataginfo='') -%}
        <select name="{{ name }}" id="{{ name }}" {{ extrataginfo }}>
        {% if addemptyrow %}
            <option></option>
        {% endif %}
        {% for k,v in values.items() %}
            <option value="{{ k or '' }}" {{ 'selected' if value==k+1 }}>{{ v }}</option>
        {% endfor %}
        </select>
    {%- endmacro %}
    <td>{{ selectinputdict('priv_new_member', g.users, 5,true) }}</td>
    '''
    struct = infer(template)
    expected_struct = Dictionary({
        'g': Dictionary(label="g", data={
            'users': Dictionary(label="users", data={}, linenos=[2, 12]),
        }, linenos=[12]),
    })
    assert struct == expected_struct
def test_items_noarg():
    """``infer`` must raise InvalidExpression when the template calls
    ``values.items(456)``, i.e. ``items()`` with an argument."""
    template = '''
    {% macro selectinputdict(name, values, value=0, addemptyrow=false ,extrataginfo='') -%}
        <select name="{{ name }}" id="{{ name }}" {{ extrataginfo }}>
        {% for k,v in values.items(456) %}
            <option value="{{ k or '' }}" {{ 'selected' if value==k+1 }}>{{ v }}</option>
        {% endfor %}
        </select>
    {%- endmacro %}
    <td>{{ selectinputdict('priv_new_member', g.users, 5,true) }}</td>
    '''
    with pytest.raises(InvalidExpression):
        infer(template)
| [
"jinja2schema.model.Dictionary",
"pytest.raises",
"jinja2schema.core.infer"
] | [((665, 680), 'jinja2schema.core.infer', 'infer', (['template'], {}), '(template)\n', (670, 680), False, 'from jinja2schema.core import infer\n'), ((1353, 1385), 'pytest.raises', 'pytest.raises', (['InvalidExpression'], {}), '(InvalidExpression)\n', (1366, 1385), False, 'import pytest\n'), ((1395, 1410), 'jinja2schema.core.infer', 'infer', (['template'], {}), '(template)\n', (1400, 1410), False, 'from jinja2schema.core import infer\n'), ((779, 830), 'jinja2schema.model.Dictionary', 'Dictionary', ([], {'label': '"""users"""', 'data': '{}', 'linenos': '[2, 12]'}), "(label='users', data={}, linenos=[2, 12])\n", (789, 830), False, 'from jinja2schema.model import Dictionary\n')] |
"""OAuth 2.0 WSGI server middleware providing MyProxy certificates as access tokens
"""
__author__ = "<NAME>"
__date__ = "12/12/11"
__copyright__ = "(C) 2011 Science and Technology Facilities Council"
__license__ = "BSD - see LICENSE file in top-level directory"
__contact__ = "<EMAIL>"
__revision__ = "$Id$"
from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options
class RegisterBase(object):
    """Base class for persistent registers backed by a named Beaker cache.

    Entries are stored in a Beaker cache region created from a flat,
    Beaker-style configuration mapping.
    """
    def __init__(self, name, config):
        """Create the named cache from the Beaker configuration mapping."""
        manager = CacheManager(**parse_cache_config_options(config))
        self.cache = manager.get_cache(name)
    def set_value(self, key, value):
        """Store *value* under *key* in the cache."""
        self.cache.put(key, value)
    def get_value(self, key):
        """Return the cached value for *key*."""
        return self.cache.get(key)
    def has_key(self, key):
        """Return True when *key* is present in the cache."""
        return self.cache.has_key(key)
    def parse_config(self, prefix, name, config):
        """Extract ``cache.*`` options for register *name* from *config*.

        Options are looked up under ``<prefix>.<name>.<option>``; defaults
        are type ``file`` and a data dir under ``/tmp/ndgoauth/cache``.
        """
        base = "%s.%s." % (prefix, name)
        lookup = config.get
        return {
            'cache.expire': lookup(base + 'expire', None),
            'cache.type': lookup(base + 'type', 'file'),
            'cache.data_dir': lookup(base + 'data_dir',
                                     '/tmp/ndgoauth/cache/' + name),
            'cache.lock_dir': lookup(base + 'lock_dir', None),
        }
| [
"beaker.util.parse_cache_config_options"
] | [((595, 629), 'beaker.util.parse_cache_config_options', 'parse_cache_config_options', (['config'], {}), '(config)\n', (621, 629), False, 'from beaker.util import parse_cache_config_options\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Apr 9 15:58:40 2021
@author: <NAME> <<EMAIL>>
Try to model data operations on plink files
"""
import io
import re
import csv
import logging
from pathlib import Path
from dataclasses import dataclass
from tqdm import tqdm
from mongoengine.errors import DoesNotExist
from plinkio import plinkfile
from .snpchimp import clean_chrom
from .smarterdb import (
VariantSheep, SampleSheep, Breed, Dataset, SmarterDBException, SEX,
VariantGoat, SampleGoat, Location, get_sample_type)
from .utils import TqdmToLogger
from .illumina import read_snpList, read_illuminaRow
# Get an instance of a logger
logger = logging.getLogger(__name__)
class CodingException(Exception):
    """Raised when a genotype doesn't match the expected allele coding."""
    pass
class PlinkIOException(Exception):
    """Raised when .ped and .map file contents are inconsistent."""
    pass
class IlluminaReportException(Exception):
    """Raised when an Illumina report row doesn't match the SNP list order."""
    pass
@dataclass
class MapRecord():
    """One row of a plink .map file: chromosome, SNP name, cM distance
    and base-pair position."""

    chrom: str
    name: str
    cm: float
    position: int

    def __post_init__(self):
        # dataclass annotations are not enforced at runtime, so coerce the
        # numeric fields explicitly (map files are read as strings)
        self.cm = float(self.cm)
        self.position = int(self.position)
def get_reader(handle: io.TextIOWrapper):
    """Return a ``csv.reader`` over *handle* with an auto-detected dialect.

    Up to 2048 characters are sampled to sniff the delimiter, then the
    file is rewound so the reader starts from the beginning.
    """
    logger.debug(f"Reading '{handle.name}' content")
    sample = handle.read(2048)
    detected = csv.Sniffer().sniff(sample)
    handle.seek(0)
    return csv.reader(handle, dialect=detected)
class SmarterMixin():
    """Common features of a Smarter related dataset file"""
    # NOTE(review): these class-level mutable defaults (list()/set()) are
    # shared across instances until an instance method rebinds them
    _species = None
    mapdata = list()
    locations = list()
    filtered = set()
    VariantSpecies = None
    SampleSpecies = None
    chip_name = None
    # this need to be set to the proper read genotype method
    read_genotype_method = None
    @property
    def species(self):
        # the species name ('Sheep' or 'Goat') this file refers to
        return self._species
    @species.setter
    def species(self, species):
        # determine the SampleClass
        if species == 'Sheep':
            self.VariantSpecies = VariantSheep
            self.SampleSpecies = SampleSheep
        elif species == 'Goat':
            self.VariantSpecies = VariantGoat
            self.SampleSpecies = SampleGoat
        else:
            raise NotImplementedError(
                f"Species '{species}' not yet implemented"
            )
        self._species = species
    def get_breed(self, fid, dataset, *args, **kwargs):
        """Return the Breed whose aliases match *fid* within *dataset*.

        Raises mongoengine ``DoesNotExist`` when no alias matches.
        """
        # this is a $elemMatch query
        breed = Breed.objects(
            aliases__match={'fid': fid, 'dataset': dataset}).get()
        logger.debug(f"Found breed {breed}")
        return breed
    def get_country(self, dataset: Dataset, breed: Breed):
        """Return the country for *breed*: the dataset country unless one
        of the breed's aliases for this dataset overrides it."""
        # this will be the default value
        country = dataset.country
        # search for country in my aliases
        for alias in breed.aliases:
            if alias.dataset != dataset:
                continue
            if alias.country:
                # override country if defined
                country = alias.country
        return country
    def update_mapfile(self, outputfile: str):
        """Write a new .map file using database coordinates, skipping the
        SNPs that ``fetch_coordinates`` marked as filtered."""
        # helper function to get default value for cM
        def get_cM(record):
            """Returns distance in cM or '0' (default for map file)"""
            if hasattr(record, 'cm'):
                return record.cm
            return '0'
        with open(outputfile, 'w') as handle:
            writer = csv.writer(handle, delimiter=' ', lineterminator="\n")
            for idx, record in enumerate(self.mapdata):
                if idx in self.filtered:
                    logger.warning(f"Skipping {record}: not in database")
                    continue
                # get a location relying on indexes
                location = self.locations[idx]
                # get the tracked variant name relying on indexes
                variant_name = self.variants_name[idx]
                # a new record in mapfile
                writer.writerow([
                    clean_chrom(location.chrom),
                    variant_name,
                    get_cM(record),
                    location.position
                ])
    def _deal_with_relationship(self, line: list, dataset: Dataset):
        """Resolve (sex, father, mother) from a .ped line; parents are
        resolved to sample documents only when exactly one sample with
        that original_id exists in *dataset*."""
        # deal with special items
        sex = None
        father_id = None
        mother_id = None
        # test with sex column
        if int(line[4]) in [1, 2]:
            sex = SEX(int(line[4]))
        # test with father id
        if str(line[2]) != '0':
            qs = self.SampleSpecies.objects(
                original_id=line[2], dataset=dataset)
            if qs.count() == 1:
                father_id = qs.get()
        # test with mother id
        if str(line[3]) != '0':
            qs = self.SampleSpecies.objects(
                original_id=line[3], dataset=dataset)
            if qs.count() == 1:
                mother_id = qs.get()
        return sex, father_id, mother_id
    def get_sample(
            self,
            line: list,
            dataset: Dataset,
            sample_field: str = "original_id"):
        """Get a registered sample from database"""
        # search for sample in database
        qs = self.SampleSpecies.objects(
            dataset=dataset,
            **{sample_field: line[1]}
        )
        # NOTE(review): `sex` is computed here but never used in this method
        sex, father_id, mother_id = self._deal_with_relationship(
            line, dataset)
        # this will be the sample I will return
        sample = None
        if qs.count() == 1:
            logger.debug(f"Sample '{line[1]}' found in database")
            sample = qs.get()
            # update records if necessary
            if sample.father_id != father_id or sample.mother_id != mother_id:
                logger.warning(f"Update relationships for sample '{line[1]}'")
                sample.father_id = father_id
                sample.mother_id = mother_id
                sample.save()
        elif qs.count() == 0:
            logger.warning(f"Sample '{line[1]}' not found in database")
        else:
            raise SmarterDBException(
                f"Got {qs.count()} results for '{line[1]}'")
        return sample
    def get_or_create_sample(self, line: list, dataset: Dataset, breed: Breed):
        """Get a sample from database or create a new one"""
        # search for sample in database
        qs = self.SampleSpecies.objects(
            original_id=line[1], dataset=dataset)
        sex, father_id, mother_id = self._deal_with_relationship(
            line, dataset)
        if qs.count() == 1:
            logger.debug(f"Sample '{line[1]}' found in database")
            sample = qs.get()
            # update records if necessary
            if sample.father_id != father_id or sample.mother_id != mother_id:
                logger.warning(f"Update relationships for sample '{line[1]}'")
                sample.father_id = father_id
                sample.mother_id = mother_id
                sample.save()
        elif qs.count() == 0:
            # do I have a multi country dataset?
            country = self.get_country(dataset, breed)
            # test if foreground or background dataset
            type_ = get_sample_type(dataset)
            # insert sample into database
            logger.info(f"Registering sample '{line[1]}' in database")
            sample = self.SampleSpecies(
                original_id=line[1],
                country=country,
                species=dataset.species,
                breed=breed.name,
                breed_code=breed.code,
                dataset=dataset,
                type_=type_,
                chip_name=self.chip_name,
                sex=sex,
                father_id=father_id,
                mother_id=mother_id
            )
            sample.save()
            # incrementing breed n_individuals counter
            breed.n_individuals += 1
            breed.save()
        else:
            raise SmarterDBException(
                f"Got {qs.count()} results for '{line[1]}'")
        return sample
    def fetch_coordinates(
            self, version: str, imported_from: str,
            search_field: str = "name"):
        """Search for variants in smarter database
        Args:
            version (str): the Location.version attribute
            imported_from (str): the Location.imported_from attribute
            search_field (str): search variant by field (def. "name")
        """
        # reset meta informations
        self.locations = list()
        self.filtered = set()
        self.variants_name = list()
        # this is required to search with the desidered coordinate system
        # relying on mongodb elemMatch and projection
        coordinate_system = {
            "imported_from": imported_from,
            "version": version
        }
        tqdm_out = TqdmToLogger(logger, level=logging.INFO)
        for idx, record in enumerate(tqdm(
                self.mapdata, file=tqdm_out, mininterval=1)):
            try:
                # TODO: remember to project illumina_top if it become
                # a VariantSpecies attribute
                variant = self.VariantSpecies.objects(
                    locations__match=coordinate_system,
                    **{search_field: record.name}
                ).fields(
                    elemMatch__locations=coordinate_system,
                    name=1,
                    rs_id=1
                ).get()
            except DoesNotExist as e:
                logger.warning(
                    f"Couldn't find {record.name} in {coordinate_system}"
                    f" assembly: {e}")
                # skip this variant (even in ped)
                self.filtered.add(idx)
                # need to add an empty value in locations (or my indexes
                # won't work properly). The same for variants name
                self.locations.append(None)
                self.variants_name.append(None)
                # don't check location for missing SNP
                continue
            # using projection I will have only one location if I could
            # find a SNP
            location = variant.locations[0]
            # track data for this location
            self.locations.append(location)
            # track variant.name read from database (useful when searching
            # using probeset_id)
            self.variants_name.append(variant.name)
        logger.debug(
            f"collected {len(self.locations)} in '{version}' coordinates")
    def _to_top(
            self, index: int, genotype: list, coding: str,
            location: Location) -> list:
        """
        Check genotype with coding and returns illumina_top alleles
        Parameters
        ----------
        index: int
            The i-th SNP received
        genotype : list
            The genotype as a list (ex: ['T', 'C'])
        coding : str
            The coding input type ('top', 'forward', ...)
        location : Location
            A smarterdb location used to check input genotype and coding and
            to return the corresponing illumina top genotype (ex ['A', 'G'])
        Raises
        ------
        CodingException
            Raised when input genotype hasn't a match in the smarter database
            with the provided coding
        NotImplementedError
            A coding format not yet supported (implemented)
        Returns
        -------
        list
            The illumina top genotype as a list (ex ['A', 'G'])
        """
        # for semplicity
        a1, a2 = genotype
        # the returned value
        top_genotype = []
        # TODO: coding need to be a dataset attribute
        if coding == 'top':
            if not location.is_top(genotype):
                logger.critical(
                    f"Error for SNP {index}:{self.mapdata[index].name}: "
                    f"{a1}/{a2} <> {location.illumina_top}"
                )
                raise CodingException("Not illumina top format")
            # allele coding is the same received as input
            top_genotype = genotype
        elif coding == 'forward':
            if not location.is_forward(genotype):
                logger.critical(
                    f"Error for SNP {index}:{self.mapdata[index].name}: "
                    f"{a1}/{a2} <> {location.illumina_forward}"
                )
                raise CodingException("Not illumina forward format")
            # change the allele coding
            top_genotype = location.forward2top(genotype)
        elif coding == 'ab':
            if not location.is_ab(genotype):
                logger.critical(
                    f"Error for SNP {index}:{self.mapdata[index].name}: "
                    f"{a1}/{a2} <> A/B"
                )
                raise CodingException("Not illumina ab format")
            # change the allele coding
            top_genotype = location.ab2top(genotype)
        elif coding == 'affymetrix':
            if not location.is_affymetrix(genotype):
                logger.critical(
                    f"Error for SNP {index}:{self.mapdata[index].name}: "
                    f"{a1}/{a2} <> {location.affymetrix_ab}"
                )
                raise CodingException("Not affymetrix format")
            # change the allele coding
            top_genotype = location.affy2top(genotype)
        else:
            raise NotImplementedError(f"Coding '{coding}' not supported")
        return top_genotype
    def _process_genotypes(self, line: list, coding: str):
        """Return a copy of the .ped *line* with genotypes converted to
        illumina_top coding; half-missing calls are forced to missing and
        filtered SNPs are left untouched (they are removed later)."""
        new_line = line.copy()
        # ok now is time to update genotypes
        for i in range(len(self.mapdata)):
            # replacing the i-th genotypes. Skip 6 columns
            a1 = new_line[6+i*2]
            a2 = new_line[6+i*2+1]
            genotype = [a1, a2]
            # xor condition: https://stackoverflow.com/a/433161/4385116
            if (a1 in ["0", "-"]) != (a2 in ["0", "-"]):
                logger.warning(
                    f"Found half-missing SNP in {new_line[1]}: {i*2}: "
                    f"[{a1}/{a2}]. Forcing SNP to be MISSING")
                new_line[6+i*2], new_line[6+i*2+1] = ["0", "0"]
                continue
            # is this snp filtered out
            if i in self.filtered:
                logger.debug(
                    f"Skipping {self.mapdata[i].name}:[{a1}/{a2}] "
                    "not in database!"
                )
                continue
            # get the proper position
            location = self.locations[i]
            # check and return illumina top genotype
            top_genotype = self._to_top(i, genotype, coding, location)
            # replace alleles in ped lines only if necessary
            new_line[6+i*2], new_line[6+i*2+1] = top_genotype
        return new_line
    def _check_file_sizes(self, line):
        """Raise PlinkIOException when the .ped line doesn't carry exactly
        two alleles per SNP in mapdata plus the 6 leading columns."""
        # check genotypes size 2*mapdata (diploidy) + 6 extra columns:
        if len(line) != len(self.mapdata)*2 + 6:
            logger.critical(
                f"SNPs sizes don't match in '{self.mapfile}' "
                "and '{self.pedfile}'")
            # NOTE(review): the fragment above is not an f-string, so it
            # logs the literal text "{self.pedfile}" instead of the path
            logger.critical("Please check file contents")
            raise PlinkIOException(".ped line size doens't match .map size")
    def _process_relationship(self, line, sample):
        """Replace parents' original ids with smarter ids when resolvable,
        otherwise reset the unresolved parent column to '0'."""
        # create a copy of the original object
        new_line = line.copy()
        # add father or mather to ped line (if I can)
        if str(line[2]) != '0':
            if sample.father_id:
                new_line[2] = sample.father_id.smarter_id
            else:
                logger.warning(
                    f"Cannot resolve relationship for father {line[2]}")
                new_line[2] = '0'
        if str(line[3]) != '0':
            if sample.mother_id:
                new_line[3] = sample.mother_id.smarter_id
            else:
                logger.warning(
                    f"Cannot resolve relationship for mother {line[3]}")
                new_line[3] = '0'
        return new_line
    def _process_pedline(
            self,
            line: list,
            dataset: Dataset,
            coding: str,
            create_samples: bool = False,
            sample_field: str = "original_id"):
        """Transform one .ped line: map fid/iid to smarter ids, resolve
        relationships, convert genotypes to illumina_top and drop filtered
        SNPs. Returns None when the sample isn't registered."""
        self._check_file_sizes(line)
        logger.debug(f"Processing {line[:10]+ ['...']}")
        # check for breed in database reling on fid.
        try:
            breed = self.get_breed(fid=line[0], dataset=dataset)
        except DoesNotExist as e:
            logger.error(e)
            raise SmarterDBException(
                f"Couldn't find breed_code '{line[0]}': {line[:10]+ ['...']}"
            )
        # check for sample in database
        if create_samples:
            sample = self.get_or_create_sample(line, dataset, breed)
        else:
            sample = self.get_sample(line, dataset, sample_field)
        # if I couldn't find a registered sample (in such case)
        # i can skip such record
        if not sample:
            return None
        # a new line obj
        new_line = line.copy()
        # updating ped line with smarter ids
        new_line[0] = breed.code
        new_line[1] = sample.smarter_id
        # replace relationship if possible
        new_line = self._process_relationship(new_line, sample)
        # check and fix genotypes if necessary
        new_line = self._process_genotypes(new_line, coding)
        # need to remove filtered snps from ped line
        for index in sorted(self.filtered, reverse=True):
            # index is snp position. Need to delete two fields
            del new_line[6+index*2+1]
            del new_line[6+index*2]
        return new_line
    def update_pedfile(
            self,
            outputfile: str,
            dataset: Dataset,
            coding: str,
            create_samples: bool = False,
            sample_field: str = "original_id",
            *args,
            **kwargs):
        """
        Write a new pedfile relying on illumina_top genotypes and coordinates
        stored in smarter database
        Args:
            outputfile (str): write ped to this path (overwrite if exists)
            dataset (Dataset): the dataset we are converting
            coding (str): the source coding (could be 'top', 'ab', 'forward')
            create_samples (bool): create samples if not exist (useful to
                create samples directly from ped file)
            sample_field (str): search samples using this attribute (def.
                'original_id')
        """
        with open(outputfile, "w") as target:
            writer = csv.writer(
                target, delimiter=' ', lineterminator="\n")
            processed = 0
            for line in self.read_genotype_method(
                    dataset=dataset, *args, **kwargs):
                # covert the ped line with the desidered format
                new_line = self._process_pedline(
                    line, dataset, coding, create_samples, sample_field)
                if new_line:
                    # write updated line into updated ped file
                    logger.info(
                        f"Writing: {new_line[:10]+ ['...']} "
                        f"({int((len(new_line)-6)/2)} SNPs)")
                    writer.writerow(new_line)
                    processed += 1
                else:
                    logger.warning(
                        f"Skipping: {line[:10]+ ['...']} "
                        f"({int((len(line)-6)/2)} SNPs)"
                    )
            logger.info(f"Processed {processed} individuals")
class TextPlinkIO(SmarterMixin):
    """Plink text file (.ped/.map) reader for SMARTER datasets."""

    mapfile = None
    pedfile = None

    def __init__(
            self,
            prefix: str = None,
            mapfile: str = None,
            pedfile: str = None,
            species: str = None,
            chip_name: str = None):
        """Configure file paths from a common *prefix* or explicit paths."""
        # need to be set in order to write a genotype
        self.read_genotype_method = self.read_pedfile
        if prefix:
            self.mapfile, self.pedfile = prefix + ".map", prefix + ".ped"
        elif mapfile or pedfile:
            self.mapfile, self.pedfile = mapfile, pedfile
        if species:
            self.species = species
        if chip_name:
            self.chip_name = chip_name

    def read_mapfile(self):
        """Read map data and track informations in memory. Useful to process
        data files"""
        with open(self.mapfile) as handle:
            self.mapdata = [
                MapRecord(*row) for row in get_reader(handle)]

    def read_pedfile(self, *args, **kwargs):
        """Open pedfile for reading return iterator"""
        with open(self.pedfile) as handle:
            yield from get_reader(handle)
# a new class for affymetrix plink files, which are slightly different from
# plink text files
class AffyPlinkIO(TextPlinkIO):
    """Affymetrix flavoured plink text files: mixed space/tab separators,
    '#' comment lines and .ped rows lacking the standard plink columns."""
    def read_mapfile(self):
        """Read map data and track informations in memory. Useful to process
        data files"""
        self.mapdata = []
        with open(self.mapfile) as handle:
            # affy files has both " " and "\t" in their files
            for line in handle:
                record = re.split('[ \t]+', line.strip())
                # affy data may have comments in files
                if not record[0].startswith("#"):
                    self.mapdata.append(MapRecord(*record))
    def get_breed(self, fid, *args, **kwargs):
        """Override the default get_breed method"""
        # unlike the mixin, look the breed up by code and species rather
        # than through dataset aliases
        breed = Breed.objects(code=fid, species=self.species).get()
        logger.debug(f"Found breed {breed}")
        return breed
    def read_pedfile(self, fid: str, *args, **kwargs):
        """Open pedfile for reading return iterator"""
        with open(self.pedfile) as handle:
            # affy files has both " " and "\t" in their files
            for record in handle:
                # affy data may have comments in files
                if record.startswith("#"):
                    logger.info(f"Skipping {record}")
                    continue
                line = re.split('[ \t]+', record.strip())
                # affy ped lacks of plink columns: rebuild the 6 leading
                # columns so downstream code sees a standard ped line
                line.insert(0, fid)  # FID
                line.insert(2, '0')  # father
                line.insert(3, '0')  # mother
                line.insert(4, '0')  # SEX
                line.insert(5, -9)  # phenotype
                yield line
class BinaryPlinkIO(SmarterMixin):
    """Plink binary (.bed/.bim/.fam) reader built on top of plinkio."""
    plink_file = None
    _prefix = None
    def __init__(
            self,
            prefix: str = None,
            species: str = None,
            chip_name: str = None):
        # need to be set in order to write a genotype
        self.read_genotype_method = self.read_pedfile
        if prefix:
            self.prefix = prefix
        if species:
            self.species = species
        if chip_name:
            self.chip_name = chip_name
    @property
    def prefix(self):
        # plink binary file set prefix (path without extension)
        return self._prefix
    @prefix.setter
    def prefix(self, prefix: str):
        # assigning the prefix opens the plink file set as a side effect
        self._prefix = prefix
        self.plink_file = plinkfile.open(self._prefix)
    def read_mapfile(self):
        """Read map data and track informations in memory. Useful to process
        data files"""
        self.mapdata = list()
        for locus in self.plink_file.get_loci():
            record = MapRecord(
                chrom=locus.chromosome,
                name=locus.name,
                position=locus.bp_position,
                cm=locus.position
            )
            self.mapdata.append(record)
    def read_pedfile(self, *args, **kwargs):
        """Open pedfile for reading return iterator"""
        sample_list = self.plink_file.get_samples()
        locus_list = self.plink_file.get_loci()
        # NOTE(review): this materializes the whole genotype matrix in
        # memory (one row per locus) before iterating samples
        snp_arrays = list(self.plink_file)
        def format_sex(value):
            # only 1 and 2 are valid ped sex codes; anything else -> unknown
            if value in [1, 2]:
                return str(value)
            else:
                return "0"
        def convert(genotype, locus):
            # in binary format, allele2 is REF allele1 ALT
            if genotype == 0:
                return locus.allele1, locus.allele1
            elif genotype == 1:
                return locus.allele2, locus.allele1
            elif genotype == 2:
                return locus.allele2, locus.allele2
            elif genotype == 3:
                # code 3 is treated as a missing genotype
                return "0", "0"
            else:
                raise CodingException("Genotype %s Not supported" % genotype)
        # determine genotype length
        size = 6 + 2*len(self.mapdata)
        for sample_idx, sample in enumerate(sample_list):
            # this will be the returned row
            line = ["0"] * size
            # set values. I need to set a breed code in order to get a
            # proper ped line
            line[0:6] = [
                sample.fid,
                sample.iid,
                sample.father_iid,
                sample.mother_iid,
                format_sex(sample.sex),
                int(sample.phenotype)
            ]
            for idx, locus in enumerate(locus_list):
                genotype = snp_arrays[idx][sample_idx]
                line[6+idx*2], line[6+idx*2+1] = convert(genotype, locus)
            yield line
class IlluminaReportIO(SmarterMixin):
    """Reader for Illumina report files paired with a SNP list file."""
    snpfile = None
    report = None
    def __init__(
            self,
            snpfile: str = None,
            report: str = None,
            species: str = None,
            chip_name: str = None):
        # need to be set in order to write a genotype
        self.read_genotype_method = self.read_reportfile
        if snpfile or report:
            self.snpfile = snpfile
            self.report = report
        if species:
            self.species = species
        if chip_name:
            self.chip_name = chip_name
    def get_breed(self, fid, *args, **kwargs):
        """Override the default get_breed method"""
        # unlike the mixin, look the breed up by code and species rather
        # than through dataset aliases
        breed = Breed.objects(code=fid, species=self.species).get()
        logger.debug(f"Found breed {breed}")
        return breed
    def read_snpfile(self):
        """Read snp data and track informations in memory. Useful to process
        data files"""
        self.mapdata = list(read_snpList(self.snpfile))
    # this will be called when calling read_genotype_method()
    def read_reportfile(
            self, fid: str = None, dataset: Dataset = None, *args, **kwargs):
        """Open illumina report returns iterator
        Report rows are grouped by sample_id (rows for one sample must be
        contiguous) and each group is emitted as one ped-like line; when
        *fid* is None the breed code is resolved from the database.
        """
        # determine genotype length
        size = 6 + 2*len(self.mapdata)
        # track sample
        last_sample = None
        # need to have snp indexes
        indexes = [record.name for record in self.mapdata]
        # this will be the returned row
        line = list()
        # this is the snp position index
        idx = 0
        # tray to returns something like a ped row
        for row in read_illuminaRow(self.report):
            if row.sample_id != last_sample:
                logger.debug(f"Reading sample {row.sample_id}")
                # this is not returned if I'm processing the first sample
                if last_sample:
                    yield line
                # initialize an empty array
                line = ["0"] * size
                logger.debug(f"Searching fid for sample '{row.sample_id}'")
                # determine fid from sample, if not received as argument
                if not fid:
                    sample = self.SampleSpecies.objects.get(
                        original_id=row.sample_id,
                        dataset=dataset
                    )
                    breed = sample.breed_code
                    logger.debug(f"Found breed {breed} from {row.sample_id}")
                else:
                    breed = fid
                # set values. I need to set a breed code in order to get a
                # proper ped line
                line[0], line[1], line[5] = breed, row.sample_id, -9
                # track last sample
                last_sample = row.sample_id
                # reset index
                idx = 0
            # check snp name consistency
            if indexes[idx] != row.snp_name:
                raise IlluminaReportException(
                    f"snp positions doens't match "
                    f"{indexes[idx]}<>{row.snp_name}"
                )
            # update line relying on records
            line[6+idx*2], line[6+idx*2+1] = row.allele1_ab, row.allele2_ab
            # updating indexes
            idx += 1
        # after completing rows, I need to return last one
        # NOTE(review): on an empty report this yields the initial empty list
        yield line
def plink_binary_exists(prefix: Path):
    """Return True when the full plink binary trio (.bed/.bim/.fam)
    exists for *prefix*, False as soon as any file is missing."""
    required = (".bed", ".bim", ".fam")
    return all(prefix.with_suffix(ext).exists() for ext in required)
| [
"logging.getLogger",
"csv.writer",
"plinkio.plinkfile.open",
"tqdm.tqdm",
"csv.Sniffer",
"csv.reader"
] | [((681, 708), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (698, 708), False, 'import logging\n'), ((1224, 1237), 'csv.Sniffer', 'csv.Sniffer', ([], {}), '()\n', (1235, 1237), False, 'import csv\n'), ((1315, 1350), 'csv.reader', 'csv.reader', (['handle'], {'dialect': 'dialect'}), '(handle, dialect=dialect)\n', (1325, 1350), False, 'import csv\n'), ((23160, 23188), 'plinkio.plinkfile.open', 'plinkfile.open', (['self._prefix'], {}), '(self._prefix)\n', (23174, 23188), False, 'from plinkio import plinkfile\n'), ((3296, 3350), 'csv.writer', 'csv.writer', (['handle'], {'delimiter': '""" """', 'lineterminator': '"""\n"""'}), "(handle, delimiter=' ', lineterminator='\\n')\n", (3306, 3350), False, 'import csv\n'), ((8771, 8819), 'tqdm.tqdm', 'tqdm', (['self.mapdata'], {'file': 'tqdm_out', 'mininterval': '(1)'}), '(self.mapdata, file=tqdm_out, mininterval=1)\n', (8775, 8819), False, 'from tqdm import tqdm\n'), ((18500, 18554), 'csv.writer', 'csv.writer', (['target'], {'delimiter': '""" """', 'lineterminator': '"""\n"""'}), "(target, delimiter=' ', lineterminator='\\n')\n", (18510, 18554), False, 'import csv\n')] |
import math


def solve(n):
    """Return n // i + i - 2 for the largest divisor i of n with i <= sqrt(n).

    Equivalently, the minimum of (a - 1) + (b - 1) over all factor
    pairs a * b == n. Always terminates because i == 1 divides any n.
    """
    # math.isqrt gives the exact integer square root; int(math.sqrt(n))
    # can be off by one for large n because of float rounding.
    for i in range(math.isqrt(n), 0, -1):
        if n % i == 0:
            return n // i + i - 2


if __name__ == '__main__':
    # Guarding the I/O keeps the module importable without blocking on stdin.
    print(solve(int(input())))
| [
"math.sqrt"
] | [((52, 64), 'math.sqrt', 'math.sqrt', (['N'], {}), '(N)\n', (61, 64), False, 'import math\n')] |
import graphene
import dishes.schema
class Query(dishes.schema.Query, graphene.ObjectType):
    """Root GraphQL query type; inherits all fields from the dishes app."""
    pass
class Mutations(graphene.ObjectType):
    """Root GraphQL mutation type exposing CRUD fields for dish data."""
    # category create / edit / delete
    create_category = dishes.schema.CreateCategory.Field()
    edit_category = dishes.schema.EditCategory.Field()
    delete_category = dishes.schema.DeleteCategory.Field()
    # dish create / edit / delete
    create_dish = dishes.schema.CreateDish.Field()
    edit_dish = dishes.schema.EditDish.Field()
    delete_dish = dishes.schema.DeleteDish.Field()
schema = graphene.Schema(query=Query, mutation=Mutations)
| [
"graphene.Schema"
] | [((493, 541), 'graphene.Schema', 'graphene.Schema', ([], {'query': 'Query', 'mutation': 'Mutations'}), '(query=Query, mutation=Mutations)\n', (508, 541), False, 'import graphene\n')] |
# Generated by Django 2.0.9 on 2019-04-30 08:38
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated initial migration: creates the ``Probe`` table."""
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Probe',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # (name, version) are also stored combined in `nameversion`
                ('name', models.CharField(help_text='Name of the probe.', max_length=128, unique=True)),
                ('version', models.CharField(help_text='Version of the probe.', max_length=28)),
                ('nameversion', models.CharField(help_text='Name, version tuple.', max_length=128)),
                ('description', models.CharField(max_length=1024)),
                ('comment', models.CharField(max_length=512)),
                ('repository', models.CharField(max_length=512)),
                ('docurl', models.CharField(max_length=512)),
                # audit fields: who changed the record and when
                ('user', models.CharField(blank=True, max_length=32)),
                ('datetime', models.DateTimeField(blank=True, max_length=32, null=True)),
            ],
            options={
                'verbose_name': 'Probe',
            },
        ),
    ]
| [
"django.db.models.DateTimeField",
"django.db.models.AutoField",
"django.db.models.CharField"
] | [((301, 394), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (317, 394), False, 'from django.db import migrations, models\n'), ((418, 495), 'django.db.models.CharField', 'models.CharField', ([], {'help_text': '"""Name of the probe."""', 'max_length': '(128)', 'unique': '(True)'}), "(help_text='Name of the probe.', max_length=128, unique=True)\n", (434, 495), False, 'from django.db import migrations, models\n'), ((526, 592), 'django.db.models.CharField', 'models.CharField', ([], {'help_text': '"""Version of the probe."""', 'max_length': '(28)'}), "(help_text='Version of the probe.', max_length=28)\n", (542, 592), False, 'from django.db import migrations, models\n'), ((627, 693), 'django.db.models.CharField', 'models.CharField', ([], {'help_text': '"""Name, version tuple."""', 'max_length': '(128)'}), "(help_text='Name, version tuple.', max_length=128)\n", (643, 693), False, 'from django.db import migrations, models\n'), ((728, 761), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(1024)'}), '(max_length=1024)\n', (744, 761), False, 'from django.db import migrations, models\n'), ((792, 824), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(512)'}), '(max_length=512)\n', (808, 824), False, 'from django.db import migrations, models\n'), ((858, 890), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(512)'}), '(max_length=512)\n', (874, 890), False, 'from django.db import migrations, models\n'), ((920, 952), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(512)'}), '(max_length=512)\n', (936, 952), False, 'from django.db import migrations, models\n'), ((980, 1023), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(32)'}), '(blank=True, 
max_length=32)\n', (996, 1023), False, 'from django.db import migrations, models\n'), ((1055, 1113), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'max_length': '(32)', 'null': '(True)'}), '(blank=True, max_length=32, null=True)\n', (1075, 1113), False, 'from django.db import migrations, models\n')] |
import pytest
from django.contrib.auth import get_user_model
from django.template import Context, Template
from proposals.models import AdditionalSpeaker, TalkProposal
def render_template(template_string, context_data=None):
    """Render *template_string* through the Django template engine.

    ``context_data`` is an optional mapping; ``None`` (or any falsy
    value) renders with an empty context.
    """
    template = Template(template_string)
    context = Context(context_data if context_data else {})
    return template.render(context)
@pytest.fixture
def talk_proposals(talk_proposal, user, another_user):
    """Build three proposals with 0, 1 and 2 additional speakers."""
    proposal_0 = TalkProposal.objects.create(
        submitter=another_user, title='Concrete construct saturation',
    )
    proposal_1 = talk_proposal
    AdditionalSpeaker.objects.create(user=another_user, proposal=proposal_1)
    # third account so proposal_2 can carry two additional speakers
    user_3 = get_user_model().objects.create_user(
        email='<EMAIL>', password='19',
        speaker_name='<NAME>',
    )
    proposal_2 = TalkProposal.objects.create(
        submitter=user_3, title='Render-farm smart-meta-rain-ware',
    )
    AdditionalSpeaker.objects.create(user=another_user, proposal=proposal_2)
    AdditionalSpeaker.objects.create(user=user, proposal=proposal_2)
    return [
        proposal_0,  # Proposal without additional speakers.
        proposal_1,  # Proposal with one additional speaker.
        proposal_2,  # Proposal with two additional speakers.
    ]
def test_speaker_names_display(talk_proposals, parser):
    """The speaker_names_display filter joins submitter + extra speakers."""
    result = render_template(
        '{% load proposals %}'
        '<ul>'
        '{% for proposal in proposals %}'
        '<li>{{ proposal|speaker_names_display }}</li>'
        '{% endfor %}'
        '</ul>', {'proposals': talk_proposals},
    )
    # normalize both sides through the parser before comparing markup
    actual = parser.arrange(parser.parse(text=result, create_parent=False))
    expected = parser.arrange("""
        <ul>
          <li>Misaki Mei</li>
          <li>User and Misaki Mei</li>
          <li>Somebody Somewhere, Misaki Mei and User</li>
        </ul>
    """)
    assert actual == expected
| [
"django.contrib.auth.get_user_model",
"django.template.Template",
"proposals.models.AdditionalSpeaker.objects.create",
"proposals.models.TalkProposal.objects.create",
"django.template.Context"
] | [((424, 519), 'proposals.models.TalkProposal.objects.create', 'TalkProposal.objects.create', ([], {'submitter': 'another_user', 'title': '"""Concrete construct saturation"""'}), "(submitter=another_user, title=\n 'Concrete construct saturation')\n", (451, 519), False, 'from proposals.models import AdditionalSpeaker, TalkProposal\n'), ((566, 638), 'proposals.models.AdditionalSpeaker.objects.create', 'AdditionalSpeaker.objects.create', ([], {'user': 'another_user', 'proposal': 'proposal_1'}), '(user=another_user, proposal=proposal_1)\n', (598, 638), False, 'from proposals.models import AdditionalSpeaker, TalkProposal\n'), ((785, 877), 'proposals.models.TalkProposal.objects.create', 'TalkProposal.objects.create', ([], {'submitter': 'user_3', 'title': '"""Render-farm smart-meta-rain-ware"""'}), "(submitter=user_3, title=\n 'Render-farm smart-meta-rain-ware')\n", (812, 877), False, 'from proposals.models import AdditionalSpeaker, TalkProposal\n'), ((892, 964), 'proposals.models.AdditionalSpeaker.objects.create', 'AdditionalSpeaker.objects.create', ([], {'user': 'another_user', 'proposal': 'proposal_2'}), '(user=another_user, proposal=proposal_2)\n', (924, 964), False, 'from proposals.models import AdditionalSpeaker, TalkProposal\n'), ((969, 1033), 'proposals.models.AdditionalSpeaker.objects.create', 'AdditionalSpeaker.objects.create', ([], {'user': 'user', 'proposal': 'proposal_2'}), '(user=user, proposal=proposal_2)\n', (1001, 1033), False, 'from proposals.models import AdditionalSpeaker, TalkProposal\n'), ((311, 332), 'django.template.Context', 'Context', (['context_data'], {}), '(context_data)\n', (318, 332), False, 'from django.template import Context, Template\n'), ((278, 303), 'django.template.Template', 'Template', (['template_string'], {}), '(template_string)\n', (286, 303), False, 'from django.template import Context, Template\n'), ((653, 669), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (667, 669), False, 'from 
django.contrib.auth import get_user_model\n')] |
from django.db import models
from django.core.urlresolvers import reverse
from vocabs.models import SkosConcept
from places.models import Place
from bib.models import Book
class Institution(models.Model):
    """An institution, optionally nested under a parent institution."""
    name = models.CharField(max_length=300, blank=True)
    abbreviation = models.CharField(max_length=300, blank=True)
    identifier = models.CharField(max_length=300, blank=True)
    # self-referential FK; no on_delete, matching the pre-2.0 Django style
    # implied by the `django.core.urlresolvers` import above
    parent_institution = models.ForeignKey('Institution', blank=True, null=True)
    def __str__(self):
        return "{}".format(self.name)
class Person(models.Model):
    """A person (e.g. author or curator), optionally tied to an institution."""
    forename = models.CharField(max_length=300, blank=True)
    name = models.CharField(max_length=300, blank=True)
    institution = models.ForeignKey(Institution, blank=True, null=True)
    identifier = models.CharField(max_length=300, blank=True)
    def __str__(self):
        # only the surname is used for display
        return "{}".format(self.name)
class Document(models.Model):
    """An archival document: analogue source metadata plus digitisation data."""
    legacy_id = models.CharField(max_length=300, blank=True, verbose_name='ID')
    filename = models.CharField(max_length=300, blank=True, verbose_name="Dateiname")
    entry_order = models.CharField(
        max_length=300, blank=True, verbose_name="Ordnungskriterium/Eingabe"
    )
    medium = models.ForeignKey(
        SkosConcept, blank=True, null=True, related_name='medium', verbose_name="Medium"
    )
    analogue_format = models.ForeignKey(
        SkosConcept, blank=True, null=True, related_name="analogue_format",
        verbose_name="Analoges Format"
    )
    author = models.ManyToManyField(
        Person, blank=True, related_name="author", verbose_name="Autor"
    )
    institution = models.ManyToManyField(
        Institution, blank=True, verbose_name="Institution", related_name="institution_document"
    )
    # free-text because analogue dates may be imprecise ("ca. 1920")
    date_analogue = models.CharField(max_length=300, blank=True, verbose_name="Analoges Datum")
    date_digitization = models.DateField(
        auto_now=False, blank=True, null=True, verbose_name="Datum der Digitalisierung"
    )
    digital_format = models.ForeignKey(
        SkosConcept, blank=True, null=True, related_name="digital_format",
        verbose_name="Speicherformat"
    )
    note = models.TextField(blank=True, verbose_name="Anmerkung")
    content = models.TextField(blank=True, verbose_name="Inhalt")
    topic_group = models.ForeignKey(
        SkosConcept, blank=True, null=True, related_name="topic_group",
        verbose_name="Gruppe"
    )
    combination = models.CharField(max_length=300, blank=True, verbose_name="Kombination")
    location_id = models.CharField(max_length=300, blank=True, verbose_name="Fundnummer in FDB")
    place = models.ForeignKey(Place, blank=True, null=True, verbose_name="KG/Areal")
    location_digitized_object = models.CharField(
        max_length=300, blank=True, verbose_name="Aufbewahrung Datei"
    )
    location_analogue = models.CharField(max_length=300, blank=True, verbose_name="Standort analog")
    curator = models.ForeignKey(
        Person, blank=True, null=True, verbose_name="Bearbeiter Digitalisierung"
    )
    filesize = models.FloatField(blank=True, null=True, verbose_name="Dateigröße KB")
    # NOTE(review): field name has a typo ("digizization"); renaming would
    # require a schema migration, so it is documented but left unchanged.
    place_digizization = models.ForeignKey(
        Institution, blank=True, null=True, related_name="place_digizization",
        verbose_name="Ort der Digitalisierung"
    )
    reference = models.ManyToManyField(Book, blank=True, verbose_name="Literaturzitate")
    path = models.CharField(max_length=300, blank=True, verbose_name="Dateipfad")
    amendments = models.TextField(blank=True, verbose_name="Ergänzungen")
    def __str__(self):
        return "{}".format(self.filename)
    def get_absolute_url(self):
        return reverse('documents:document_detail', kwargs={'pk': self.id})
| [
"django.db.models.DateField",
"django.db.models.FloatField",
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.db.models.ManyToManyField",
"django.core.urlresolvers.reverse",
"django.db.models.CharField"
] | [((218, 262), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'blank': '(True)'}), '(max_length=300, blank=True)\n', (234, 262), False, 'from django.db import models\n'), ((282, 326), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'blank': '(True)'}), '(max_length=300, blank=True)\n', (298, 326), False, 'from django.db import models\n'), ((344, 388), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'blank': '(True)'}), '(max_length=300, blank=True)\n', (360, 388), False, 'from django.db import models\n'), ((414, 469), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""Institution"""'], {'blank': '(True)', 'null': '(True)'}), "('Institution', blank=True, null=True)\n", (431, 469), False, 'from django.db import models\n'), ((577, 621), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'blank': '(True)'}), '(max_length=300, blank=True)\n', (593, 621), False, 'from django.db import models\n'), ((633, 677), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'blank': '(True)'}), '(max_length=300, blank=True)\n', (649, 677), False, 'from django.db import models\n'), ((696, 749), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Institution'], {'blank': '(True)', 'null': '(True)'}), '(Institution, blank=True, null=True)\n', (713, 749), False, 'from django.db import models\n'), ((767, 811), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'blank': '(True)'}), '(max_length=300, blank=True)\n', (783, 811), False, 'from django.db import models\n'), ((922, 985), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'blank': '(True)', 'verbose_name': '"""ID"""'}), "(max_length=300, blank=True, verbose_name='ID')\n", (938, 985), False, 'from django.db import models\n'), ((1001, 1071), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 
'blank': '(True)', 'verbose_name': '"""Dateiname"""'}), "(max_length=300, blank=True, verbose_name='Dateiname')\n", (1017, 1071), False, 'from django.db import models\n'), ((1090, 1181), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'blank': '(True)', 'verbose_name': '"""Ordnungskriterium/Eingabe"""'}), "(max_length=300, blank=True, verbose_name=\n 'Ordnungskriterium/Eingabe')\n", (1106, 1181), False, 'from django.db import models\n'), ((1204, 1307), 'django.db.models.ForeignKey', 'models.ForeignKey', (['SkosConcept'], {'blank': '(True)', 'null': '(True)', 'related_name': '"""medium"""', 'verbose_name': '"""Medium"""'}), "(SkosConcept, blank=True, null=True, related_name='medium',\n verbose_name='Medium')\n", (1221, 1307), False, 'from django.db import models\n'), ((1340, 1462), 'django.db.models.ForeignKey', 'models.ForeignKey', (['SkosConcept'], {'blank': '(True)', 'null': '(True)', 'related_name': '"""analogue_format"""', 'verbose_name': '"""Analoges Format"""'}), "(SkosConcept, blank=True, null=True, related_name=\n 'analogue_format', verbose_name='Analoges Format')\n", (1357, 1462), False, 'from django.db import models\n'), ((1493, 1584), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['Person'], {'blank': '(True)', 'related_name': '"""author"""', 'verbose_name': '"""Autor"""'}), "(Person, blank=True, related_name='author',\n verbose_name='Autor')\n", (1515, 1584), False, 'from django.db import models\n'), ((1613, 1729), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['Institution'], {'blank': '(True)', 'verbose_name': '"""Institution"""', 'related_name': '"""institution_document"""'}), "(Institution, blank=True, verbose_name='Institution',\n related_name='institution_document')\n", (1635, 1729), False, 'from django.db import models\n'), ((1760, 1835), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'blank': '(True)', 'verbose_name': '"""Analoges Datum"""'}), 
"(max_length=300, blank=True, verbose_name='Analoges Datum')\n", (1776, 1835), False, 'from django.db import models\n'), ((1860, 1962), 'django.db.models.DateField', 'models.DateField', ([], {'auto_now': '(False)', 'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Datum der Digitalisierung"""'}), "(auto_now=False, blank=True, null=True, verbose_name=\n 'Datum der Digitalisierung')\n", (1876, 1962), False, 'from django.db import models\n'), ((1993, 2113), 'django.db.models.ForeignKey', 'models.ForeignKey', (['SkosConcept'], {'blank': '(True)', 'null': '(True)', 'related_name': '"""digital_format"""', 'verbose_name': '"""Speicherformat"""'}), "(SkosConcept, blank=True, null=True, related_name=\n 'digital_format', verbose_name='Speicherformat')\n", (2010, 2113), False, 'from django.db import models\n'), ((2142, 2196), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'verbose_name': '"""Anmerkung"""'}), "(blank=True, verbose_name='Anmerkung')\n", (2158, 2196), False, 'from django.db import models\n'), ((2211, 2262), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'verbose_name': '"""Inhalt"""'}), "(blank=True, verbose_name='Inhalt')\n", (2227, 2262), False, 'from django.db import models\n'), ((2281, 2390), 'django.db.models.ForeignKey', 'models.ForeignKey', (['SkosConcept'], {'blank': '(True)', 'null': '(True)', 'related_name': '"""topic_group"""', 'verbose_name': '"""Gruppe"""'}), "(SkosConcept, blank=True, null=True, related_name=\n 'topic_group', verbose_name='Gruppe')\n", (2298, 2390), False, 'from django.db import models\n'), ((2426, 2498), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'blank': '(True)', 'verbose_name': '"""Kombination"""'}), "(max_length=300, blank=True, verbose_name='Kombination')\n", (2442, 2498), False, 'from django.db import models\n'), ((2517, 2595), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'blank': '(True)', 
'verbose_name': '"""Fundnummer in FDB"""'}), "(max_length=300, blank=True, verbose_name='Fundnummer in FDB')\n", (2533, 2595), False, 'from django.db import models\n'), ((2608, 2680), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Place'], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""KG/Areal"""'}), "(Place, blank=True, null=True, verbose_name='KG/Areal')\n", (2625, 2680), False, 'from django.db import models\n'), ((2713, 2792), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'blank': '(True)', 'verbose_name': '"""Aufbewahrung Datei"""'}), "(max_length=300, blank=True, verbose_name='Aufbewahrung Datei')\n", (2729, 2792), False, 'from django.db import models\n'), ((2831, 2907), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'blank': '(True)', 'verbose_name': '"""Standort analog"""'}), "(max_length=300, blank=True, verbose_name='Standort analog')\n", (2847, 2907), False, 'from django.db import models\n'), ((2922, 3018), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Person'], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Bearbeiter Digitalisierung"""'}), "(Person, blank=True, null=True, verbose_name=\n 'Bearbeiter Digitalisierung')\n", (2939, 3018), False, 'from django.db import models\n'), ((3043, 3113), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Dateigröße KB"""'}), "(blank=True, null=True, verbose_name='Dateigröße KB')\n", (3060, 3113), False, 'from django.db import models\n'), ((3139, 3272), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Institution'], {'blank': '(True)', 'null': '(True)', 'related_name': '"""place_digizization"""', 'verbose_name': '"""Ort der Digitalisierung"""'}), "(Institution, blank=True, null=True, related_name=\n 'place_digizization', verbose_name='Ort der Digitalisierung')\n", (3156, 3272), False, 'from django.db import models\n'), ((3306, 3378), 
'django.db.models.ManyToManyField', 'models.ManyToManyField', (['Book'], {'blank': '(True)', 'verbose_name': '"""Literaturzitate"""'}), "(Book, blank=True, verbose_name='Literaturzitate')\n", (3328, 3378), False, 'from django.db import models\n'), ((3390, 3460), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'blank': '(True)', 'verbose_name': '"""Dateipfad"""'}), "(max_length=300, blank=True, verbose_name='Dateipfad')\n", (3406, 3460), False, 'from django.db import models\n'), ((3478, 3534), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'verbose_name': '"""Ergänzungen"""'}), "(blank=True, verbose_name='Ergänzungen')\n", (3494, 3534), False, 'from django.db import models\n'), ((3649, 3709), 'django.core.urlresolvers.reverse', 'reverse', (['"""documents:document_detail"""'], {'kwargs': "{'pk': self.id}"}), "('documents:document_detail', kwargs={'pk': self.id})\n", (3656, 3709), False, 'from django.core.urlresolvers import reverse\n')] |
import threading
import app.models
import app.view
import calendar
from datetime import date, datetime
from app.models import session
from app.models.booking import Booking
from app.models.returns import Returns
def init_database():
    # NOTE(review): this is a no-op attribute access — ``app.models`` was
    # already initialised when imported at module load. Presumably meant
    # to trigger package-level setup; confirm whether an explicit call
    # is intended here.
    app.models.__init__
def init_gui():
    # NOTE(review): no-op attribute access — ``app.view`` is already
    # imported above; confirm whether an explicit initialiser call is
    # intended instead.
    app.view.__init__
def generate_invoices():
    """Placeholder for end-of-month invoice generation (not implemented).

    The commented-out draft below sketches the intended logic: on the
    last day of the month, collect returns and charge for days beyond
    the booked duration.
    """
    # today = date.today()
    # this_month = today.strftime("%m")
    # if int(today.strftime("%d")) is int(get_last_day_of_month(int(this_month))):
    #     _returns = session.query(Returns).filter(Returns.date < get_last_day_of_month(int(this_month))).all()
    #     _bookings = []
    #     for returns in _returns:
    #         amount = 0
    #         booking = session.query(Booking).filter(Booking.id == returns.booking_id)
    #         # returns.booking_id
    #         if returns.date > booking.booked_date + datetime.timedelta(days=float(booking.duration_of_booking)):
    #             amount += booking.daily_price
    pass
def get_last_day_of_month(month, year=2020):
    """Return the last day (28-31) of *month* in *year*.

    ``year`` defaults to 2020 to stay backward compatible with the
    previous hard-coded behaviour; pass the actual year so leap
    years are handled correctly.
    """
    # monthrange returns (weekday_of_first_day, number_of_days)
    return calendar.monthrange(year, month)[1]
if __name__ == '__main__':
    # using separate threads for each package to improve the performance
    # NOTE(review): all three threads are daemonic and never joined, so
    # the main thread may exit before any of them finishes — confirm
    # whether one of them is expected to keep the process alive.
    t = threading.Thread(target=init_database, args=())
    t.daemon = True
    t.start()
    t = threading.Thread(target=init_gui, args=())
    t.daemon = True
    t.start()
    t = threading.Thread(target=generate_invoices, args=())
    t.daemon = True
    t.start()
| [
"threading.Thread",
"calendar.monthrange"
] | [((1163, 1210), 'threading.Thread', 'threading.Thread', ([], {'target': 'init_database', 'args': '()'}), '(target=init_database, args=())\n', (1179, 1210), False, 'import threading\n'), ((1254, 1296), 'threading.Thread', 'threading.Thread', ([], {'target': 'init_gui', 'args': '()'}), '(target=init_gui, args=())\n', (1270, 1296), False, 'import threading\n'), ((1340, 1391), 'threading.Thread', 'threading.Thread', ([], {'target': 'generate_invoices', 'args': '()'}), '(target=generate_invoices, args=())\n', (1356, 1391), False, 'import threading\n'), ((1017, 1049), 'calendar.monthrange', 'calendar.monthrange', (['(2020)', 'month'], {}), '(2020, month)\n', (1036, 1049), False, 'import calendar\n')] |
from resotolib.config import Config
from resoto_plugin_example_collector import ExampleCollectorPlugin
def test_config():
    """The example collector plugin registers its config section cleanly."""
    cfg = Config("dummy", "dummy")
    ExampleCollectorPlugin.add_config(cfg)
    Config.init_default_config()
    # assert Config.example.region is None
| [
"resotolib.config.Config",
"resotolib.config.Config.init_default_config",
"resoto_plugin_example_collector.ExampleCollectorPlugin.add_config"
] | [((137, 161), 'resotolib.config.Config', 'Config', (['"""dummy"""', '"""dummy"""'], {}), "('dummy', 'dummy')\n", (143, 161), False, 'from resotolib.config import Config\n'), ((166, 207), 'resoto_plugin_example_collector.ExampleCollectorPlugin.add_config', 'ExampleCollectorPlugin.add_config', (['config'], {}), '(config)\n', (199, 207), False, 'from resoto_plugin_example_collector import ExampleCollectorPlugin\n'), ((212, 240), 'resotolib.config.Config.init_default_config', 'Config.init_default_config', ([], {}), '()\n', (238, 240), False, 'from resotolib.config import Config\n')] |
from unittest import TestCase, main
from urllib.parse import parse_qs, quote_plus, urlencode, urljoin
from expects import expect, be_empty, contain, equal
from twin_sister.expects_matchers import raise_ex
from questions_three.html_form import HtmlForm
import questions_three.html_form.exceptions as exceptions
from twin_sister.expects_matchers import contain_key_with_value
from twin_sister.fakes import EmptyFake
class FakeResponse(EmptyFake):
    """Test double exposing ``content``, ``text`` and ``status_code``."""
    def __init__(self, content, status_code=200):
        self.status_code = status_code
        self.text = self.content = content
    def __bool__(self):
        # always truthy, whatever the status code
        return True
class FakeHttpClient:
    """Records HTTP calls and serves canned responses for tests."""
    def __init__(self):
        self.get_requests = []
        self.post_requests = []
        self.put_requests = []
        self.get_responses = {}
        self.post_response = None
    def get(self, url, *args, **kwargs):
        self.get_requests.append((url, args, kwargs))
        try:
            return self.get_responses[url]
        except KeyError:
            return FakeResponse("not found", status_code=404)
    def post(self, url, *args, **kwargs):
        self.post_requests.append((url, args, kwargs))
        if self.post_response:
            return self.post_response
        return FakeResponse("nothing to see here")
    def put(self, url, *args, **kwargs):
        self.put_requests.append((url, args, kwargs))
        return FakeResponse("nothing to see here")
def extract_query(request):
    """Return the parsed urlencoded 'data' payload of a recorded request."""
    _url, _args, kwargs = request
    # fail loudly if the request carried no form payload
    expect(kwargs.keys()).to(contain("data"))
    return parse_qs(kwargs["data"])
class TestForm(TestCase):
    def test_complains_if_form_not_found(self):
        """FormNotFound is raised when the xpath matches no form."""
        fake = FakeHttpClient()
        url = "http://something"
        fake.get_responses[url] = FakeResponse(
            """
            <html><body><form id="wrong" action="spam" method="POST"/>
            </body></html>
            """
        )
        def attempt():
            HtmlForm(http_client=fake, url=url, xpath="//form[@name='right']")
        expect(attempt).to(raise_ex(exceptions.FormNotFound))
    def test_complains_if_method_not_specified(self):
        """A form element without a method attribute raises."""
        fake = FakeHttpClient()
        url = "http://something"
        fake.get_responses[url] = FakeResponse(
            """
            <html><body><form action="spam"/></body></html>
            """
        )
        def attempt():
            HtmlForm(http_client=fake, url=url, xpath="//form")
        expect(attempt).to(raise_ex(exceptions.FormElementLacksMethod))
    def test_uses_specified_http_method_ignoring_case(self):
        """The form's method attribute is honoured case-insensitively."""
        fake = FakeHttpClient()
        url = "http://something"
        fake.get_responses[url] = FakeResponse(
            """
            <html><body><form action="spam" method="pUt"/></body></html>
            """
        )
        form = HtmlForm(http_client=fake, url=url, xpath="//form")
        form.submit()
        # "pUt" should be normalised and routed to the client's put()
        expect(fake.put_requests).not_to(be_empty)
    def test_complains_if_action_not_specified(self):
        """A form element without an action attribute raises."""
        fake = FakeHttpClient()
        url = "http://something"
        fake.get_responses[url] = FakeResponse(
            """
            <html><body><form method="pUt"/></body></html>
            """
        )
        def attempt():
            HtmlForm(http_client=fake, url=url, xpath="//form")
        expect(attempt).to(raise_ex(exceptions.FormElementLacksAction))
    def test_handles_absolute_action_url(self):
        """An absolute action URL is used verbatim as the submit target."""
        action = "http://go.net/somewhere/warm"
        fake = FakeHttpClient()
        url = "http://something"
        fake.get_responses[url] = FakeResponse(
            """
            <html><body><form action="%s" method="POST"/></body></html>
            """
            % action
        )
        form = HtmlForm(http_client=fake, url=url, xpath="//form")
        form.submit()
        actual, args, kwargs = fake.post_requests[-1]
        expect(actual).to(equal(action))
    def test_handles_action_path_with_leading_slash(self):
        """A root-relative action resolves against the page's scheme+host."""
        action = "/the/primrose/path"
        fake = FakeHttpClient()
        scheme_and_host = "https://std.io"
        url = "%s/yogurt?things=stuff" % scheme_and_host
        fake.get_responses[url] = FakeResponse(
            """
            <html><body><form action="%s" method="POST"/></body></html>
            """
            % action
        )
        form = HtmlForm(http_client=fake, url=url, xpath="//form")
        form.submit()
        actual, args, kwargs = fake.post_requests[-1]
        expect(actual).to(equal(urljoin(scheme_and_host, action)))
    def test_handles_action_path_without_leading_slash(self):
        """A relative action resolves against the page URL's directory."""
        action = "form-processor/lives.here"
        fake = FakeHttpClient()
        scheme_and_host = "https://std.io"
        path = "parent-directory/generator"
        url = "%s/%s?things=stuff" % (scheme_and_host, path)
        fake.get_responses[url] = FakeResponse(
            """
            <html><body><form action="%s" method="POST"/></body></html>
            """
            % action
        )
        form = HtmlForm(http_client=fake, url=url, xpath="//form")
        form.submit()
        actual, args, kwargs = fake.post_requests[-1]
        # expected: relative resolution in two steps (base dir, then action)
        expect(actual).to(equal(urljoin(urljoin(scheme_and_host, path), action)))
    def test_sends_data_set_for_existing_field(self):
        """Values set on a declared input field are posted with the form."""
        fake = FakeHttpClient()
        url = "http://yadda.dada"
        field_name = "dinks"
        value = "aargamuffin"
        fake.get_responses[url] = FakeResponse(
            """
            <html><body>
            <form action="spam" method="POST">
            <input type="text" name="%s"/>
            </form>
            </body></html>
            """
            % field_name
        )
        form = HtmlForm(http_client=fake, url=url, xpath="//form")
        form.fields[field_name] = value
        form.submit()
        fields = extract_query(fake.post_requests[-1])
        # parse_qs wraps each value in a list
        expect(fields).to(contain_key_with_value(field_name, [value]))
    def test_sends_data_for_non_existing_field(self):
        """Values set on a field absent from the markup are still posted."""
        fake = FakeHttpClient()
        url = "http://yadda.dada"
        field_name = "dinks"
        value = "aargamuffin"
        fake.get_responses[url] = FakeResponse(
            """
            <html><body>
            <form action="spam" method="POST">
            <input type="text" name="something-else"/>
            </form>
            </body></html>
            """
        )
        form = HtmlForm(http_client=fake, url=url, xpath="//form")
        form.fields[field_name] = value
        form.submit()
        fields = extract_query(fake.post_requests[-1])
        expect(fields).to(contain_key_with_value(field_name, [value]))
def test_sends_data_for_hidden_field(self):
    # Hidden inputs carrying a value attribute are submitted
    # automatically, without any form.fields[...] assignment.
    fake = FakeHttpClient()
    url = "http://yadda.dada"
    field_name = "taunt"
    value = "Your mother was a hamster"
    fake.get_responses[url] = FakeResponse(
        """
        <html><body>
        <form action="spam" method="POST">
        <div><input type="hidden" name="%s" value="%s"/></div>
        </form>
        </body></html>
        """
        % (field_name, value)
    )
    form = HtmlForm(http_client=fake, url=url, xpath="//form")
    form.submit()
    fields = extract_query(fake.post_requests[-1])
    expect(fields).to(contain_key_with_value(field_name, [value]))
def test_sends_data_for_other_populated_input_field(self):
    # An <input> of any (even unknown) type with a value attribute is
    # submitted — here type="spam" exercises the catch-all behaviour.
    fake = FakeHttpClient()
    url = "http://yadda.dada"
    field_name = "taunt"
    value = "Your father smelled of elderberries"
    fake.get_responses[url] = FakeResponse(
        """
        <html><body>
        <form action="spam" method="POST">
        <div><input type="spam" name="%s" value="%s"/></div>
        </form>
        </body></html>
        """
        % (field_name, value)
    )
    form = HtmlForm(http_client=fake, url=url, xpath="//form")
    form.submit()
    fields = extract_query(fake.post_requests[-1])
    expect(fields).to(contain_key_with_value(field_name, [value]))
def test_ignores_elements_outside_specified_form(self):
    # An input that sits outside the <form> element matched by the
    # xpath must not leak into the submitted payload.
    fake = FakeHttpClient()
    url = "http://yadda.dada"
    field_name = "taunt"
    fake.get_responses[url] = FakeResponse(
        """
        <html><body>
        <form action="spam" method="POST"/>
        <input type="hidden" name="%s" value="oops"/>
        </body></html>
        """
        % field_name
    )
    form = HtmlForm(http_client=fake, url=url, xpath="//form")
    form.submit()
    fields = extract_query(fake.post_requests[-1])
    expect(fields.keys()).not_to(contain(field_name))
def test_sends_content_type(self):
    # The POST must carry the standard form-submission content type
    # header: application/x-www-form-urlencoded.
    fake = FakeHttpClient()
    url = "http://yadda.dada"
    fake.get_responses[url] = FakeResponse(
        """
        <html><body><form action="spam" method="POST"/></body></html>
        """
    )
    form = HtmlForm(http_client=fake, url=url, xpath="//form")
    form.submit()
    _, args, kwargs = fake.post_requests[-1]
    expect(kwargs.keys()).to(contain("headers"))
    expect(kwargs["headers"]).to(contain_key_with_value("Content-type", "application/x-www-form-urlencoded"))
def test_urlencodes_payload(self):
    # Field values containing URL-hostile characters must be sent as a
    # urlencode(..., quote_via=quote_plus) body.
    fake = FakeHttpClient()
    url = "http://yadda.dada"
    fake.get_responses[url] = FakeResponse(
        """
        <html><body>
        <form action="spam" method="POST"/>
        </body></html>
        """
    )
    form = HtmlForm(http_client=fake, url=url, xpath="//form")
    field_name = "bad-things"
    value = "Things that don't belong in a URL: \\/!@#$%^&*()<>,}{\"?"
    form.fields[field_name] = value
    form.submit()
    _, args, kwargs = fake.post_requests[-1]
    expect(kwargs.keys()).to(contain("data"))
    expect(kwargs["data"]).to(equal(urlencode({field_name: value}, quote_via=quote_plus)))
def test_returns_response(self):
    # submit() must hand back the HTTP client's POST response unchanged.
    fake = FakeHttpClient()
    url = "http://yadda.dada"
    fake.get_responses[url] = FakeResponse(
        """
        <html><body>
        <form action="spam" method="POST"/>
        </body></html>
        """
    )
    form = HtmlForm(http_client=fake, url=url, xpath="//form")
    response = FakeResponse("winner!")
    fake.post_response = response
    expect(form.submit()).to(equal(response))
# Allow the suite to be run directly as a script.
if __name__ == "__main__":
    main()
| [
"questions_three.html_form.HtmlForm",
"urllib.parse.urlencode",
"twin_sister.expects_matchers.contain_key_with_value",
"urllib.parse.parse_qs",
"urllib.parse.urljoin",
"expects.expect",
"unittest.main",
"expects.contain",
"expects.equal",
"twin_sister.expects_matchers.raise_ex"
] | [((1529, 1553), 'urllib.parse.parse_qs', 'parse_qs', (["kwargs['data']"], {}), "(kwargs['data'])\n", (1537, 1553), False, 'from urllib.parse import parse_qs, quote_plus, urlencode, urljoin\n'), ((10587, 10593), 'unittest.main', 'main', ([], {}), '()\n', (10591, 10593), False, 'from unittest import TestCase, main\n'), ((1501, 1516), 'expects.contain', 'contain', (['"""data"""'], {}), "('data')\n", (1508, 1516), False, 'from expects import expect, be_empty, contain, equal\n'), ((2785, 2836), 'questions_three.html_form.HtmlForm', 'HtmlForm', ([], {'http_client': 'fake', 'url': 'url', 'xpath': '"""//form"""'}), "(http_client=fake, url=url, xpath='//form')\n", (2793, 2836), False, 'from questions_three.html_form import HtmlForm\n'), ((3700, 3751), 'questions_three.html_form.HtmlForm', 'HtmlForm', ([], {'http_client': 'fake', 'url': 'url', 'xpath': '"""//form"""'}), "(http_client=fake, url=url, xpath='//form')\n", (3708, 3751), False, 'from questions_three.html_form import HtmlForm\n'), ((4297, 4348), 'questions_three.html_form.HtmlForm', 'HtmlForm', ([], {'http_client': 'fake', 'url': 'url', 'xpath': '"""//form"""'}), "(http_client=fake, url=url, xpath='//form')\n", (4305, 4348), False, 'from questions_three.html_form import HtmlForm\n'), ((4978, 5029), 'questions_three.html_form.HtmlForm', 'HtmlForm', ([], {'http_client': 'fake', 'url': 'url', 'xpath': '"""//form"""'}), "(http_client=fake, url=url, xpath='//form')\n", (4986, 5029), False, 'from questions_three.html_form import HtmlForm\n'), ((5676, 5727), 'questions_three.html_form.HtmlForm', 'HtmlForm', ([], {'http_client': 'fake', 'url': 'url', 'xpath': '"""//form"""'}), "(http_client=fake, url=url, xpath='//form')\n", (5684, 5727), False, 'from questions_three.html_form import HtmlForm\n'), ((6391, 6442), 'questions_three.html_form.HtmlForm', 'HtmlForm', ([], {'http_client': 'fake', 'url': 'url', 'xpath': '"""//form"""'}), "(http_client=fake, url=url, xpath='//form')\n", (6399, 6442), False, 'from 
questions_three.html_form import HtmlForm\n'), ((7160, 7211), 'questions_three.html_form.HtmlForm', 'HtmlForm', ([], {'http_client': 'fake', 'url': 'url', 'xpath': '"""//form"""'}), "(http_client=fake, url=url, xpath='//form')\n", (7168, 7211), False, 'from questions_three.html_form import HtmlForm\n'), ((7912, 7963), 'questions_three.html_form.HtmlForm', 'HtmlForm', ([], {'http_client': 'fake', 'url': 'url', 'xpath': '"""//form"""'}), "(http_client=fake, url=url, xpath='//form')\n", (7920, 7963), False, 'from questions_three.html_form import HtmlForm\n'), ((8564, 8615), 'questions_three.html_form.HtmlForm', 'HtmlForm', ([], {'http_client': 'fake', 'url': 'url', 'xpath': '"""//form"""'}), "(http_client=fake, url=url, xpath='//form')\n", (8572, 8615), False, 'from questions_three.html_form import HtmlForm\n'), ((9036, 9087), 'questions_three.html_form.HtmlForm', 'HtmlForm', ([], {'http_client': 'fake', 'url': 'url', 'xpath': '"""//form"""'}), "(http_client=fake, url=url, xpath='//form')\n", (9044, 9087), False, 'from questions_three.html_form import HtmlForm\n'), ((9641, 9692), 'questions_three.html_form.HtmlForm', 'HtmlForm', ([], {'http_client': 'fake', 'url': 'url', 'xpath': '"""//form"""'}), "(http_client=fake, url=url, xpath='//form')\n", (9649, 9692), False, 'from questions_three.html_form import HtmlForm\n'), ((10371, 10422), 'questions_three.html_form.HtmlForm', 'HtmlForm', ([], {'http_client': 'fake', 'url': 'url', 'xpath': '"""//form"""'}), "(http_client=fake, url=url, xpath='//form')\n", (10379, 10422), False, 'from questions_three.html_form import HtmlForm\n'), ((1919, 1985), 'questions_three.html_form.HtmlForm', 'HtmlForm', ([], {'http_client': 'fake', 'url': 'url', 'xpath': '"""//form[@name=\'right\']"""'}), '(http_client=fake, url=url, xpath="//form[@name=\'right\']")\n', (1927, 1985), False, 'from questions_three.html_form import HtmlForm\n'), ((2014, 2047), 'twin_sister.expects_matchers.raise_ex', 'raise_ex', (['exceptions.FormNotFound'], {}), 
'(exceptions.FormNotFound)\n', (2022, 2047), False, 'from twin_sister.expects_matchers import raise_ex\n'), ((2355, 2406), 'questions_three.html_form.HtmlForm', 'HtmlForm', ([], {'http_client': 'fake', 'url': 'url', 'xpath': '"""//form"""'}), "(http_client=fake, url=url, xpath='//form')\n", (2363, 2406), False, 'from questions_three.html_form import HtmlForm\n'), ((2435, 2478), 'twin_sister.expects_matchers.raise_ex', 'raise_ex', (['exceptions.FormElementLacksMethod'], {}), '(exceptions.FormElementLacksMethod)\n', (2443, 2478), False, 'from twin_sister.expects_matchers import raise_ex\n'), ((3215, 3266), 'questions_three.html_form.HtmlForm', 'HtmlForm', ([], {'http_client': 'fake', 'url': 'url', 'xpath': '"""//form"""'}), "(http_client=fake, url=url, xpath='//form')\n", (3223, 3266), False, 'from questions_three.html_form import HtmlForm\n'), ((3295, 3338), 'twin_sister.expects_matchers.raise_ex', 'raise_ex', (['exceptions.FormElementLacksAction'], {}), '(exceptions.FormElementLacksAction)\n', (3303, 3338), False, 'from twin_sister.expects_matchers import raise_ex\n'), ((3854, 3867), 'expects.equal', 'equal', (['action'], {}), '(action)\n', (3859, 3867), False, 'from expects import expect, be_empty, contain, equal\n'), ((5871, 5914), 'twin_sister.expects_matchers.contain_key_with_value', 'contain_key_with_value', (['field_name', '[value]'], {}), '(field_name, [value])\n', (5893, 5914), False, 'from twin_sister.expects_matchers import contain_key_with_value\n'), ((6586, 6629), 'twin_sister.expects_matchers.contain_key_with_value', 'contain_key_with_value', (['field_name', '[value]'], {}), '(field_name, [value])\n', (6608, 6629), False, 'from twin_sister.expects_matchers import contain_key_with_value\n'), ((7315, 7358), 'twin_sister.expects_matchers.contain_key_with_value', 'contain_key_with_value', (['field_name', '[value]'], {}), '(field_name, [value])\n', (7337, 7358), False, 'from twin_sister.expects_matchers import contain_key_with_value\n'), ((8067, 8110), 
'twin_sister.expects_matchers.contain_key_with_value', 'contain_key_with_value', (['field_name', '[value]'], {}), '(field_name, [value])\n', (8089, 8110), False, 'from twin_sister.expects_matchers import contain_key_with_value\n'), ((8730, 8749), 'expects.contain', 'contain', (['field_name'], {}), '(field_name)\n', (8737, 8749), False, 'from expects import expect, be_empty, contain, equal\n'), ((9192, 9210), 'expects.contain', 'contain', (['"""headers"""'], {}), "('headers')\n", (9199, 9210), False, 'from expects import expect, be_empty, contain, equal\n'), ((9249, 9324), 'twin_sister.expects_matchers.contain_key_with_value', 'contain_key_with_value', (['"""Content-type"""', '"""application/x-www-form-urlencoded"""'], {}), "('Content-type', 'application/x-www-form-urlencoded')\n", (9271, 9324), False, 'from twin_sister.expects_matchers import contain_key_with_value\n'), ((9946, 9961), 'expects.contain', 'contain', (['"""data"""'], {}), "('data')\n", (9953, 9961), False, 'from expects import expect, be_empty, contain, equal\n'), ((10537, 10552), 'expects.equal', 'equal', (['response'], {}), '(response)\n', (10542, 10552), False, 'from expects import expect, be_empty, contain, equal\n'), ((1995, 2010), 'expects.expect', 'expect', (['attempt'], {}), '(attempt)\n', (2001, 2010), False, 'from expects import expect, be_empty, contain, equal\n'), ((2416, 2431), 'expects.expect', 'expect', (['attempt'], {}), '(attempt)\n', (2422, 2431), False, 'from expects import expect, be_empty, contain, equal\n'), ((2867, 2892), 'expects.expect', 'expect', (['fake.put_requests'], {}), '(fake.put_requests)\n', (2873, 2892), False, 'from expects import expect, be_empty, contain, equal\n'), ((3276, 3291), 'expects.expect', 'expect', (['attempt'], {}), '(attempt)\n', (3282, 3291), False, 'from expects import expect, be_empty, contain, equal\n'), ((3836, 3850), 'expects.expect', 'expect', (['actual'], {}), '(actual)\n', (3842, 3850), False, 'from expects import expect, be_empty, contain, 
equal\n'), ((4433, 4447), 'expects.expect', 'expect', (['actual'], {}), '(actual)\n', (4439, 4447), False, 'from expects import expect, be_empty, contain, equal\n'), ((4457, 4489), 'urllib.parse.urljoin', 'urljoin', (['scheme_and_host', 'action'], {}), '(scheme_and_host, action)\n', (4464, 4489), False, 'from urllib.parse import parse_qs, quote_plus, urlencode, urljoin\n'), ((5114, 5128), 'expects.expect', 'expect', (['actual'], {}), '(actual)\n', (5120, 5128), False, 'from expects import expect, be_empty, contain, equal\n'), ((5853, 5867), 'expects.expect', 'expect', (['fields'], {}), '(fields)\n', (5859, 5867), False, 'from expects import expect, be_empty, contain, equal\n'), ((6568, 6582), 'expects.expect', 'expect', (['fields'], {}), '(fields)\n', (6574, 6582), False, 'from expects import expect, be_empty, contain, equal\n'), ((7297, 7311), 'expects.expect', 'expect', (['fields'], {}), '(fields)\n', (7303, 7311), False, 'from expects import expect, be_empty, contain, equal\n'), ((8049, 8063), 'expects.expect', 'expect', (['fields'], {}), '(fields)\n', (8055, 8063), False, 'from expects import expect, be_empty, contain, equal\n'), ((9220, 9245), 'expects.expect', 'expect', (["kwargs['headers']"], {}), "(kwargs['headers'])\n", (9226, 9245), False, 'from expects import expect, be_empty, contain, equal\n'), ((9971, 9993), 'expects.expect', 'expect', (["kwargs['data']"], {}), "(kwargs['data'])\n", (9977, 9993), False, 'from expects import expect, be_empty, contain, equal\n'), ((10003, 10055), 'urllib.parse.urlencode', 'urlencode', (['{field_name: value}'], {'quote_via': 'quote_plus'}), '({field_name: value}, quote_via=quote_plus)\n', (10012, 10055), False, 'from urllib.parse import parse_qs, quote_plus, urlencode, urljoin\n'), ((5146, 5176), 'urllib.parse.urljoin', 'urljoin', (['scheme_and_host', 'path'], {}), '(scheme_and_host, path)\n', (5153, 5176), False, 'from urllib.parse import parse_qs, quote_plus, urlencode, urljoin\n')] |
# -*- coding: utf-8 -*-
import re
import logging
import string
import textwrap
from collections import OrderedDict
def representsInt(s):
    """Return True when ``s`` can be converted with ``int()``.

    Fix: also catch TypeError so non-convertible objects such as
    ``None`` or a list report False instead of raising.
    """
    try:
        int(s)
        return True
    except (ValueError, TypeError):
        return False
def representsFloat(s):
    """Return True when ``s`` can be converted with ``float()``.

    Fix: also catch TypeError so non-convertible objects such as
    ``None`` or a list report False instead of raising.
    """
    try:
        float(s)
        return True
    except (ValueError, TypeError):
        return False
# Pre-compiled once at import time; matches one or more digit characters.
re_digits = re.compile(r'^\d+$')


def hasOnlyDigits(s):
    """Return True when ``s`` consists entirely of digits (non-empty).

    Fixes: raw-string regex pattern (avoids the invalid-escape
    deprecation for ``\\d``) and ``is not None`` instead of ``!= None``.
    """
    return re_digits.match(s) is not None
def representsIntBetween(s, low, high):
    """True when ``s`` parses as an int lying in the closed range [low, high]."""
    try:
        value = int(s)
    except ValueError:
        return False
    return low <= value <= high
def representsFloatBetween(s, low, high):
    """True when ``s`` parses as a float lying in the closed range [low, high]."""
    try:
        value = float(s)
    except ValueError:
        return False
    return low <= value <= high
def numberEnumeration(list):
    """Pair each element with its 1-based position rendered as a string."""
    numbered = []
    position = 1
    for element in list:
        numbered.append((str(position), element))
        position += 1
    return numbered
def letterEnumeration(list):
    """Pair each element with an uppercase letter label: 'A', 'B', ..."""
    labelled = []
    for offset, element in enumerate(list):
        labelled.append((chr(65 + offset), element))  # chr(65) == 'A'
    return labelled
def getIndexIfIntOrLetterInRange(input, max):
    """Map a user's choice to a 1-based index, or None.

    Accepts either a number "1".."max" or a letter 'A'..chr(64+max);
    anything else (out of range, unparsable) yields None.
    """
    try:
        numeric = int(input)
    except ValueError:
        numeric = None
    if numeric is not None and 1 <= numeric <= max:
        return numeric
    letters = [chr(code) for code in range(65, 65 + max)]  # 'A', 'B', ...
    if input in letters:
        return ord(input) - 64  # ord('A') == 65 -> index 1
    return None
def makeArray2D(data_list, length=2):
    """Chunk ``data_list`` into consecutive sublists of at most ``length`` items."""
    chunks = []
    for start in range(0, len(data_list), length):
        chunks.append(data_list[start:start + length])
    return chunks
def distributeElementMaxSize(seq, maxSize=5):
    """Split ``seq`` into the minimum number of chunks of at most
    ``maxSize`` elements, keeping chunk sizes as even as possible.

    Fix: the line count must be computed with integer division.  With
    true division (Python 3 ``/``) a 7-element sequence and maxSize=5
    yielded lines = 1.4 + 1 = 2.4 and wrong chunking.
    """
    if not seq:
        return []
    lines = len(seq) // maxSize  # bugfix: was "/" (true division on Py3)
    if len(seq) % maxSize > 0:
        lines += 1
    avg = len(seq) / float(lines)  # ideal (fractional) chunk length
    out = []
    last = 0.0
    while last < len(seq):
        out.append(seq[int(last):int(last + avg)])
        last += avg
    return out
def segmentArrayOnMaxChars(array, maxChar=20, ignoreString=None):
    """Group the strings in ``array`` into lines of at most ``maxChar``
    characters, returning a list of lists (one inner list per line).

    ``ignoreString``, when given, is stripped from an element before
    measuring its length (so e.g. markup does not count towards the
    visual width).  The first element of a line is always accepted,
    even if it alone exceeds ``maxChar``.

    NOTE(review): ``t_strip.decode('utf-8')`` assumes Python 2 byte
    strings; on Python 3 a ``str`` has no ``.decode`` — confirm the
    target runtime before porting.
    """
    result = []
    lineCharCount = 0  # characters accumulated on the current line
    currentLine = []
    for t in array:
        # Measure length on the element with ignoreString removed.
        t_strip = t.replace(ignoreString, '') if ignoreString and ignoreString in t else t
        t_strip_size = len(t_strip.decode('utf-8'))
        newLineCharCount = lineCharCount + t_strip_size
        if not currentLine:
            # An empty line always takes the element, even oversized ones.
            currentLine.append(t)
            lineCharCount = newLineCharCount
        elif newLineCharCount > maxChar:
            # Element would overflow the line: flush and start a new one.
            result.append(currentLine)
            currentLine = [t]
            lineCharCount = t_strip_size
        else:
            lineCharCount = newLineCharCount
            currentLine.append(t)
    if currentLine:
        # Flush the trailing, partially-filled line.
        result.append(currentLine)
    return result
# Pre-compiled once at import time; matches any single whitespace character.
reSplitSpace = re.compile(r"\s")


def splitTextOnSpaces(text):
    """Split ``text`` on every single whitespace character.

    Unlike ``str.split()``, consecutive whitespace produces empty
    strings in the result.

    Fix: raw-string regex pattern (idiomatic, avoids the
    invalid-escape deprecation warning); behavior is unchanged.
    """
    return reSplitSpace.split(text)
def escapeMarkdown(text):
    """Backslash-escape the Markdown special characters *, _, ` and [."""
    return ''.join('\\' + ch if ch in '*_`[' else ch for ch in text)
def containsMarkdown(text):
    """True when ``text`` contains any Markdown special character (*, _, `, [)."""
    return any(special in text for special in '*_`[')
def getHourMinFromMin(minutes):
    """Split a (non-negative) minute count into (hours, minutes)."""
    hours = int(minutes / 60)
    leftover = minutes % 60
    return hours, leftover
def getSiNoFromBoolean(bool_value):
    """Render a truthy value as the Italian 'SI', a falsy one as 'NO'."""
    if bool_value:
        return 'SI'
    return 'NO'
def getTimeStringFormatHHMM(minutes, rjust=False):
    """Format a (possibly negative) minute count as 'H:MM'.

    With ``rjust=True`` the signed hour part is right-justified to
    width 3 for column alignment.
    """
    total = abs(minutes)
    hours = int(total / 60)
    mins = total % 60
    prefix = '-' if minutes < 0 else ''
    hourPart = prefix + str(hours)
    if rjust:
        hourPart = hourPart.rjust(3)
    return "{}:{}".format(hourPart, str(mins).zfill(2))
def unindent(s):
    """Dedent ``s`` and collapse every run of spaces into a single space."""
    dedented = textwrap.dedent(s)
    return re.sub('[ ]+', ' ', dedented)
def convert_data_to_spreadsheet(sheet_tables):
    """Serialise ``sheet_tables`` ({sheet name: 2D array}) into XLS bytes.

    Python 2 only: relies on the ``StringIO`` module and
    ``dict.iteritems``.
    """
    import StringIO
    from pyexcel_xls import save_data
    workbook = OrderedDict()
    for sheet_name, rows in sheet_tables.iteritems():
        workbook.update({sheet_name: rows})
    out_buffer = StringIO.StringIO()
    save_data(out_buffer, workbook, encoding="UTF-8")
    return out_buffer.getvalue()
def convert_arrayData_to_tsv(array):
    """Render a 2D array as tab-separated text (Python 2 ``StringIO``)."""
    import csv
    import StringIO
    out_buffer = StringIO.StringIO()
    tsv_writer = csv.writer(out_buffer, dialect='excel-tab')
    tsv_writer.writerows(array)
    return out_buffer.getvalue()
def roundup(x, upTo):
    """Round ``x`` up to the next multiple of ``upTo``."""
    import math
    multiples = math.ceil(x / float(upTo))
    return int(multiples) * upTo
def emptyStringIfNone(x):
    """Map ``None`` to '' ; pass every other value through unchanged.

    Fix: identity comparison ``is None`` instead of ``== None`` — the
    equality form can be hijacked by a custom ``__eq__``.
    """
    return '' if x is None else x
def emptyStringIfZero(x):
    """Map 0 to '' ; pass every other value through unchanged."""
    if x == 0:
        return ''
    return x
def convertToUtfIfNeeded(s):
    """Encode a Python 2 ``unicode`` object to a UTF-8 byte string.

    Byte strings and other types are returned unchanged.
    NOTE(review): relies on the Py2-only ``unicode`` builtin; under
    Python 3 this raises NameError — confirm runtime before porting.
    """
    if isinstance(s, unicode):
        s = s.encode('utf-8')
    return s
def flatten(L):
    """Recursively flatten arbitrarily nested lists into one flat list."""
    flat = []
    for item in L:
        if not isinstance(item, list):
            flat.append(item)
        else:
            flat += flatten(item)
    return flat
def matchInputToChoices(input, choices):
    """Match ``input`` against ``choices``, exactly first, then fuzzily.

    Returns (match, perfectMatch):
      * (input, True)  when ``input`` is exactly one of ``choices``;
      * (best, False)  when a fuzzy match scores above the threshold;
      * (None, False)  otherwise, including any matching error.

    Fix: the bare ``except:`` also swallowed SystemExit and
    KeyboardInterrupt; narrowed to ``except Exception``.
    """
    perfectMatch = True
    if input in choices:
        return input, perfectMatch
    perfectMatch = False
    from fuzzywuzzy import process
    threshold = 75
    # choices = ["Atlanta Falcons", "New York Jets", "New York Giants", "Dallas Cowboys"]
    # process.extract("new york jets", choices, limit=2)
    # -> [('New York Jets', 100), ('New York Giants', 78)]
    try:
        results = process.extract(input, choices, limit=2)
    except Exception:  # bugfix: was a bare "except:"
        return None, False
    if results and results[0][1] > threshold:
        # and (len(results)==1 or results[0][1]>results[1][1]): # no more than one
        return results[0][0], perfectMatch
    return None, perfectMatch
def format_distance(dst_km):
    """Human-readable distance: >=10 km whole Km, >=1 km one decimal Km,
    otherwise integer metres."""
    if dst_km < 1:
        return str(int(dst_km * 1000)) + " m"
    if dst_km < 10:
        return str(round(dst_km, 1)) + " Km"
    return str(round(dst_km, 0)) + " Km"
def makeListOfList(L):
    """Wrap every element of ``L`` in its own single-item list."""
    return [[element] for element in L]
def removeDuplicatesFromList(list):
    """Drop duplicate entries while preserving first-seen order.

    Works for unhashable elements too (uses list membership, O(n^2)).
    """
    unique = []
    for element in list:
        if element not in unique:
            unique.append(element)
    return unique
"StringIO.StringIO",
"textwrap.dedent",
"collections.OrderedDict",
"re.compile",
"csv.writer",
"pyexcel_xls.save_data",
"fuzzywuzzy.process.extract"
] | [((354, 374), 're.compile', 're.compile', (['"""^\\\\d+$"""'], {}), "('^\\\\d+$')\n", (364, 374), False, 'import re\n'), ((2833, 2850), 're.compile', 're.compile', (['"""\\\\s"""'], {}), "('\\\\s')\n", (2843, 2850), False, 'import re\n'), ((3924, 3937), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (3935, 3937), False, 'from collections import OrderedDict\n'), ((4091, 4110), 'StringIO.StringIO', 'StringIO.StringIO', ([], {}), '()\n', (4108, 4110), False, 'import StringIO\n'), ((4115, 4160), 'pyexcel_xls.save_data', 'save_data', (['output', 'xls_data'], {'encoding': '"""UTF-8"""'}), "(output, xls_data, encoding='UTF-8')\n", (4124, 4160), False, 'from pyexcel_xls import save_data\n'), ((4276, 4295), 'StringIO.StringIO', 'StringIO.StringIO', ([], {}), '()\n', (4293, 4295), False, 'import StringIO\n'), ((4309, 4348), 'csv.writer', 'csv.writer', (['output'], {'dialect': '"""excel-tab"""'}), "(output, dialect='excel-tab')\n", (4319, 4348), False, 'import csv\n'), ((3728, 3746), 'textwrap.dedent', 'textwrap.dedent', (['s'], {}), '(s)\n', (3743, 3746), False, 'import textwrap\n'), ((5319, 5359), 'fuzzywuzzy.process.extract', 'process.extract', (['input', 'choices'], {'limit': '(2)'}), '(input, choices, limit=2)\n', (5334, 5359), False, 'from fuzzywuzzy import process\n')] |
import cv2
import numpy as np
class drawingCanvas():
    """Webcam "air pen": tracks a coloured pen through an HSV colour mask
    and draws its trajectory on a canvas overlaid on the live feed.
    """

    def __init__(self):
        # HSV lower/upper bounds previously calibrated and saved to disk.
        self.penrange = np.load('penrange.npy')
        self.cap = cv2.VideoCapture(0)  # default webcam
        self.canvas = None      # drawing layer; allocated lazily to frame size
        self.x1,self.y1=0,0     # previous pen position; (0,0) means "pen up"
        self.val=1              # 1 = draw (blue), 0 = erase (black line)
        self.draw()             # NOTE: blocks in the capture loop until Esc

    def draw(self):
        """Capture loop: mask the pen, extend the line, show both windows.

        Exits when Esc (key code 27) is pressed.
        """
        while True:
            _, self.frame = self.cap.read()
            self.frame = cv2.flip( self.frame,+1)  # mirror for natural drawing
            if self.canvas is None:
                # First frame: allocate a black canvas of the same shape.
                self.canvas = np.zeros_like(self.frame)
            mask=self.CreateMask()
            contours=self.ContourDetect(mask)
            self.drawLine(contours)
            self.display()
            k = cv2.waitKey(1) & 0xFF
            self.takeAction(k)
            if k == 27:
                break

    def CreateMask(self):
        """Return a binary mask of pixels within the calibrated HSV pen range."""
        hsv = cv2.cvtColor(self.frame, cv2.COLOR_BGR2HSV)
        lower_range = self.penrange[0]
        upper_range = self.penrange[1]
        mask = cv2.inRange(hsv, lower_range, upper_range)
        return mask

    def ContourDetect(self,mask):
        """Return the external contours of the mask (pen blob candidates)."""
        contours, hierarchy = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
        return contours

    def drawLine(self,contours):
        """Extend the drawn line to the largest contour's position.

        Contours with area <= 100 are treated as noise; when the pen is
        lost, the anchor resets to (0,0) so no stray line is drawn when
        it reappears.
        """
        if contours and cv2.contourArea(max(contours, key = cv2.contourArea)) > 100:
            c = max(contours, key = cv2.contourArea)
            x2,y2,w,h = cv2.boundingRect(c)
            if self.x1 == 0 and self.y1 == 0:
                # First sighting after a reset: just record the anchor.
                self.x1,self.y1= x2,y2
            else:
                # Colour is blue when val==1, black (erase) when val==0.
                self.canvas = cv2.line(self.canvas, (self.x1,self.y1),(x2,y2), [255*self.val,0,0], 10)
                self.x1,self.y1= x2,y2
        else:
            self.x1,self.y1 =0,0

    def display(self):
        """Overlay the canvas on the camera frame and show both windows."""
        self.frame = cv2.add(self.frame,self.canvas)
        cv2.imshow('frame',self.frame)
        cv2.imshow('canvas',self.canvas)

    def takeAction(self,k):
        """Handle a key press: 'c' clears the canvas, 'e' toggles eraser mode."""
        # When c is pressed clear the entire canvas
        if k == ord('c'):
            self.canvas = None
        # press e to toggle between eraser mode and writing mode
        if k==ord('e'):
            self.val= int(not self.val)
if __name__ == '__main__':
    # Runs the capture loop until Esc is pressed, then closes all windows.
    drawingCanvas()
    cv2.destroyAllWindows()
| [
"cv2.flip",
"cv2.inRange",
"cv2.boundingRect",
"cv2.line",
"cv2.imshow",
"cv2.waitKey",
"cv2.destroyAllWindows",
"cv2.VideoCapture",
"cv2.cvtColor",
"cv2.findContours",
"numpy.load",
"numpy.zeros_like",
"cv2.add"
] | [((2327, 2350), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (2348, 2350), False, 'import cv2\n'), ((107, 130), 'numpy.load', 'np.load', (['"""penrange.npy"""'], {}), "('penrange.npy')\n", (114, 130), True, 'import numpy as np\n'), ((151, 170), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (167, 170), False, 'import cv2\n'), ((836, 879), 'cv2.cvtColor', 'cv2.cvtColor', (['self.frame', 'cv2.COLOR_BGR2HSV'], {}), '(self.frame, cv2.COLOR_BGR2HSV)\n', (848, 879), False, 'import cv2\n'), ((977, 1019), 'cv2.inRange', 'cv2.inRange', (['hsv', 'lower_range', 'upper_range'], {}), '(hsv, lower_range, upper_range)\n', (988, 1019), False, 'import cv2\n'), ((1113, 1179), 'cv2.findContours', 'cv2.findContours', (['mask', 'cv2.RETR_EXTERNAL', 'cv2.CHAIN_APPROX_SIMPLE'], {}), '(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)\n', (1129, 1179), False, 'import cv2\n'), ((1854, 1886), 'cv2.add', 'cv2.add', (['self.frame', 'self.canvas'], {}), '(self.frame, self.canvas)\n', (1861, 1886), False, 'import cv2\n'), ((1899, 1930), 'cv2.imshow', 'cv2.imshow', (['"""frame"""', 'self.frame'], {}), "('frame', self.frame)\n", (1909, 1930), False, 'import cv2\n'), ((1939, 1972), 'cv2.imshow', 'cv2.imshow', (['"""canvas"""', 'self.canvas'], {}), "('canvas', self.canvas)\n", (1949, 1972), False, 'import cv2\n'), ((384, 408), 'cv2.flip', 'cv2.flip', (['self.frame', '(+1)'], {}), '(self.frame, +1)\n', (392, 408), False, 'import cv2\n'), ((1440, 1459), 'cv2.boundingRect', 'cv2.boundingRect', (['c'], {}), '(c)\n', (1456, 1459), False, 'import cv2\n'), ((479, 504), 'numpy.zeros_like', 'np.zeros_like', (['self.frame'], {}), '(self.frame)\n', (492, 504), True, 'import numpy as np\n'), ((684, 698), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (695, 698), False, 'import cv2\n'), ((1621, 1700), 'cv2.line', 'cv2.line', (['self.canvas', '(self.x1, self.y1)', '(x2, y2)', '[255 * self.val, 0, 0]', '(10)'], {}), '(self.canvas, (self.x1, self.y1), (x2, y2), 
[255 * self.val, 0, 0], 10)\n', (1629, 1700), False, 'import cv2\n')] |
import argparse
from solitude.tools import Solc, GanacheCli, EthLint
from solitude.common import update_global_config
from conftest import (
SOLIDITY_ALL_VERSIONS, GANACHE_ALL_VERSIONS, ETHLINT_ALL_VERSIONS, LOCAL_TOOLDIR)
def main():
    """Ensure every pinned tool version is installed in the local tooldir.

    With ``--nolocks`` the global config is told to skip package
    lockfiles for GanacheCli and EthLint before installing.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--nolocks",
        action="store_true",
        help="Do not use lockfiles in the global config")
    options = parser.parse_args()
    if options.nolocks:
        update_global_config({
            "GanacheCli.PackageLock": None,
            "EthLint.PackageLock": None
        })
    tool_matrix = [
        (Solc, SOLIDITY_ALL_VERSIONS),
        (GanacheCli, GANACHE_ALL_VERSIONS),
        (EthLint, ETHLINT_ALL_VERSIONS)
    ]
    for tool_class, versions in tool_matrix:
        for version in versions:
            tool = tool_class(tooldir=LOCAL_TOOLDIR, version=version)
            if tool.have():
                print("Found %s-%s" % (tool.name, tool.version))
                continue
            print("Installing %s-%s... " % (tool.name, tool.version), end="", flush=True)
            tool.add()
            print("OK")


if __name__ == "__main__":
    main()
| [
"solitude.common.update_global_config",
"argparse.ArgumentParser"
] | [((249, 274), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (272, 274), False, 'import argparse\n'), ((459, 546), 'solitude.common.update_global_config', 'update_global_config', (["{'GanacheCli.PackageLock': None, 'EthLint.PackageLock': None}"], {}), "({'GanacheCli.PackageLock': None, 'EthLint.PackageLock':\n None})\n", (479, 546), False, 'from solitude.common import update_global_config\n')] |
# Copyright 2018 ETH Zurich
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from code_base.links import Links
from code_base.tc_logic import EgressQdisc, IngressQdisc, DefaultClass, TcClass, ClassifierFilter, RedirectFilter
from code_base.constants import Constants
from code_base.virtual_interfaces_manager import VirtualInterfacesManager
from code_base.cmd_executor import CmdExecutor
from code_base.tc_command_generator import TCCommandGenerator
from code_base.systeminfo import get_interface_names
class BandwidthConfigurator:
def __init__(self):
links = Links()
self.links = links
def limit(self):
"""
Limit the bandwidth according to the link_info.json file
:return:
"""
self.reset()
print("###################### Setting up virtual interfaces ######################")
self.links.set_up_virtual_interfaces()
default_bandwidth = 0
try:
with open(Constants.path_to_config_file, "r") as jsonFile:
data = json.load(jsonFile)
default_bandwidth = int(data['DefaultBandwidth'])
except json.JSONDecodeError as e:
print("Reading json file failed!")
exit(1)
for dev in self.links.used_interfaces:
virtual_dev = self.links.virtual_interfaces[dev.name]
print("###################### Configure interface "+dev.name+" ######################")
root_egress_qdisc = EgressQdisc(dev=dev)
ingress_qdisc = IngressQdisc(dev=dev)
virtual_qdisc = EgressQdisc(dev=virtual_dev)
default_egress_class = DefaultClass(dev=dev, bandwidth=default_bandwidth)
default_virtual_class = DefaultClass(dev=virtual_dev, bandwidth=default_bandwidth)
redirect_filter_ipv4 = RedirectFilter(dev=dev, target_dev=virtual_dev, ip_version=4)
redirect_filter_ipv6 = RedirectFilter(dev=dev, target_dev=virtual_dev, ip_version=6)
root_egress_qdisc.add_default_class(default_egress_class)
virtual_qdisc.add_default_class(default_virtual_class)
ingress_qdisc.add_filter(redirect_filter_ipv4)
ingress_qdisc.add_filter(redirect_filter_ipv6)
for link in self.links.links:
if link.dev.name == dev.name and link.is_user_as:
# Configure interface for this link
egress_class = TcClass(dev=dev, classid=link.as_id, bandwidth=link.bandwidth)
egress_filter = ClassifierFilter(dev=dev, ip_addr=link.ip_addr, target_class=link.as_id)
root_egress_qdisc.add_class(egress_class)
root_egress_qdisc.add_filter(egress_filter)
virtual_class = TcClass(dev=virtual_dev, classid=link.as_id, bandwidth=link.bandwidth)
virtual_filter = ClassifierFilter(dev=virtual_dev, ip_addr=link.ip_addr, target_class=link.as_id)
virtual_qdisc.add_class(virtual_class)
virtual_qdisc.add_filter(virtual_filter)
root_egress_qdisc.make()
ingress_qdisc.make()
virtual_qdisc.make()
def reset(self):
"""
Reset previously set bandwidth limitations
:return:
"""
vim = VirtualInterfacesManager()
tcg = TCCommandGenerator()
for dev in self.links.used_interfaces:
print("###################### Reset interface " + dev.name + " ######################")
out = CmdExecutor.run_and_return_result_and_print_command(tcg.delete_root_qdisc(dev.name))
if not out == "":
print("Root QDISC did not exist...")
out = CmdExecutor.run_and_return_result_and_print_command(tcg.delete_ingress_qdisc(dev.name))
if not out == "":
print("Ingress QDISC did not exist...")
print("###################### Delete virtual interfaces ######################")
vim.delete_virtual_interfaces()
def show(self):
    """
    Print the TC configuration currently active on all interfaces.

    Dumps qdiscs, classes, and (egress plus ingress) filters for every
    interface found on the system.
    :return:
    """
    devices = get_interface_names()
    generator = TCCommandGenerator()

    def print_dev_separator(name):
        # One separator line per interface inside each section.
        print("-----------------------------DEV=" + name + "-----------------------------")

    print("#############################QDISC#############################")
    for name in devices:
        print_dev_separator(name)
        CmdExecutor.run_and_print(generator.show_qdiscs(iface_name=name))
    print("#############################CLASS#############################")
    for name in devices:
        print_dev_separator(name)
        CmdExecutor.run_and_print(generator.show_classes(iface_name=name))
    print("#############################FILTER#############################")
    for name in devices:
        print_dev_separator(name)
        print("*****************************Egress Filters*****************************")
        CmdExecutor.run_and_print(generator.show_egress_filter(iface_name=name))
        print("*****************************Ingress Filters*****************************")
        CmdExecutor.run_and_print(generator.show_ingress_filter(iface_name=name))
| [
"code_base.tc_logic.DefaultClass",
"code_base.virtual_interfaces_manager.VirtualInterfacesManager",
"code_base.tc_logic.TcClass",
"code_base.tc_command_generator.TCCommandGenerator",
"code_base.links.Links",
"json.load",
"code_base.systeminfo.get_interface_names",
"code_base.tc_logic.RedirectFilter",
... | [((1083, 1090), 'code_base.links.Links', 'Links', ([], {}), '()\n', (1088, 1090), False, 'from code_base.links import Links\n'), ((3817, 3843), 'code_base.virtual_interfaces_manager.VirtualInterfacesManager', 'VirtualInterfacesManager', ([], {}), '()\n', (3841, 3843), False, 'from code_base.virtual_interfaces_manager import VirtualInterfacesManager\n'), ((3858, 3878), 'code_base.tc_command_generator.TCCommandGenerator', 'TCCommandGenerator', ([], {}), '()\n', (3876, 3878), False, 'from code_base.tc_command_generator import TCCommandGenerator\n'), ((4659, 4680), 'code_base.systeminfo.get_interface_names', 'get_interface_names', ([], {}), '()\n', (4678, 4680), False, 'from code_base.systeminfo import get_interface_names\n'), ((4695, 4715), 'code_base.tc_command_generator.TCCommandGenerator', 'TCCommandGenerator', ([], {}), '()\n', (4713, 4715), False, 'from code_base.tc_command_generator import TCCommandGenerator\n'), ((1985, 2005), 'code_base.tc_logic.EgressQdisc', 'EgressQdisc', ([], {'dev': 'dev'}), '(dev=dev)\n', (1996, 2005), False, 'from code_base.tc_logic import EgressQdisc, IngressQdisc, DefaultClass, TcClass, ClassifierFilter, RedirectFilter\n'), ((2034, 2055), 'code_base.tc_logic.IngressQdisc', 'IngressQdisc', ([], {'dev': 'dev'}), '(dev=dev)\n', (2046, 2055), False, 'from code_base.tc_logic import EgressQdisc, IngressQdisc, DefaultClass, TcClass, ClassifierFilter, RedirectFilter\n'), ((2084, 2112), 'code_base.tc_logic.EgressQdisc', 'EgressQdisc', ([], {'dev': 'virtual_dev'}), '(dev=virtual_dev)\n', (2095, 2112), False, 'from code_base.tc_logic import EgressQdisc, IngressQdisc, DefaultClass, TcClass, ClassifierFilter, RedirectFilter\n'), ((2148, 2198), 'code_base.tc_logic.DefaultClass', 'DefaultClass', ([], {'dev': 'dev', 'bandwidth': 'default_bandwidth'}), '(dev=dev, bandwidth=default_bandwidth)\n', (2160, 2198), False, 'from code_base.tc_logic import EgressQdisc, IngressQdisc, DefaultClass, TcClass, ClassifierFilter, RedirectFilter\n'), ((2235, 
2293), 'code_base.tc_logic.DefaultClass', 'DefaultClass', ([], {'dev': 'virtual_dev', 'bandwidth': 'default_bandwidth'}), '(dev=virtual_dev, bandwidth=default_bandwidth)\n', (2247, 2293), False, 'from code_base.tc_logic import EgressQdisc, IngressQdisc, DefaultClass, TcClass, ClassifierFilter, RedirectFilter\n'), ((2329, 2390), 'code_base.tc_logic.RedirectFilter', 'RedirectFilter', ([], {'dev': 'dev', 'target_dev': 'virtual_dev', 'ip_version': '(4)'}), '(dev=dev, target_dev=virtual_dev, ip_version=4)\n', (2343, 2390), False, 'from code_base.tc_logic import EgressQdisc, IngressQdisc, DefaultClass, TcClass, ClassifierFilter, RedirectFilter\n'), ((2426, 2487), 'code_base.tc_logic.RedirectFilter', 'RedirectFilter', ([], {'dev': 'dev', 'target_dev': 'virtual_dev', 'ip_version': '(6)'}), '(dev=dev, target_dev=virtual_dev, ip_version=6)\n', (2440, 2487), False, 'from code_base.tc_logic import EgressQdisc, IngressQdisc, DefaultClass, TcClass, ClassifierFilter, RedirectFilter\n'), ((1545, 1564), 'json.load', 'json.load', (['jsonFile'], {}), '(jsonFile)\n', (1554, 1564), False, 'import json\n'), ((2942, 3004), 'code_base.tc_logic.TcClass', 'TcClass', ([], {'dev': 'dev', 'classid': 'link.as_id', 'bandwidth': 'link.bandwidth'}), '(dev=dev, classid=link.as_id, bandwidth=link.bandwidth)\n', (2949, 3004), False, 'from code_base.tc_logic import EgressQdisc, IngressQdisc, DefaultClass, TcClass, ClassifierFilter, RedirectFilter\n'), ((3041, 3113), 'code_base.tc_logic.ClassifierFilter', 'ClassifierFilter', ([], {'dev': 'dev', 'ip_addr': 'link.ip_addr', 'target_class': 'link.as_id'}), '(dev=dev, ip_addr=link.ip_addr, target_class=link.as_id)\n', (3057, 3113), False, 'from code_base.tc_logic import EgressQdisc, IngressQdisc, DefaultClass, TcClass, ClassifierFilter, RedirectFilter\n'), ((3276, 3346), 'code_base.tc_logic.TcClass', 'TcClass', ([], {'dev': 'virtual_dev', 'classid': 'link.as_id', 'bandwidth': 'link.bandwidth'}), '(dev=virtual_dev, classid=link.as_id, 
bandwidth=link.bandwidth)\n', (3283, 3346), False, 'from code_base.tc_logic import EgressQdisc, IngressQdisc, DefaultClass, TcClass, ClassifierFilter, RedirectFilter\n'), ((3384, 3469), 'code_base.tc_logic.ClassifierFilter', 'ClassifierFilter', ([], {'dev': 'virtual_dev', 'ip_addr': 'link.ip_addr', 'target_class': 'link.as_id'}), '(dev=virtual_dev, ip_addr=link.ip_addr, target_class=link.as_id\n )\n', (3400, 3469), False, 'from code_base.tc_logic import EgressQdisc, IngressQdisc, DefaultClass, TcClass, ClassifierFilter, RedirectFilter\n')] |