| code | apis | extract_api |
|---|---|---|
| stringlengths 22–1.05M | listlengths 1–3.31k | stringlengths 75–3.25M |
from flask import current_app as app
from .cart_product import Cart_Product
class Cart:
def __init__(self, id, pid, quantity):
self.id = id
self.pid = pid
self.quantity = quantity
@staticmethod
def get(id):
try:
rows = app.db.execute('''
SELECT p.id, c.seller_id, CONCAT(u.firstname, u.lastname), p.name, CAST((c.quantity * i.price) AS numeric(36, 2)), c.quantity
FROM Cart c, Products p, Inventory i, Users u
WHERE c.id = :id AND c.pid = p.id AND p.id = i.product_id AND c.seller_id = i.seller_id AND c.seller_id = u.id;
''',
id=id)
            return [Cart_Product(*row) for row in rows]
except Exception as e:
print(e)
return None
@staticmethod
def get_all():
rows = app.db.execute('''
SELECT *
FROM Cart
''')
return [Cart(*row) for row in rows]
@staticmethod
def checkIfProductInCart(id, uid, sid):
try:
rows = app.db.execute('''
SELECT *
FROM Cart
WHERE id = :uid AND pid = :pid AND seller_id = :sid
''',
uid = uid,
pid = id,
sid = sid
)
            return len(rows) > 0
except Exception as e:
print(e)
return False
@staticmethod
def update_product(id, uid, sid, state):
try:
rows = app.db.execute(
'''
SELECT quantity
FROM Cart
WHERE id = :uid AND pid = :pid AND seller_id = :sid
''',
uid = uid,
pid = id,
sid = sid
)
curr_quantity = int(rows[0][0])
if state == "add":
new_quantity = curr_quantity + 1
else:
new_quantity = curr_quantity - 1
if new_quantity == 0:
app.db.execute('''
DELETE FROM Cart WHERE pid = :pid AND id = :uid AND seller_id = :sid
''',
uid = uid,
pid = id,
sid = sid
)
else:
app.db.execute(
'''
UPDATE Cart
SET quantity = :new_quantity
WHERE id = :uid AND pid = :pid AND seller_id= :sid
''',
new_quantity = new_quantity,
uid = uid,
pid = id,
sid = sid
)
except Exception as e:
print(e)
@staticmethod
def add_product(id, uid, sid):
#quantity = request.args.get("quantity")
try:
app.db.execute(
'''
INSERT INTO Cart VALUES(:uid, :id, :sid, 1)
''',
id = id,
uid = uid,
sid = sid
)
except Exception as e:
print(e)
@staticmethod
def remove_product(pid, uid, seller_id):
try:
app.db.execute(
'''
DELETE FROM Cart WHERE pid = :pid AND id = :uid AND seller_id = :sid
''',
uid = uid,
pid = pid,
sid = seller_id
)
except Exception as e:
print(e)
@staticmethod
def save_for_later(uid, pid, seller_id, quantity):
try:
rows = app.db.execute(
'''
DELETE FROM Cart c WHERE c.pid = :pid AND c.id = :uid AND c.seller_id = :sid
RETURNING c.quantity
''',
uid = uid,
pid = pid,
sid = seller_id
)
app.db.execute(
'''
INSERT INTO Saved_Cart VALUES(:id, :pid, :sid, :quantity)
''',
id = uid,
pid = pid,
sid = seller_id,
quantity = quantity
)
except Exception as e:
print(e)
@staticmethod
def get_saved(uid):
try:
rows = app.db.execute('''
SELECT p.id, s.seller_id, CONCAT(u.firstname, u.lastname), p.name, CAST((s.quantity * i.price) AS numeric(36, 2)), s.quantity
FROM Saved_Cart s, Products p, Inventory i, Users u
WHERE s.id = :id AND s.pid = p.id AND p.id = i.product_id AND s.seller_id = i.seller_id AND s.seller_id = u.id;
''',
id=uid)
            return [Cart_Product(*row) for row in rows]
except Exception as e:
print(e)
return None
@staticmethod
def get_total(uid):
try:
rows = app.db.execute(
'''
SELECT CAST(SUM(c.quantity * i.price) as numeric(36,2)) AS total
FROM Cart c, Inventory i
WHERE c.pid = i.product_id AND c.id = :uid AND c.seller_id = i.seller_id
''',
uid = uid
)
return rows[0][0]
except Exception as e:
print(e)
@staticmethod
def remove_from_saved(uid, pid, sid):
try:
app.db.execute(
'''
DELETE FROM Saved_Cart WHERE pid = :pid AND id = :uid AND seller_id = :sid
''',
uid = uid,
pid = pid,
sid = sid
)
except Exception as e:
print(e)
@staticmethod
def move_to_cart(uid, pid, sid, quantity):
try:
rows = app.db.execute(
'''
DELETE FROM Saved_Cart WHERE pid = :pid AND id = :uid AND seller_id = :sid
RETURNING pid
''',
uid = uid,
pid = pid,
sid = sid
)
pid = rows[0][0]
app.db.execute(
'''
INSERT INTO Cart VALUES(:id, :pid, :sid, :quantity)
''',
id = uid,
pid = pid,
sid = sid,
quantity = quantity
)
except Exception as e:
print(e)
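# A hypothetical Flask route using this model (a sketch, not from the source:
# the blueprint name, URL, template, and flask_login usage are assumptions).
#
# from flask import render_template, Blueprint
# from flask_login import current_user
#
# bp = Blueprint('cart', __name__)
#
# @bp.route('/cart')
# def cart_page():
#     items = Cart.get(current_user.id)
#     total = Cart.get_total(current_user.id)
#     return render_template('cart.html', items=items, total=total)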
|
[
"flask.current_app.db.execute"
] |
[((997, 1063), 'flask.current_app.db.execute', 'app.db.execute', (['"""\n        SELECT *\n        FROM Cart\n        """'], {}), '("""\n        SELECT *\n        FROM Cart\n        """)\n', (1011, 1063), True, 'from flask import current_app as app\n'), ((278, 650), 'flask.current_app.db.execute', 'app.db.execute', (['"""\n        SELECT p.id, c.seller_id, CONCAT(u.firstname, u.lastname), p.name, CAST((c.quantity * i.price) AS numeric(36, 2)), c.quantity\n        FROM Cart c, Products p, Inventory i, Users u\n        WHERE c.id = :id AND c.pid = p.id AND p.id = i.product_id AND c.seller_id = i.seller_id AND c.seller_id = u.id;\n        """'], {'id': 'id'}), '(\n    """\n        SELECT p.id, c.seller_id, CONCAT(u.firstname, u.lastname), p.name, CAST((c.quantity * i.price) AS numeric(36, 2)), c.quantity\n        FROM Cart c, Products p, Inventory i, Users u\n        WHERE c.id = :id AND c.pid = p.id AND p.id = i.product_id AND c.seller_id = i.seller_id AND c.seller_id = u.id;\n        """\n    , id=id)\n', (292, 650), True, 'from flask import current_app as app\n'), ((1202, 1381), 'flask.current_app.db.execute', 'app.db.execute', (['"""\n        SELECT * \n        FROM Cart\n        WHERE id = :uid AND pid = :pid AND seller_id = :sid\n        """'], {'uid': 'uid', 'pid': 'id', 'sid': 'sid'}), '(\n    """\n        SELECT * \n        FROM Cart\n        WHERE id = :uid AND pid = :pid AND seller_id = :sid\n        """\n    , uid=uid, pid=id, sid=sid)\n', (1216, 1381), True, 'from flask import current_app as app\n'), ((1768, 1985), 'flask.current_app.db.execute', 'app.db.execute', (['"""\n            SELECT quantity\n            FROM Cart\n            WHERE id = :uid AND pid = :pid AND seller_id = :sid\n            """'], {'uid': 'uid', 'pid': 'id', 'sid': 'sid'}), '(\n    """\n            SELECT quantity\n            FROM Cart\n            WHERE id = :uid AND pid = :pid AND seller_id = :sid\n            """\n    , uid=uid, pid=id, sid=sid)\n', (1782, 1985), True, 'from flask import current_app as app\n'), ((3404, 3538), 'flask.current_app.db.execute', 'app.db.execute', (['"""\n            INSERT INTO Cart VALUES(:uid, :id, :sid, 1)\n            """'], {'id': 'id', 'uid': 'uid', 'sid': 'sid'}), '(\n    """\n            INSERT INTO Cart VALUES(:uid, :id, :sid, 1)\n            """\n    , id=id, uid=uid, sid=sid)\n', (3418, 3538), True, 'from flask import current_app as app\n'), ((3850, 4017), 'flask.current_app.db.execute', 'app.db.execute', (['"""\n            DELETE FROM Cart WHERE pid = :pid AND id = :uid AND seller_id = :sid\n            """'], {'uid': 'uid', 'pid': 'pid', 'sid': 'seller_id'}), '(\n    """\n            DELETE FROM Cart WHERE pid = :pid AND id = :uid AND seller_id = :sid\n            """\n    , uid=uid, pid=pid, sid=seller_id)\n', (3864, 4017), True, 'from flask import current_app as app\n'), ((4353, 4565), 'flask.current_app.db.execute', 'app.db.execute', (['"""\n            DELETE FROM Cart c WHERE c.pid = :pid AND c.id = :uid AND c.seller_id = :sid\n            RETURNING c.quantity\n            """'], {'uid': 'uid', 'pid': 'pid', 'sid': 'seller_id'}), '(\n    """\n            DELETE FROM Cart c WHERE c.pid = :pid AND c.id = :uid AND c.seller_id = :sid\n            RETURNING c.quantity\n            """\n    , uid=uid, pid=pid, sid=seller_id)\n', (4367, 4565), True, 'from flask import current_app as app\n'), ((4755, 4929), 'flask.current_app.db.execute', 'app.db.execute', (['"""\n            INSERT INTO Saved_Cart VALUES(:id, :pid, :sid, :quantity)\n            """'], {'id': 'uid', 'pid': 'pid', 'sid': 'seller_id', 'quantity': 'quantity'}), '(\n    """\n            INSERT INTO Saved_Cart VALUES(:id, :pid, :sid, :quantity)\n            """\n    , id=uid, pid=pid, sid=seller_id, quantity=quantity)\n', (4769, 4929), True, 'from flask import current_app as app\n'), ((5213, 5592), 'flask.current_app.db.execute', 'app.db.execute', (['"""\n        SELECT p.id, s.seller_id, CONCAT(u.firstname, u.lastname), p.name, CAST((s.quantity * i.price) AS numeric(36, 2)), s.quantity\n        FROM Saved_Cart s, Products p, Inventory i, Users u\n        WHERE s.id = :id AND s.pid = p.id AND p.id = i.product_id AND s.seller_id = i.seller_id AND s.seller_id = u.id;\n        """'], {'id': 'uid'}), '(\n    """\n        SELECT p.id, s.seller_id, CONCAT(u.firstname, u.lastname), p.name, CAST((s.quantity * i.price) AS numeric(36, 2)), s.quantity\n        FROM Saved_Cart s, Products p, Inventory i, Users u\n        WHERE s.id = :id AND s.pid = p.id AND p.id = i.product_id AND s.seller_id = i.seller_id AND s.seller_id = u.id;\n        """\n    , id=uid)\n', (5227, 5592), True, 'from flask import current_app as app\n'), ((5961, 6231), 'flask.current_app.db.execute', 'app.db.execute', (['"""\n            SELECT CAST(SUM(c.quantity * i.price) as numeric(36,2)) AS total \n            FROM Cart c, Inventory i\n            WHERE c.pid = i.product_id AND c.id = :uid AND c.seller_id = i.seller_id\n            """'], {'uid': 'uid'}), '(\n    """\n            SELECT CAST(SUM(c.quantity * i.price) as numeric(36,2)) AS total \n            FROM Cart c, Inventory i\n            WHERE c.pid = i.product_id AND c.id = :uid AND c.seller_id = i.seller_id\n            """\n    , uid=uid)\n', (5975, 6231), True, 'from flask import current_app as app\n'), ((6473, 6640), 'flask.current_app.db.execute', 'app.db.execute', (['"""\n            DELETE FROM Saved_Cart WHERE pid = :pid AND id = :uid AND seller_id = :sid\n            """'], {'uid': 'uid', 'pid': 'pid', 'sid': 'sid'}), '(\n    """\n            DELETE FROM Saved_Cart WHERE pid = :pid AND id = :uid AND seller_id = :sid\n            """\n    , uid=uid, pid=pid, sid=sid)\n', (6487, 6640), True, 'from flask import current_app as app\n'), ((6968, 7165), 'flask.current_app.db.execute', 'app.db.execute', (['"""\n            DELETE FROM Saved_Cart WHERE pid = :pid AND id = :uid AND seller_id = :sid\n            RETURNING pid\n            """'], {'uid': 'uid', 'pid': 'pid', 'sid': 'sid'}), '(\n    """\n            DELETE FROM Saved_Cart WHERE pid = :pid AND id = :uid AND seller_id = :sid\n            RETURNING pid\n            """\n    , uid=uid, pid=pid, sid=sid)\n', (6982, 7165), True, 'from flask import current_app as app\n'), ((7384, 7546), 'flask.current_app.db.execute', 'app.db.execute', (['"""\n            INSERT INTO Cart VALUES(:id, :pid, :sid, :quantity)\n            """'], {'id': 'uid', 'pid': 'pid', 'sid': 'sid', 'quantity': 'quantity'}), '(\n    """\n            INSERT INTO Cart VALUES(:id, :pid, :sid, :quantity)\n            """\n    , id=uid, pid=pid, sid=sid, quantity=quantity)\n', (7398, 7546), True, 'from flask import current_app as app\n'), ((2386, 2546), 'flask.current_app.db.execute', 'app.db.execute', (['"""\n                DELETE FROM Cart WHERE pid = :pid AND id = :uid AND seller_id = :sid\n                """'], {'uid': 'uid', 'pid': 'id', 'sid': 'sid'}), '(\n    """\n                DELETE FROM Cart WHERE pid = :pid AND id = :uid AND seller_id = :sid\n                """\n    , uid=uid, pid=id, sid=sid)\n', (2400, 2546), True, 'from flask import current_app as app\n'), ((2744, 3019), 'flask.current_app.db.execute', 'app.db.execute', (['"""\n                UPDATE Cart\n                SET quantity = :new_quantity\n                WHERE id = :uid AND pid = :pid AND seller_id= :sid\n\n                """'], {'new_quantity': 'new_quantity', 'uid': 'uid', 'pid': 'id', 'sid': 'sid'}), '(\n    """\n                UPDATE Cart\n                SET quantity = :new_quantity\n                WHERE id = :uid AND pid = :pid AND seller_id= :sid\n\n                """\n    , new_quantity=new_quantity, uid=uid, pid=id, sid=sid)\n', (2758, 3019), True, 'from flask import current_app as app\n')]
|
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="simple_peewee_flask_webapi",
version="1.0.0",
author="<NAME>",
author_email="<EMAIL>",
description="Simple peewee Flask WEB-API",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/prbpedro/simple_peewee_flask_webapi",
install_requires=[
"Flask==1.1.1",
"peewee==3.10.0"],
packages=setuptools.find_packages(exclude=("tests",)),
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
)
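# Usage sketch (standard setuptools workflow, not specific to this repo):
#   pip install .                      # install from a checkout
#   python setup.py sdist bdist_wheel  # build distributable archives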
|
[
"setuptools.find_packages"
] |
[((522, 566), 'setuptools.find_packages', 'setuptools.find_packages', ([], {'exclude': "('tests',)"}), "(exclude=('tests',))\n", (546, 566), False, 'import setuptools\n')]
|
#!/usr/bin/python
import os
import io
import sys
import json
import shutil
import pandas
import dataLib
import datetime
# PIL - the Python Image Library, used for bitmap image manipulation.
import PIL
import PIL.ImageFont
import PIL.ImageDraw
# ReportLab - used for PDF document generation.
import reportlab.lib.units
import reportlab.lib.utils
import reportlab.lib.colors
import reportlab.pdfgen.canvas
import reportlab.lib.pagesizes
import reportlab.graphics.renderPM
def intToConstrainedPercentage(theValue, theMin, theMax):
result = theValue
if result < theMin:
result = theMin
if result > theMax:
result = theMax
return (result - theMin) / (theMax - theMin)
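# Worked example: intToConstrainedPercentage(7, 5, 10) leaves 7 inside [5, 10]
# and returns (7 - 5) / (10 - 5) == 0.4; out-of-range inputs clamp to 0.0 or 1.0.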
def roundDatetime(theDate):
return theDate.replace(hour=0, minute=0, second=0, microsecond=0)
def dateToWorkingDaysAgo(theDate):
if theDate == "Never":
return "Never"
daysAgo = 0
today = roundDatetime(datetime.datetime.now())
currentDate = roundDatetime(datetime.datetime.strptime(theDate, "%Y-%m-%dT%H:%M:%S.%fZ"))
while currentDate < today:
if not currentDate.isoweekday() in [6, 7]:
daysAgo = daysAgo + 1
currentDate = currentDate + datetime.timedelta(days=1)
return daysAgo
def parseDate(theDate):
if theDate == "Never":
return "Never"
return datetime.datetime.strptime(theDate, "%Y-%m-%dT%H:%M:%S.%fZ")
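# Example: parseDate("2020-01-31T09:30:00.000Z") -> datetime(2020, 1, 31, 9, 30),
# while the sentinel string "Never" is passed through unchanged.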
# Load the config file (set by the system administrator).
config = dataLib.loadConfig(["dataFolder"])
# Make sure the output folder exists.
reportsRoot = config["dataFolder"] + os.sep + "Reports"
outputRoot = reportsRoot + os.sep + "Pupil Engagement"
historyRoot = outputRoot + os.sep + "History"
os.makedirs(historyRoot, exist_ok=True)
pupils = pandas.read_csv(config["dataFolder"] + os.sep + "pupils.csv", header=0)
activity = pandas.read_csv(config["dataFolder"] + os.sep + "Reports" + os.sep + "userActivity.csv", header=0)
columnPos = {"Name":0,"Username":70,"Year":105,"Login":None,"Classroom":None,"Last Active(Working Days)":125}
columnNames = columnPos.keys()
report = pandas.DataFrame(columns=columnNames)
yearGroups = {}
for pupilsIndex, pupilsValues in pupils.iterrows():
yearGroups[dataLib.yearCohortToGroup(pupilsValues["YearGroup"])] = 1
reportIndex = 0
todaysDate = datetime.datetime.now()
todaysDateString = todaysDate.strftime("%d-%m-%Y")
reportTitle = "report-" + todaysDateString + ".csv"
mostRecentDate = datetime.datetime(2000, 1, 1)
print("Processing data by year group...")
for yearGroup in yearGroups.keys():
print("Processing " + yearGroup + "...")
for pupilsIndex, pupilsValues in pupils.iterrows():
if dataLib.yearCohortToGroup(pupilsValues["YearGroup"]) == yearGroup:
for activityIndex, activityValues in activity.iterrows():
username = ""
if activityValues["email"] == pupilsValues["Username"] + "@knightsbridgeschool.com":
username = pupilsValues["Username"]
altUsername = pupilsValues["OldUsername"]
elif activityValues["email"] == pupilsValues["OldUsername"] + "@knightsbridgeschool.com":
username = pupilsValues["OldUsername"]
altUsername = pupilsValues["Username"]
                if username != "":
indexToUse = reportIndex
usernameList = report["Username"].tolist()
if altUsername in usernameList:
altUsernameIndex = usernameList.index(altUsername)
if report.at[altUsernameIndex, "Login"] == "Never":
indexToUse = altUsernameIndex
else:
reportIndex = reportIndex + 1
report.at[indexToUse, "Name"] = pupilsValues["GivenName"] + " " + pupilsValues["FamilyName"]
report.at[indexToUse, "Username"] = username
report.at[indexToUse, "Year"] = dataLib.yearCohortToGroup(yearGroup)
report.at[indexToUse, "Login"] = activityValues["accounts:last_login_time"]
report.at[indexToUse, "Classroom"] = activityValues["classroom:last_interaction_time"]
lastLogin = parseDate(activityValues["accounts:last_login_time"])
if (not lastLogin == "Never") and lastLogin > mostRecentDate:
mostRecentDate = lastLogin
lastLoginDays = dateToWorkingDaysAgo(activityValues["accounts:last_login_time"])
lastClassroom = parseDate(activityValues["classroom:last_interaction_time"])
if (not lastClassroom == "Never") and lastClassroom > mostRecentDate:
mostRecentDate = lastClassroom
lastClassroomDays = dateToWorkingDaysAgo(activityValues["classroom:last_interaction_time"])
if lastLogin == "Never":
lastActive = lastClassroom
lastActiveDays = lastClassroomDays
elif lastClassroom == "Never":
lastActive = lastLogin
lastActiveDays = lastLoginDays
elif lastClassroom > lastLogin:
lastActive = lastClassroom
lastActiveDays = lastClassroomDays
else:
lastActive = lastLogin
lastActiveDays = lastLoginDays
if lastActive == "Never":
report.at[indexToUse, "Last Active(Working Days)"] = "Never"
else:
report.at[indexToUse, "Last Active(Working Days)"] = lastActive.strftime("%d/%m/%Y") + "(" + str(lastActiveDays) + ")"
# pdfCanvas.setFillColorRGB(colourValue,1-colourValue,0)
# Write out the CSV report.
report.to_csv(outputRoot + os.sep + reportTitle, index=False)
for item in os.listdir(outputRoot):
if item.endswith(".csv") and not item == reportTitle:
print("Moving historial report " + item + " to " + historyRoot)
shutil.move(outputRoot + os.sep + item, historyRoot + os.sep + item)
# Get ready to write out a formatted PDF document per year group.
# We are printing on A4 paper - set the page size and borders, in mm.
pageWidth = 210
pageHeight = 297
lineHeight = 8
leftBorder = 10
topBorder = 10
# A mid-gray background to make following lines on the page a bit easier.
lineImage = PIL.Image.new("RGB", (pageWidth-(leftBorder*2), lineHeight), (200, 200, 200))
for yearGroup in yearGroups.keys():
print("Generating report: " + yearGroup + ".pdf")
lineNumber = 1
pdfCanvas = reportlab.pdfgen.canvas.Canvas(outputRoot + os.sep + yearGroup + ".pdf")
for reportIndex, reportValues in report.iterrows():
# Draw the report name and column headers.
if lineNumber == 1:
pdfCanvas.drawString(leftBorder*reportlab.lib.units.mm, (pageHeight-topBorder)*reportlab.lib.units.mm, "Year: " + str(yearGroup) + ", Most recent date: " + roundDatetime(mostRecentDate).strftime("%d/%m/%Y"))
for columnName in columnNames:
                if columnPos[columnName] is not None:
pdfCanvas.drawString((leftBorder+columnPos[columnName])*reportlab.lib.units.mm, ((pageHeight-lineHeight)-topBorder)*reportlab.lib.units.mm, columnName)
lineNumber = 2
if reportValues["Year"] == yearGroup:
for columnName in columnNames:
                if columnPos[columnName] is not None:
if lineNumber % 2 == 0:
pdfCanvas.drawInlineImage(lineImage, leftBorder*reportlab.lib.units.mm, ((pageHeight-(lineHeight*(lineNumber+1))-(int(lineHeight/4)))-topBorder)*reportlab.lib.units.mm, (pageWidth-(leftBorder*2))*reportlab.lib.units.mm, lineHeight*reportlab.lib.units.mm)
pdfCanvas.setFillColorRGB(0,0,0)
columnValue = str(reportValues[columnName])
if columnName == "Year":
columnValue = columnValue.replace("Reception","Rec").replace("Year ","")
pdfCanvas.drawString((leftBorder+columnPos[columnName])*reportlab.lib.units.mm, ((pageHeight-(lineHeight*lineNumber))-topBorder)*reportlab.lib.units.mm, columnValue)
lineNumber = lineNumber + 1
if lineNumber == 36:
pdfCanvas.showPage()
lineNumber = 1
# Save the PDF document.
pdfCanvas.save()
|
[
"pandas.DataFrame",
"PIL.Image.new",
"os.makedirs",
"pandas.read_csv",
"dataLib.loadConfig",
"dataLib.yearCohortToGroup",
"datetime.datetime",
"datetime.datetime.strptime",
"datetime.timedelta",
"shutil.move",
"datetime.datetime.now",
"os.listdir"
] |
[((1375, 1409), 'dataLib.loadConfig', 'dataLib.loadConfig', (["['dataFolder']"], {}), "(['dataFolder'])\n", (1393, 1409), False, 'import dataLib\n'), ((1606, 1645), 'os.makedirs', 'os.makedirs', (['historyRoot'], {'exist_ok': '(True)'}), '(historyRoot, exist_ok=True)\n', (1617, 1645), False, 'import os\n'), ((1656, 1727), 'pandas.read_csv', 'pandas.read_csv', (["(config['dataFolder'] + os.sep + 'pupils.csv')"], {'header': '(0)'}), "(config['dataFolder'] + os.sep + 'pupils.csv', header=0)\n", (1671, 1727), False, 'import pandas\n'), ((1739, 1841), 'pandas.read_csv', 'pandas.read_csv', (["(config['dataFolder'] + os.sep + 'Reports' + os.sep + 'userActivity.csv')"], {'header': '(0)'}), "(config['dataFolder'] + os.sep + 'Reports' + os.sep +\n 'userActivity.csv', header=0)\n", (1754, 1841), False, 'import pandas\n'), ((1989, 2026), 'pandas.DataFrame', 'pandas.DataFrame', ([], {'columns': 'columnNames'}), '(columns=columnNames)\n', (2005, 2026), False, 'import pandas\n'), ((2196, 2219), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2217, 2219), False, 'import datetime\n'), ((2340, 2369), 'datetime.datetime', 'datetime.datetime', (['(2000)', '(1)', '(1)'], {}), '(2000, 1, 1)\n', (2357, 2369), False, 'import datetime\n'), ((5138, 5160), 'os.listdir', 'os.listdir', (['outputRoot'], {}), '(outputRoot)\n', (5148, 5160), False, 'import os\n'), ((5657, 5736), 'PIL.Image.new', 'PIL.Image.new', (['"""RGB"""', '(pageWidth - leftBorder * 2, lineHeight)', '(200, 200, 200)'], {}), "('RGB', (pageWidth - leftBorder * 2, lineHeight), (200, 200, 200))\n", (5670, 5736), False, 'import PIL\n'), ((1244, 1304), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['theDate', '"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {}), "(theDate, '%Y-%m-%dT%H:%M:%S.%fZ')\n", (1270, 1304), False, 'import datetime\n'), ((883, 906), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (904, 906), False, 'import datetime\n'), ((937, 997), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['theDate', '"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {}), "(theDate, '%Y-%m-%dT%H:%M:%S.%fZ')\n", (963, 997), False, 'import datetime\n'), ((2108, 2160), 'dataLib.yearCohortToGroup', 'dataLib.yearCohortToGroup', (["pupilsValues['YearGroup']"], {}), "(pupilsValues['YearGroup'])\n", (2133, 2160), False, 'import dataLib\n'), ((5285, 5353), 'shutil.move', 'shutil.move', (['(outputRoot + os.sep + item)', '(historyRoot + os.sep + item)'], {}), '(outputRoot + os.sep + item, historyRoot + os.sep + item)\n', (5296, 5353), False, 'import shutil\n'), ((1127, 1153), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (1145, 1153), False, 'import datetime\n'), ((2548, 2600), 'dataLib.yearCohortToGroup', 'dataLib.yearCohortToGroup', (["pupilsValues['YearGroup']"], {}), "(pupilsValues['YearGroup'])\n", (2573, 2600), False, 'import dataLib\n'), ((3579, 3615), 'dataLib.yearCohortToGroup', 'dataLib.yearCohortToGroup', (['yearGroup'], {}), '(yearGroup)\n', (3604, 3615), False, 'import dataLib\n')]
|
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.contrib.auth.forms import UserCreationForm
from django.shortcuts import render, get_object_or_404
from django.contrib.auth.decorators import login_required
from main.forms import SignupForm
from django.core.paginator import Paginator, PageNotAnInteger, EmptyPage
from main.models import ClubList
from django.db.models import F
from django.views.decorators.csrf import csrf_exempt
# Create your views here.
'''
def login(request):
return render(request, 'main/login.html')
'''
def home(request, tag=None):
clublist = ClubList.objects.order_by('-ClubMemberSum')
return render(request, "index.html", {
'clublists': clublist
})
def signup(request):
"""signup
to register users
"""
if request.method == "POST":
signupform = SignupForm(request.POST)
if signupform.is_valid():
user = signupform.save(commit=False)
user.save()
return HttpResponseRedirect(
reverse("signup_ok")
)
elif request.method == "GET":
signupform = SignupForm()
return render(request, "registration/signup.html",{
'signupform' : signupform
})
@login_required
@csrf_exempt
def join(request):
clublist = ClubList.objects.order_by('-ClubMemberSum')
if request.method == "POST":
join_name = request.POST.get('username')
join_club = request.POST.get('clubname')
        # let the database do the increment atomically with an F() expression,
        # avoiding the read-modify-write race of get() followed by update()
        ClubList.objects.filter(ClubName=join_club).update(ClubMemberSum=F('ClubMemberSum') + 1)
return render(request, "index.html",{
'clublists': clublist
})
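'''
A hypothetical urls.py wiring for these views (names and patterns assumed, not
from the source); the django.core.urlresolvers import above implies Django < 2.0,
so the old url() routing style is used.

from django.conf.urls import url
from main import views

urlpatterns = [
    url(r'^$', views.home, name='home'),
    url(r'^signup/$', views.signup, name='signup'),
    url(r'^join/$', views.join, name='join'),
]
'''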
|
[
"main.models.ClubList.objects.get",
"django.core.urlresolvers.reverse",
"main.models.ClubList.objects.filter",
"django.shortcuts.render",
"main.forms.SignupForm",
"main.models.ClubList.objects.order_by"
] |
[((595, 638), 'main.models.ClubList.objects.order_by', 'ClubList.objects.order_by', (['"""-ClubMemberSum"""'], {}), "('-ClubMemberSum')\n", (620, 638), False, 'from main.models import ClubList\n'), ((650, 704), 'django.shortcuts.render', 'render', (['request', '"""index.html"""', "{'clublists': clublist}"], {}), "(request, 'index.html', {'clublists': clublist})\n", (656, 704), False, 'from django.shortcuts import render, get_object_or_404\n'), ((1142, 1213), 'django.shortcuts.render', 'render', (['request', '"""registration/signup.html"""', "{'signupform': signupform}"], {}), "(request, 'registration/signup.html', {'signupform': signupform})\n", (1148, 1213), False, 'from django.shortcuts import render, get_object_or_404\n'), ((1288, 1331), 'main.models.ClubList.objects.order_by', 'ClubList.objects.order_by', (['"""-ClubMemberSum"""'], {}), "('-ClubMemberSum')\n", (1313, 1331), False, 'from main.models import ClubList\n'), ((1618, 1672), 'django.shortcuts.render', 'render', (['request', '"""index.html"""', "{'clublists': clublist}"], {}), "(request, 'index.html', {'clublists': clublist})\n", (1624, 1672), False, 'from django.shortcuts import render, get_object_or_404\n'), ((837, 861), 'main.forms.SignupForm', 'SignupForm', (['request.POST'], {}), '(request.POST)\n', (847, 861), False, 'from main.forms import SignupForm\n'), ((1117, 1129), 'main.forms.SignupForm', 'SignupForm', ([], {}), '()\n', (1127, 1129), False, 'from main.forms import SignupForm\n'), ((1466, 1506), 'main.models.ClubList.objects.get', 'ClubList.objects.get', ([], {'ClubName': 'join_club'}), '(ClubName=join_club)\n', (1486, 1506), False, 'from main.models import ClubList\n'), ((1027, 1047), 'django.core.urlresolvers.reverse', 'reverse', (['"""signup_ok"""'], {}), "('signup_ok')\n", (1034, 1047), False, 'from django.core.urlresolvers import reverse\n'), ((1525, 1568), 'main.models.ClubList.objects.filter', 'ClubList.objects.filter', ([], {'ClubName': 'join_club'}), '(ClubName=join_club)\n', (1548, 1568), False, 'from main.models import ClubList\n')]
|
# Copyright (c) 2020 <NAME>
# Licensed under the MIT License
"""A module that contains caching helpers.
"""
from functools import update_wrapper
__all__ = ["cached"]
def cached(func):
"""Decorator that caches result of method or function.
"""
cache = {}
def wrapper(*args, **kwargs):
key = tuple(args) + tuple(kwargs.items())
if key not in cache:
result = func(*args, **kwargs)
cache[key] = result
return result
else:
return cache[key]
return update_wrapper(wrapper, func)
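# Usage sketch: repeated calls with the same (hashable) arguments reuse the
# cached result instead of re-running the body.
#
# @cached
# def slow_square(x):
#     print("computing", x)
#     return x * x
#
# slow_square(4)  # prints "computing 4" and returns 16
# slow_square(4)  # returns 16 straight from the cache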
|
[
"functools.update_wrapper"
] |
[((542, 571), 'functools.update_wrapper', 'update_wrapper', (['wrapper', 'func'], {}), '(wrapper, func)\n', (556, 571), False, 'from functools import update_wrapper\n')]
|
from collections import deque
import random
import rank_based
class ReplayBuffer(object):
def __init__(self, buffer_size, batch_size=32, learn_start=2000, steps=100000, rand_s=False):
self.buffer_size = buffer_size
self.num_experiences = 0
self.buffer = deque()
self.rand_s = rand_s
conf = {'size': self.buffer_size,
'learn_start': learn_start,
'partition_num': 32,
'steps': steps,
'batch_size': batch_size}
self.replay_memory = rank_based.Experience(conf)
def getBatch(self, batch_size):
# random draw N
if self.rand_s:
return random.sample(self.buffer, batch_size), None, None
batch, w, e_id = self.replay_memory.sample(self.num_experiences)
self.e_id = e_id
self.w_id = w
'''#state t
self.state_t_batch = [item[0] for item in batch]
self.state_t_batch = np.array(self.state_t_batch)
#state t+1
self.state_t_1_batch = [item[1] for item in batch]
self.state_t_1_batch = np.array( self.state_t_1_batch)
self.action_batch = [item[2] for item in batch]
self.action_batch = np.array(self.action_batch)
self.action_batch = np.reshape(self.action_batch,[len(self.action_batch),self.num_actions])
self.reward_batch = [item[3] for item in batch]
self.reward_batch = np.array(self.reward_batch)
self.done_batch = [item[4] for item in batch]
self.done_batch = np.array(self.done_batch)'''
return batch, self.w_id, self.e_id
def size(self):
return self.buffer_size
def add(self, state, action, reward, next_state, done): # add(self, state, next_state, action, reward, done):
new_experience = (state, action, reward, next_state, done)#(state, action, reward, next_state, done)
self.num_experiences += 1
if self.rand_s:
if self.num_experiences < self.buffer_size:
self.buffer.append(new_experience)
else:
self.buffer.popleft()
self.buffer.append(new_experience)
else:
self.replay_memory.store(new_experience)
def count(self):
# if buffer is full, return buffer size
# otherwise, return experience counter
return self.num_experiences
# def erase(self):
# self.buffer = deque()
# self.num_experiences = 0
def rebalance(self):
self.replay_memory.rebalance()
def update_priority(self, indices, delta):
self.replay_memory.update_priority(indices, delta)
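# Usage sketch (assumes the rank_based module from the prioritized experience
# replay reference code is importable; the transition variables are placeholders):
#
# buffer = ReplayBuffer(buffer_size=10000, batch_size=32, learn_start=2000)
# buffer.add(state, action, reward, next_state, done)  # store one transition
# batch, w, e_id = buffer.getBatch(32)                  # prioritized draw
# buffer.update_priority(e_id, deltas)                  # feed back TD errors
# buffer.rebalance()                                    # occasional re-sort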
|
[
"random.sample",
"rank_based.Experience",
"collections.deque"
] |
[((285, 292), 'collections.deque', 'deque', ([], {}), '()\n', (290, 292), False, 'from collections import deque\n'), ((548, 575), 'rank_based.Experience', 'rank_based.Experience', (['conf'], {}), '(conf)\n', (569, 575), False, 'import rank_based\n'), ((680, 718), 'random.sample', 'random.sample', (['self.buffer', 'batch_size'], {}), '(self.buffer, batch_size)\n', (693, 718), False, 'import random\n')]
|
import json
import os
from google.oauth2 import service_account
from googleapiclient.discovery import build
SCOPES = [
"https://www.googleapis.com/auth/spreadsheets",
"https://www.googleapis.com/auth/drive",
]
credentials = service_account.Credentials.from_service_account_info(
json.loads(os.environ["GOOGLE_SERVICE_ACCOUNT"]), scopes=SCOPES
)
spreadsheet_service = build("sheets", "v4", credentials=credentials)
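# Example read (a sketch; SPREADSHEET_ID and the A1 range are placeholders):
# result = (
#     spreadsheet_service.spreadsheets()
#     .values()
#     .get(spreadsheetId="SPREADSHEET_ID", range="Sheet1!A1:C10")
#     .execute()
# )
# rows = result.get("values", [])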
|
[
"googleapiclient.discovery.build",
"json.loads"
] |
[((381, 427), 'googleapiclient.discovery.build', 'build', (['"""sheets"""', '"""v4"""'], {'credentials': 'credentials'}), "('sheets', 'v4', credentials=credentials)\n", (386, 427), False, 'from googleapiclient.discovery import build\n'), ((293, 341), 'json.loads', 'json.loads', (["os.environ['GOOGLE_SERVICE_ACCOUNT']"], {}), "(os.environ['GOOGLE_SERVICE_ACCOUNT'])\n", (303, 341), False, 'import json\n')]
|
import pytest
import numpy as np
from spexxy.grid import GridAxis, ValuesGrid
@pytest.fixture()
def number_grid():
# define grid
grid = np.array([
[1, 2, 3, 4, 5],
[3, 4, 5, 6, 7],
[2, 3, 4, 5, 6],
[4, 5, 6, 7, 8]
])
# define axes
ax1 = GridAxis(name='x', values=list(range(grid.shape[1])))
ax2 = GridAxis(name='y', values=list(range(grid.shape[0])))
# combine values
values = {}
for x in ax1.values:
for y in ax2.values:
values[(x, y)] = grid[y, x]
# return new grid
return ValuesGrid([ax1, ax2], values)
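# Note on the layout built above: keys are (x, y) tuples while the array is
# indexed [y, x], so values[(4, 3)] == grid[3, 4] == 8 and
# values[(0, 1)] == grid[1, 0] == 3.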
|
[
"numpy.array",
"spexxy.grid.ValuesGrid",
"pytest.fixture"
] |
[((82, 98), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (96, 98), False, 'import pytest\n'), ((147, 225), 'numpy.array', 'np.array', (['[[1, 2, 3, 4, 5], [3, 4, 5, 6, 7], [2, 3, 4, 5, 6], [4, 5, 6, 7, 8]]'], {}), '([[1, 2, 3, 4, 5], [3, 4, 5, 6, 7], [2, 3, 4, 5, 6], [4, 5, 6, 7, 8]])\n', (155, 225), True, 'import numpy as np\n'), ((577, 607), 'spexxy.grid.ValuesGrid', 'ValuesGrid', (['[ax1, ax2]', 'values'], {}), '([ax1, ax2], values)\n', (587, 607), False, 'from spexxy.grid import GridAxis, ValuesGrid\n')]
|
from CybORG.Agents import BaseAgent
from CybORG.Shared import Results
from CybORG.Shared.Actions import PrivilegeEscalate, ExploitRemoteService, DiscoverRemoteSystems, Impact, \
DiscoverNetworkServices, Sleep
class B_lineAgent(BaseAgent):
def __init__(self):
self.action = 0
self.target_ip_address = None
self.last_subnet = None
self.last_ip_address = None
self.action_history = {}
self.jumps = [0,1,2,2,2,2,5,5,5,5,9,9,9,12,13]
def train(self, results: Results):
"""allows an agent to learn a policy"""
pass
def get_action(self, observation, action_space):
# print(self.action)
"""gets an action from the agent that should be performed based on the agent's internal state and provided observation and action space"""
session = 0
while True:
if observation['success'] == True:
self.action += 1 if self.action < 14 else 0
else:
self.action = self.jumps[self.action]
if self.action in self.action_history:
action = self.action_history[self.action]
# Discover Remote Systems
elif self.action == 0:
self.last_subnet = observation['User0']['Interface'][0]['Subnet']
action = DiscoverRemoteSystems(session=session, agent='Red', subnet=self.last_subnet)
# Discover Network Services- new IP address found
elif self.action == 1:
self.last_ip_address = [value for key, value in observation.items() if key != 'success'][1]['Interface'][0]['IP Address']
                action = DiscoverNetworkServices(session=session, agent='Red', ip_address=self.last_ip_address)
# Exploit User1
elif self.action == 2:
action = ExploitRemoteService(session=session, agent='Red', ip_address=self.last_ip_address)
# Privilege escalation on User1
elif self.action == 3:
hostname = [value for key, value in observation.items() if key != 'success' and 'System info' in value][0]['System info']['Hostname']
action = PrivilegeEscalate(agent='Red', hostname=hostname, session=session)
# Discover Network Services- new IP address found
elif self.action == 4:
self.last_ip_address = observation['Enterprise1']['Interface'][0]['IP Address']
action = DiscoverNetworkServices(session=session, agent='Red', ip_address=self.last_ip_address)
# Exploit- Enterprise1
elif self.action == 5:
self.target_ip_address = [value for key, value in observation.items() if key != 'success'][0]['Interface'][0]['IP Address']
action = ExploitRemoteService(session=session, agent='Red', ip_address=self.target_ip_address)
# Privilege escalation on Enterprise1
elif self.action == 6:
hostname = [value for key, value in observation.items() if key != 'success' and 'System info' in value][0]['System info']['Hostname']
action = PrivilegeEscalate(agent='Red', hostname=hostname, session=session)
# Scanning the new subnet found.
elif self.action == 7:
self.last_subnet = observation['Enterprise1']['Interface'][0]['Subnet']
action = DiscoverRemoteSystems(subnet=self.last_subnet, agent='Red', session=session)
# Discover Network Services- Enterprise2
elif self.action == 8:
self.target_ip_address = [value for key, value in observation.items() if key != 'success'][2]['Interface'][0]['IP Address']
action = DiscoverNetworkServices(session=session, agent='Red', ip_address=self.target_ip_address)
# Exploit- Enterprise2
elif self.action == 9:
self.target_ip_address = [value for key, value in observation.items() if key != 'success'][0]['Interface'][0]['IP Address']
action = ExploitRemoteService(session=session, agent='Red', ip_address=self.target_ip_address)
# Privilege escalation on Enterprise2
elif self.action == 10:
hostname = [value for key, value in observation.items() if key != 'success' and 'System info' in value][0]['System info']['Hostname']
action = PrivilegeEscalate(agent='Red', hostname=hostname, session=session)
# Discover Network Services- Op_Server0
elif self.action == 11:
action = DiscoverNetworkServices(session=session, agent='Red', ip_address=observation['Op_Server0']['Interface'][0]['IP Address'])
# Exploit- Op_Server0
elif self.action == 12:
info = [value for key, value in observation.items() if key != 'success']
if len(info) > 0:
action = ExploitRemoteService(agent='Red', session=session, ip_address=info[0]['Interface'][0]['IP Address'])
else:
self.action = 0
continue
# Privilege escalation on Op_Server0
elif self.action == 13:
action = PrivilegeEscalate(agent='Red', hostname='Op_Server0', session=session)
# Impact on Op_server0
elif self.action == 14:
action = Impact(agent='Red', session=session, hostname='Op_Server0')
if self.action not in self.action_history:
self.action_history[self.action] = action
return action
def end_episode(self):
self.action = 0
self.target_ip_address = None
self.last_subnet = None
self.last_ip_address = None
self.action_history = {}
def set_initial_values(self, action_space, observation):
pass
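# Usage sketch (assumption: a CybORG-style red-agent loop; environment
# construction is elided because it depends on the installed scenario files):
#
# agent = B_lineAgent()
# observation = env.reset(agent='Red').observation
# for _ in range(100):
#     action = agent.get_action(observation, action_space=None)
#     results = env.step(agent='Red', action=action)
#     observation = results.observation
# agent.end_episode()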
|
[
"CybORG.Shared.Actions.DiscoverNetworkServices",
"CybORG.Shared.Actions.Impact",
"CybORG.Shared.Actions.ExploitRemoteService",
"CybORG.Shared.Actions.DiscoverRemoteSystems",
"CybORG.Shared.Actions.PrivilegeEscalate"
] |
[((1329, 1405), 'CybORG.Shared.Actions.DiscoverRemoteSystems', 'DiscoverRemoteSystems', ([], {'session': 'session', 'agent': '"""Red"""', 'subnet': 'self.last_subnet'}), "(session=session, agent='Red', subnet=self.last_subnet)\n", (1350, 1405), False, 'from CybORG.Shared.Actions import PrivilegeEscalate, ExploitRemoteService, DiscoverRemoteSystems, Impact, DiscoverNetworkServices, Sleep\n'), ((1665, 1756), 'CybORG.Shared.Actions.DiscoverNetworkServices', 'DiscoverNetworkServices', ([], {'session': 'session', 'agent': '"""Red"""', 'ip_address': 'self.last_ip_address'}), "(session=session, agent='Red', ip_address=self.\n    last_ip_address)\n", (1688, 1756), False, 'from CybORG.Shared.Actions import PrivilegeEscalate, ExploitRemoteService, DiscoverRemoteSystems, Impact, DiscoverNetworkServices, Sleep\n'), ((1842, 1930), 'CybORG.Shared.Actions.ExploitRemoteService', 'ExploitRemoteService', ([], {'session': 'session', 'agent': '"""Red"""', 'ip_address': 'self.last_ip_address'}), "(session=session, agent='Red', ip_address=self.\n    last_ip_address)\n", (1862, 1930), False, 'from CybORG.Shared.Actions import PrivilegeEscalate, ExploitRemoteService, DiscoverRemoteSystems, Impact, DiscoverNetworkServices, Sleep\n'), ((2181, 2247), 'CybORG.Shared.Actions.PrivilegeEscalate', 'PrivilegeEscalate', ([], {'agent': '"""Red"""', 'hostname': 'hostname', 'session': 'session'}), "(agent='Red', hostname=hostname, session=session)\n", (2198, 2247), False, 'from CybORG.Shared.Actions import PrivilegeEscalate, ExploitRemoteService, DiscoverRemoteSystems, Impact, DiscoverNetworkServices, Sleep\n'), ((2467, 2558), 'CybORG.Shared.Actions.DiscoverNetworkServices', 'DiscoverNetworkServices', ([], {'session': 'session', 'agent': '"""Red"""', 'ip_address': 'self.last_ip_address'}), "(session=session, agent='Red', ip_address=self.\n    last_ip_address)\n", (2490, 2558), False, 'from CybORG.Shared.Actions import PrivilegeEscalate, ExploitRemoteService, DiscoverRemoteSystems, Impact, DiscoverNetworkServices, Sleep\n'), ((2790, 2880), 'CybORG.Shared.Actions.ExploitRemoteService', 'ExploitRemoteService', ([], {'session': 'session', 'agent': '"""Red"""', 'ip_address': 'self.target_ip_address'}), "(session=session, agent='Red', ip_address=self.\n    target_ip_address)\n", (2810, 2880), False, 'from CybORG.Shared.Actions import PrivilegeEscalate, ExploitRemoteService, DiscoverRemoteSystems, Impact, DiscoverNetworkServices, Sleep\n'), ((3137, 3203), 'CybORG.Shared.Actions.PrivilegeEscalate', 'PrivilegeEscalate', ([], {'agent': '"""Red"""', 'hostname': 'hostname', 'session': 'session'}), "(agent='Red', hostname=hostname, session=session)\n", (3154, 3203), False, 'from CybORG.Shared.Actions import PrivilegeEscalate, ExploitRemoteService, DiscoverRemoteSystems, Impact, DiscoverNetworkServices, Sleep\n'), ((3398, 3474), 'CybORG.Shared.Actions.DiscoverRemoteSystems', 'DiscoverRemoteSystems', ([], {'subnet': 'self.last_subnet', 'agent': '"""Red"""', 'session': 'session'}), "(subnet=self.last_subnet, agent='Red', session=session)\n", (3419, 3474), False, 'from CybORG.Shared.Actions import PrivilegeEscalate, ExploitRemoteService, DiscoverRemoteSystems, Impact, DiscoverNetworkServices, Sleep\n'), ((3729, 3822), 'CybORG.Shared.Actions.DiscoverNetworkServices', 'DiscoverNetworkServices', ([], {'session': 'session', 'agent': '"""Red"""', 'ip_address': 'self.target_ip_address'}), "(session=session, agent='Red', ip_address=self.\n    target_ip_address)\n", (3752, 3822), False, 'from CybORG.Shared.Actions import PrivilegeEscalate, ExploitRemoteService, DiscoverRemoteSystems, Impact, DiscoverNetworkServices, Sleep\n'), ((4054, 4144), 'CybORG.Shared.Actions.ExploitRemoteService', 'ExploitRemoteService', ([], {'session': 'session', 'agent': '"""Red"""', 'ip_address': 'self.target_ip_address'}), "(session=session, agent='Red', ip_address=self.\n    target_ip_address)\n", (4074, 4144), False, 'from CybORG.Shared.Actions import PrivilegeEscalate, ExploitRemoteService, DiscoverRemoteSystems, Impact, DiscoverNetworkServices, Sleep\n'), ((4402, 4468), 'CybORG.Shared.Actions.PrivilegeEscalate', 'PrivilegeEscalate', ([], {'agent': '"""Red"""', 'hostname': 'hostname', 'session': 'session'}), "(agent='Red', hostname=hostname, session=session)\n", (4419, 4468), False, 'from CybORG.Shared.Actions import PrivilegeEscalate, ExploitRemoteService, DiscoverRemoteSystems, Impact, DiscoverNetworkServices, Sleep\n'), ((4583, 4709), 'CybORG.Shared.Actions.DiscoverNetworkServices', 'DiscoverNetworkServices', ([], {'session': 'session', 'agent': '"""Red"""', 'ip_address': "observation['Op_Server0']['Interface'][0]['IP Address']"}), "(session=session, agent='Red', ip_address=\n    observation['Op_Server0']['Interface'][0]['IP Address'])\n", (4606, 4709), False, 'from CybORG.Shared.Actions import PrivilegeEscalate, ExploitRemoteService, DiscoverRemoteSystems, Impact, DiscoverNetworkServices, Sleep\n'), ((4928, 5033), 'CybORG.Shared.Actions.ExploitRemoteService', 'ExploitRemoteService', ([], {'agent': '"""Red"""', 'session': 'session', 'ip_address': "info[0]['Interface'][0]['IP Address']"}), "(agent='Red', session=session, ip_address=info[0][\n    'Interface'][0]['IP Address'])\n", (4948, 5033), False, 'from CybORG.Shared.Actions import PrivilegeEscalate, ExploitRemoteService, DiscoverRemoteSystems, Impact, DiscoverNetworkServices, Sleep\n'), ((5226, 5296), 'CybORG.Shared.Actions.PrivilegeEscalate', 'PrivilegeEscalate', ([], {'agent': '"""Red"""', 'hostname': '"""Op_Server0"""', 'session': 'session'}), "(agent='Red', hostname='Op_Server0', session=session)\n", (5243, 5296), False, 'from CybORG.Shared.Actions import PrivilegeEscalate, ExploitRemoteService, DiscoverRemoteSystems, Impact, DiscoverNetworkServices, Sleep\n'), ((5393, 5452), 'CybORG.Shared.Actions.Impact', 'Impact', ([], {'agent': '"""Red"""', 'session': 'session', 'hostname': '"""Op_Server0"""'}), "(agent='Red', session=session, hostname='Op_Server0')\n", (5399, 5452), False, 'from CybORG.Shared.Actions import PrivilegeEscalate, ExploitRemoteService, DiscoverRemoteSystems, Impact, DiscoverNetworkServices, Sleep\n')]
|
import time
from .daemon import Daemon
def main():
daemon = Daemon()
while True:
daemon.tick()
files = daemon.get_email_files()
for file in files:
try:
daemon.handle_email(file)
except Exception:
daemon.on_error()
time.sleep(5)
if __name__ == '__main__':
main()
|
[
"time.sleep"
] |
[((314, 327), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (324, 327), False, 'import time\n')]
|
from __future__ import division
import json
from time import time
from random import randint, choice
from threading import Timer
from tornado.ioloop import IOLoop
from tornado.web import Application
from tornado.websocket import WebSocketHandler
class RepeatedTimer(object):
def __init__(self, interval, function, *args, **kwargs):
self._timer = None
self.interval = interval
self.function = function
self.args = args
self.kwargs = kwargs
self.is_running = False
self.start()
def _run(self):
self.is_running = False
self.function(*self.args, **self.kwargs)
self.start()
def start(self):
if not self.is_running:
self._timer = Timer(self.interval, self._run)
self._timer.start()
self.is_running = True
def stop(self):
self._timer.cancel()
self.is_running = False
class FakeRobot(WebSocketHandler):
period = 1 / 10
verbose = False
def open(self):
if self.verbose:
print('WebSocket connection open.')
self.set_nodelay(True)
self.rt = RepeatedTimer(self.period, self.proxy_pub)
def on_message(self, message):
if self.verbose:
print('{}: Received {}'.format(time(), message))
self.handle_command(json.loads(message))
if (message == '{detection:}'):
self.ioloop.add_callback(self.pub_routing_table)
def on_close(self):
if self.verbose:
print('WebSocket closed {}.'.format(self.close_reason))
self.rt.stop()
def proxy_pub(self):
self.ioloop.add_callback(self.pub_state)
def pub_routing_table(self):
        state = {'routing_table': [{'uuid': [4456498, 1347571976, 540555569], 'port_table': [65535, 2], 'services': [{'type': 'Gate', 'id': 1, 'alias': 'gate'}]}, {'uuid': [3932192, 1194612503, 540554032], 'port_table': [3, 1], 'services': [{'type': 'Angle', 'id': 2, 'alias': 'potentiometer_m'}]}, {'uuid': [2949157, 1194612501, 540554032], 'port_table': [65535, 2], 'services': [{'type': 'Gate', 'id': 3, 'alias': 'gate1'}]}]}
        # actually publish the table; the original built the dict but never sent it
        self.write_message(json.dumps(state))
def pub_state(self):
state = {
'services': [
{
'alias': 'my_gate',
'id': 1,
'type': 'Gate',
},
{
'alias': 'my_led',
'id': 2,
'type': 'Color',
},
{
'alias': 'my_servo',
'id': 3,
'type': 'Servo',
},
{
'alias': 'my_button',
'id': 4,
'type': 'State',
'state': choice((0, 1)),
},
{
'alias': 'my_potentiometer',
'id': 5,
'type': 'Angle',
'position': randint(0, 4096),
},
{
'alias': 'my_relay',
'id': 6,
'type': 'relay',
},
{
'alias': 'my_distance',
'id': 7,
'type': 'Distance',
'distance': randint(0, 2000),
},
{
'alias': 'my_dxl_1',
'id': 8,
'type': 'DynamixelMotor',
'position': randint(-180, 180),
},
{
'alias': 'my_dxl_2',
'id': 9,
'type': 'DynamixelMotor',
'position': randint(-180, 180),
},
]
}
self.write_message(json.dumps(state))
def handle_command(self, message):
pass
def check_origin(self, origin):
return True
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--port', type=int, default=9342)
parser.add_argument('--verbose', action='store_true', default=False)
args = parser.parse_args()
loop = IOLoop()
port = args.port
FakeRobot.verbose = args.verbose
FakeRobot.ioloop = loop
app = Application([
(r'/', FakeRobot)
])
app.listen(port)
url = 'ws://{}:{}'.format('127.0.0.1', port)
if args.verbose:
print('Fake robot serving on {}'.format(url))
loop.start()
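# A minimal client sketch for manual testing (assumes the fake robot above is
# already running on the default port):
#
# from tornado import gen
# from tornado.websocket import websocket_connect
#
# @gen.coroutine
# def dump_states(url='ws://127.0.0.1:9342/', n=3):
#     conn = yield websocket_connect(url)
#     for _ in range(n):
#         print((yield conn.read_message()))  # one state dict per period
#     conn.close()
#
# IOLoop.current().run_sync(dump_states)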
|
[
"threading.Timer",
"argparse.ArgumentParser",
"json.loads",
"random.randint",
"tornado.ioloop.IOLoop",
"random.choice",
"json.dumps",
"time.time",
"tornado.web.Application"
] |
[((4029, 4054), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (4052, 4054), False, 'import argparse\n'), ((4229, 4237), 'tornado.ioloop.IOLoop', 'IOLoop', ([], {}), '()\n', (4235, 4237), False, 'from tornado.ioloop import IOLoop\n'), ((4336, 4367), 'tornado.web.Application', 'Application', (["[('/', FakeRobot)]"], {}), "([('/', FakeRobot)])\n", (4347, 4367), False, 'from tornado.web import Application\n'), ((744, 775), 'threading.Timer', 'Timer', (['self.interval', 'self._run'], {}), '(self.interval, self._run)\n', (749, 775), False, 'from threading import Timer\n'), ((1340, 1359), 'json.loads', 'json.loads', (['message'], {}), '(message)\n', (1350, 1359), False, 'import json\n'), ((3837, 3854), 'json.dumps', 'json.dumps', (['state'], {}), '(state)\n', (3847, 3854), False, 'import json\n'), ((1293, 1299), 'time.time', 'time', ([], {}), '()\n', (1297, 1299), False, 'from time import time\n'), ((2794, 2808), 'random.choice', 'choice', (['(0, 1)'], {}), '((0, 1))\n', (2800, 2808), False, 'from random import randint, choice\n'), ((2994, 3010), 'random.randint', 'randint', (['(0)', '(4096)'], {}), '(0, 4096)\n', (3001, 3010), False, 'from random import randint, choice\n'), ((3338, 3354), 'random.randint', 'randint', (['(0)', '(2000)'], {}), '(0, 2000)\n', (3345, 3354), False, 'from random import randint, choice\n'), ((3541, 3559), 'random.randint', 'randint', (['(-180)', '(180)'], {}), '(-180, 180)\n', (3548, 3559), False, 'from random import randint, choice\n'), ((3746, 3764), 'random.randint', 'randint', (['(-180)', '(180)'], {}), '(-180, 180)\n', (3753, 3764), False, 'from random import randint, choice\n')]
|
###################################################
#
# Script to:
# - Load the images and extract the patches
# - Define the neural network
# - define the training
#
##################################################
import numpy as np
import configparser
from keras.utils import multi_gpu_model
from keras.models import Model
from keras.layers import Input, concatenate, Conv2D, MaxPooling2D, UpSampling2D, Reshape, core, Dropout,Add,Convolution2D,merge,Conv3D, MaxPooling3D,Multiply
from keras.optimizers import Adam
from keras.callbacks import ModelCheckpoint, LearningRateScheduler
from keras import backend as K
from keras.utils.vis_utils import plot_model as plot
from keras.optimizers import SGD
import h5py
import sys
sys.path.insert(0, './lib/')
#from help_functions import *
from keras.layers import BatchNormalization,SpatialDropout3D,Reshape,GlobalMaxPooling3D,GlobalAveragePooling2D
#function to obtain data for training/testing (validation)
#from extract_patches import get_data_training
from keras.layers.core import Dropout, Activation
from keras import backend as K
import tensorflow as tf
print(K.backend())
from data_feed import *
from keras import optimizers
#from pre_processing import my_PreProc
import math
import sys
sys.setrecursionlimit(4000)
#Define the neural network
def focal_loss(gamma=2,alpha=0.75):
def focal_loss_fixed(y_true,y_pred):
pt_1=tf.where(tf.equal(y_true,1),y_pred,tf.ones_like(y_pred))
pt_0=tf.where(tf.equal(y_true,0),y_pred,tf.zeros_like(y_pred))
return -K.sum(alpha*K.pow(1.-pt_1,gamma)*K.log(pt_1))-K.sum((1-alpha)*K.pow(pt_0,gamma)*K.log(1.-pt_0))
return focal_loss_fixed
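# Usage sketch: focal_loss() returns a standard Keras loss closure, e.g.
# model.compile(optimizer='adam', loss=focal_loss(gamma=2, alpha=0.75))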
def block_2_conv(input,num_filter):
conv1=Conv2D(num_filter,(3,3),strides=(1,1),padding='same',data_format='channels_first')(input)
conv1_bn=BatchNormalization(axis=1)(conv1)
conv1_relu=Activation('relu')(conv1_bn)
conv2=Conv2D(num_filter,(3,3),strides=(1,1),padding='same',data_format='channels_first')(conv1_relu)
conv2_bn=BatchNormalization(axis=1)(conv2)
conv2_add=Add()([input,conv2_bn])
conv2_relu=Activation('relu')(conv2_add)
return conv2_relu
def block_2_conv3D(input,num_filter):
conv1 = Conv3D(num_filter, (3, 3,3), strides=(1, 1,1), padding='same', data_format='channels_first')(input)
conv1_bn = BatchNormalization(axis=1)(conv1)
conv1_relu = Activation('relu')(conv1_bn)
conv2 = Conv3D(num_filter, (3, 3,3), strides=(1, 1,1), padding='same', data_format='channels_first')(conv1_relu)
conv2_bn = BatchNormalization(axis=1)(conv2)
conv2_add = Add()([input, conv2_bn])
conv2_relu = Activation('relu')(conv2_add)
return conv2_relu
def attention_block(input,iter,depth):
global_pool=GlobalMaxPooling3D(data_format='channels_first')(input)
global_pool1=Reshape((depth,1,1,1))(global_pool)
conv_1x1=Conv3D(depth,(1,1,1),padding='same',data_format='channels_first')(global_pool1)
relu_out=Activation('relu')(conv_1x1)
conv_2x1=Conv3D(depth,(1,1,1),strides=(1,1,1),padding='same',data_format='channels_first')(relu_out)
sigmoid_out=Activation('sigmoid')(conv_2x1)
concat1=sigmoid_out
#print("***********1")
#print(concat1.shape)
for i in range(4-1):
concat1=concatenate([concat1,sigmoid_out],axis=2)
concat2=concat1
for j in range(iter-1):
concat2=concatenate([concat2,concat1],axis=3)
concat3=concat2
for k in range(iter-1):
concat3=concatenate([concat3,concat2],axis=4)
#print("************2")
#print(concat3.shape)
out=Multiply()([input,concat3])
return out
def saliency_map_attention_block(input,depth):
conv_1x1=Conv3D(depth,(1,1,1),padding='same',data_format='channels_first')(input)
relu_out=Activation('relu')(conv_1x1)
conv_2x1=Conv3D(depth,(1,1,1),padding='same',data_format='channels_first')(relu_out)
sigmoid_out=Activation('sigmoid')(conv_2x1)
out1=Multiply()([input,sigmoid_out])
out=Add()([input,out1])
return out
def channel_attnetion_block(low_input,high_input,depth,size):
input=concatenate([low_input,high_input],axis=1)
global_pool=GlobalAveragePooling2D(data_format='channels_first')(input)
global_pool1 = Reshape((2*depth, 1, 1))(global_pool)
conv_1x1 = Conv2D(depth, (1, 1), padding='same', data_format='channels_first')(global_pool1)
relu_out = Activation('relu')(conv_1x1)
conv_2x1 = Conv2D(depth, (1, 1), strides=(1, 1), padding='same', data_format='channels_first')(relu_out)
sigmoid_out = Activation('sigmoid')(conv_2x1)
concat1 = sigmoid_out
for i in range(size-1):
concat1=concatenate([concat1,sigmoid_out],axis=2)
concat2=concat1
for j in range(size-1):
concat2=concatenate([concat2,concat1],axis=3)
out1 = Multiply()([low_input, concat2])
out2=Add()([out1,high_input])
return out2
# F1 score: harmonic mean of precision and sensitivity DICE = 2*TP/(2*TP + FN + FP)
def DiceCoef(y_true, y_pred):
y_true_f = K.flatten(y_true)
y_pred_f = K.flatten(y_pred)
intersection = K.sum(y_true_f*y_pred_f)
return (2.*intersection)/(K.sum(y_true_f) + K.sum(y_pred_f) + 0.00001)
def DiceCoefLoss(y_true, y_pred):
return -DiceCoef(y_true, y_pred)
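# Sanity check: for a perfect prediction y_pred == y_true with N foreground
# pixels, intersection == N, so DiceCoef is about 2N / (N + N) == 1 and
# DiceCoefLoss is about -1; disjoint masks give DiceCoef close to 0.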
def get_unet3D_new_4_fram_2(n_ch,frame,patch_height,patch_width):
inputs = Input(shape=(n_ch, frame,patch_height, patch_width))
conv0 = Conv3D(8, (1, 1,1), padding='same')(inputs)
conv1 = block_2_conv3D(conv0, 8)
## channel attention
#out1=attention_block(conv1,512,8)
    ### feature output
conv1_3d_2d = Conv3D(8, (4, 1, 1), strides=(1, 1, 1), data_format='channels_first')(conv1)
#conv1_3d_2d = Conv3D(8, (4, 1, 1), strides=(1, 1, 1), data_format='channels_first')(out1)
conv1_trans_2d = Reshape((8, 512, 512))(conv1_3d_2d)
conv1_1 = Conv3D(16, (2, 2,2), strides=(1,2,2),padding='same', data_format='channels_first')(conv1)
conv1_1 = BatchNormalization(axis=1)(conv1_1)
conv1_1 = Activation('relu')(conv1_1)
conv2 = block_2_conv3D(conv1_1, 16)
## channel attention
#out2 = attention_block(conv2, 256, 16)
    ### feature output
conv2_3d_2d = Conv3D(16, (4, 1, 1), strides=(1, 1, 1), data_format='channels_first')(conv2)
#conv2_3d_2d = Conv3D(16, (4, 1, 1), strides=(1, 1, 1), data_format='channels_first')(out2)
conv2_trans_2d = Reshape((16, 256, 256))(conv2_3d_2d)
conv2_1 = Conv3D(32, (2, 2,2), strides=(1,2,2), padding='same',data_format='channels_first')(conv2)
conv2_1 = BatchNormalization(axis=1)(conv2_1)
conv2_1 = Activation('relu')(conv2_1)
conv3 = block_2_conv3D(conv2_1, 32)
## channel attention
#out3 = attention_block(conv3, 128, 32)
    ### feature output
conv3_3d_2d = Conv3D(32, (4, 1, 1), strides=(1, 1, 1), data_format='channels_first')(conv3)
#conv3_3d_2d = Conv3D(32, (4, 1, 1), strides=(1, 1, 1), data_format='channels_first')(out3)
conv3_trans_2d = Reshape((32, 128, 128))(conv3_3d_2d)
conv3_1 = Conv3D(64, (2, 2,2), strides=(1,2,2),padding='same', data_format='channels_first')(conv3)
conv3_1 = BatchNormalization(axis=1)(conv3_1)
conv3_1 = Activation('relu')(conv3_1)
conv4 = block_2_conv3D(conv3_1, 64)
##saliency_map
out4_1=saliency_map_attention_block(conv4,64)
## channel attention
#out4 = attention_block(conv4, 64, 64)
out4 = attention_block(out4_1, 64, 64)
    ### feature output
#conv4_3d_2d = Conv3D(64, (4, 1, 1), strides=(1, 1, 1), data_format='channels_first')(conv4)
conv4_3d_2d = Conv3D(64, (4, 1, 1), strides=(1, 1, 1), data_format='channels_first')(out4)
conv4_trans_2d = Reshape((64, 64, 64))(conv4_3d_2d)
conv4_1 = Conv3D(128, (2, 2,2), strides=(1,2,2), padding='same',data_format='channels_first')(conv4)
conv4_1 = BatchNormalization(axis=1)(conv4_1)
conv4_1 = Activation('relu')(conv4_1)
conv5 = block_2_conv3D(conv4_1, 128)
## channel attention
out5 = attention_block(conv5, 32, 128)
    ### feature output
#conv5_3d_2d = Conv3D(128, (4, 1, 1), strides=(1, 1, 1), data_format='channels_first')(conv5)
conv5_3d_2d = Conv3D(128, (4, 1, 1), strides=(1, 1, 1), data_format='channels_first')(out5)
conv5_trans_2d = Reshape((128, 32, 32))(conv5_3d_2d)
conv5_dropout = SpatialDropout3D(0.5,data_format='channels_first')(conv5)
conv5_1 = Conv3D(256, (2, 2,2), strides=(1,2,2), padding='same',data_format='channels_first')(conv5_dropout)
conv5_1 = BatchNormalization(axis=1)(conv5_1)
conv5_1 = Activation('relu')(conv5_1)
conv6 = block_2_conv3D(conv5_1, 256)
## channel attention
out6 = attention_block(conv6, 16, 256)
    ### feature output
#conv6_3d_2d=Conv3D(256,(4,1,1),strides=(1,1,1),data_format='channels_first')(conv6)
conv6_3d_2d = Conv3D(256, (4, 1, 1), strides=(1, 1, 1), data_format='channels_first')(out6)
conv6_trans_2d=Reshape((256,16,16))(conv6_3d_2d)
conv6_dropout = SpatialDropout3D(0.5,data_format='channels_first')(conv6)
conv6_1 = Conv3D(512, (2, 2,2), strides=(1,2,2), padding='same',data_format='channels_first')(conv6_dropout)
conv6_1 = BatchNormalization(axis=1)(conv6_1)
conv6_1 = Activation('relu')(conv6_1)
conv3d_2d=Conv3D(512,(4,1,1),strides=(1,1,1),data_format='channels_first')(conv6_1)
#print(conv3d_2d.shape)
conv_trans_2d=Reshape((512,8,8))(conv3d_2d)
up1 = UpSampling2D(size=(2, 2))(conv_trans_2d)
up1_1 = Conv2D(256, (2, 2), strides=1, padding='same', data_format='channels_first')(up1)
up1_1 = BatchNormalization(axis=1)(up1_1)
up1_1 = Activation('relu')(up1_1)
up1_2 = concatenate([ conv6_trans_2d , up1_1], axis=1)
up1_3 = block_2_conv(up1_2, 512)
up2 = UpSampling2D(size=(2, 2))(up1_3)
up2_1 = Conv2D(128, (2, 2), strides=1, padding='same', data_format='channels_first')(up2)
up2_1 = BatchNormalization(axis=1)(up2_1)
up2_1 = Activation('relu')(up2_1)
up2_2 = concatenate([conv5_trans_2d, up2_1], axis=1)
up2_3 = block_2_conv(up2_2, 256)
up3 = UpSampling2D(size=(2, 2))(up2_3)
up3_1 = Conv2D(64, (2, 2), strides=1, padding='same', data_format='channels_first')(up3)
up3_1 = BatchNormalization(axis=1)(up3_1)
up3_1 = Activation('relu')(up3_1)
up3_2 = concatenate([conv4_trans_2d, up3_1], axis=1)
up3_3 = block_2_conv(up3_2, 128)
up4 = UpSampling2D(size=(2, 2))(up3_3)
up4_1 = Conv2D(32, (2, 2), strides=1, padding='same', data_format='channels_first')(up4)
up4_1 = BatchNormalization(axis=1)(up4_1)
up4_1 = Activation('relu')(up4_1)
up4_2 = concatenate([conv3_trans_2d, up4_1], axis=1)
up4_3 = block_2_conv(up4_2, 64)
up5 = UpSampling2D(size=(2, 2))(up4_3)
up5_1 = Conv2D(16, (2, 2), strides=1, padding='same', data_format='channels_first')(up5)
up5_1 = BatchNormalization(axis=1)(up5_1)
up5_1 = Activation('relu')(up5_1)
up5_2 = concatenate([conv2_trans_2d, up5_1], axis=1)
up5_3 = block_2_conv(up5_2, 32)
up6 = UpSampling2D(size=(2, 2))(up5_3)
up6_1 = Conv2D(8, (2, 2), strides=1, padding='same', data_format='channels_first')(up6)
up6_1 = BatchNormalization(axis=1)(up6_1)
up6_1 = Activation('relu')(up6_1)
up6_2 = concatenate([conv1_trans_2d, up6_1], axis=1)
up6_3 = block_2_conv(up6_2, 16)
outputs = Conv2D(1, (1, 1), activation='sigmoid')(up6_3)
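# 1x1 sigmoid head: one per-pixel foreground probability map for the binary mask.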
model = Model(inputs=inputs, outputs=outputs)
#model.compile(optimizer='sgd', loss=DiceCoefLoss, metrics=[DiceCoef])
return model
def get_unet3D_new_4_fram_2_new(n_ch, frame, patch_height, patch_width):
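# Same encoder/decoder layout as the function above, but the saliency branch and
# most encoder channel-attention blocks are disabled (only the deepest level keeps
# attention), and the decoder fuses skips with channel_attnetion_block instead of
# plain concatenation.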
inputs = Input(shape=(n_ch, frame, patch_height, patch_width))
conv0 = Conv3D(8, (1, 1,1), padding='same')(inputs)
conv1 = block_2_conv3D(conv0, 8)
## channel attention
#out1=attention_block(conv1,512,8)
### feature output
conv1_3d_2d = Conv3D(8, (4, 1, 1), strides=(1, 1, 1), data_format='channels_first')(conv1)
#conv1_3d_2d = Conv3D(8, (4, 1, 1), strides=(1, 1, 1), data_format='channels_first')(out1)
conv1_trans_2d = Reshape((8, 512, 512))(conv1_3d_2d)
conv1_1 = Conv3D(16, (2, 2, 2), strides=(1, 2, 2), padding='same', data_format='channels_first')(conv1)
conv1_1 = BatchNormalization(axis=1)(conv1_1)
conv1_1 = Activation('relu')(conv1_1)
conv2 = block_2_conv3D(conv1_1, 16)
## channel attention
#out2 = attention_block(conv2, 256, 16)
### feature output
conv2_3d_2d = Conv3D(16, (4, 1, 1), strides=(1, 1, 1), data_format='channels_first')(conv2)
#conv2_3d_2d = Conv3D(16, (4, 1, 1), strides=(1, 1, 1), data_format='channels_first')(out2)
conv2_trans_2d = Reshape((16, 256, 256))(conv2_3d_2d)
conv2_1 = Conv3D(32, (2, 2, 2), strides=(1, 2, 2), padding='same', data_format='channels_first')(conv2)
conv2_1 = BatchNormalization(axis=1)(conv2_1)
conv2_1 = Activation('relu')(conv2_1)
conv3 = block_2_conv3D(conv2_1, 32)
## channel attention
#out3 = attention_block(conv3, 128, 32)
### feature output
conv3_3d_2d = Conv3D(32, (4, 1, 1), strides=(1, 1, 1), data_format='channels_first')(conv3)
#conv3_3d_2d = Conv3D(32, (4, 1, 1), strides=(1, 1, 1), data_format='channels_first')(out3)
conv3_trans_2d = Reshape((32, 128, 128))(conv3_3d_2d)
conv3_1 = Conv3D(64, (2, 2, 2), strides=(1, 2, 2), padding='same', data_format='channels_first')(conv3)
conv3_1 = BatchNormalization(axis=1)(conv3_1)
conv3_1 = Activation('relu')(conv3_1)
conv4 = block_2_conv3D(conv3_1, 64)
## saliency map
#out4_1=saliency_map_attention_block(conv4,64)
## channel attention
#out4 = attention_block(conv4, 64, 64)
#out4 = attention_block(out4_1, 64, 64)
### feature output
#conv4_3d_2d = Conv3D(64, (4, 1, 1), strides=(1, 1, 1), data_format='channels_first')(conv4)
conv4_3d_2d = Conv3D(64, (4, 1, 1), strides=(1, 1, 1), data_format='channels_first')(conv4)
conv4_trans_2d = Reshape((64, 64, 64))(conv4_3d_2d)
conv4_1 = Conv3D(128, (2, 2, 2), strides=(1, 2, 2), padding='same', data_format='channels_first')(conv4)
conv4_1 = BatchNormalization(axis=1)(conv4_1)
conv4_1 = Activation('relu')(conv4_1)
conv5 = block_2_conv3D(conv4_1, 128)
## channel attention
#out5 = attention_block(conv5, 32, 128)
### feature output
#conv5_3d_2d = Conv3D(128, (4, 1, 1), strides=(1, 1, 1), data_format='channels_first')(conv5)
conv5_3d_2d = Conv3D(128, (4, 1, 1), strides=(1, 1, 1), data_format='channels_first')(conv5)
conv5_trans_2d = Reshape((128, 32, 32))(conv5_3d_2d)
conv5_dropout = SpatialDropout3D(0.5, data_format='channels_first')(conv5)
conv5_1 = Conv3D(256, (2, 2, 2), strides=(1, 2, 2), padding='same', data_format='channels_first')(conv5_dropout)
conv5_1 = BatchNormalization(axis=1)(conv5_1)
conv5_1 = Activation('relu')(conv5_1)
conv6 = block_2_conv3D(conv5_1, 256)
## channel attention
out6 = attention_block(conv6, 16, 256)
### feature output
#conv6_3d_2d=Conv3D(256,(4,1,1),strides=(1,1,1),data_format='channels_first')(conv6)
conv6_3d_2d = Conv3D(256, (4, 1, 1), strides=(1, 1, 1), data_format='channels_first')(out6)
conv6_trans_2d = Reshape((256, 16, 16))(conv6_3d_2d)
conv6_dropout = SpatialDropout3D(0.5, data_format='channels_first')(conv6)
conv6_1 = Conv3D(512, (2, 2, 2), strides=(1, 2, 2), padding='same', data_format='channels_first')(conv6_dropout)
conv6_1 = BatchNormalization(axis=1)(conv6_1)
conv6_1 = Activation('relu')(conv6_1)
conv3d_2d = Conv3D(512, (4, 1, 1), strides=(1, 1, 1), data_format='channels_first')(conv6_1)
#print(conv3d_2d.shape)
conv_trans_2d = Reshape((512, 8, 8))(conv3d_2d)
up1 = UpSampling2D(size=(2, 2))(conv_trans_2d)
up1_1 = Conv2D(256, (2, 2), strides=1, padding='same', data_format='channels_first')(up1)
up1_1 = BatchNormalization(axis=1)(up1_1)
up1_1 = Activation('relu')(up1_1)
up1_2 = channel_attnetion_block(conv6_trans_2d, up1_1, 256, 16)
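# channel_attnetion_block (defined earlier in this file) replaces concatenation
# here; judging by the halved filter counts passed to block_2_conv below, it
# presumably fuses the two inputs without doubling the channel dimension.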
#up1_2 = concatenate([ conv6_trans_2d , up1_1], axis=1)
up1_3 = block_2_conv(up1_2, 256)
up2 = UpSampling2D(size=(2, 2))(up1_3)
up2_1 = Conv2D(128, (2, 2), strides=1, padding='same', data_format='channels_first')(up2)
up2_1 = BatchNormalization(axis=1)(up2_1)
up2_1 = Activation('relu')(up2_1)
up2_2 = channel_attnetion_block(conv5_trans_2d, up2_1, 128, 32)
#up2_2 = concatenate([conv5_trans_2d, up2_1], axis=1)
up2_3 = block_2_conv(up2_2, 128)
up3 = UpSampling2D(size=(2, 2))(up2_3)
up3_1 = Conv2D(64, (2, 2), strides=1, padding='same', data_format='channels_first')(up3)
up3_1 = BatchNormalization(axis=1)(up3_1)
up3_1 = Activation('relu')(up3_1)
up3_2 = channel_attnetion_block(conv4_trans_2d, up3_1, 64, 64)
#up3_2 = concatenate([conv4_trans_2d, up3_1], axis=1)
up3_3 = block_2_conv(up3_2, 64)
up4 = UpSampling2D(size=(2, 2))(up3_3)
up4_1 = Conv2D(32, (2, 2), strides=1, padding='same', data_format='channels_first')(up4)
up4_1 = BatchNormalization(axis=1)(up4_1)
up4_1 = Activation('relu')(up4_1)
up4_2 = channel_attnetion_block(conv3_trans_2d, up4_1, 32, 128)
#up4_2 = concatenate([conv3_trans_2d, up4_1], axis=1)
up4_3 = block_2_conv(up4_2, 32)
up5 = UpSampling2D(size=(2, 2))(up4_3)
up5_1 = Conv2D(16, (2, 2), strides=1, padding='same', data_format='channels_first')(up5)
up5_1 = BatchNormalization(axis=1)(up5_1)
up5_1 = Activation('relu')(up5_1)
up5_2 = channel_attnetion_block(conv2_trans_2d, up5_1, 16, 256)
# up5_2 = concatenate([conv2_trans_2d, up5_1], axis=1)
up5_3 = block_2_conv(up5_2, 16)
up6 = UpSampling2D(size=(2, 2))(up5_3)
up6_1 = Conv2D(8, (2, 2), strides=1, padding='same', data_format='channels_first')(up6)
up6_1 = BatchNormalization(axis=1)(up6_1)
up6_1 = Activation('relu')(up6_1)
up6_2 = channel_attnetion_block(conv1_trans_2d, up6_1, 8, 512)
#up6_2 = concatenate([conv1_trans_2d, up6_1], axis=1)
up6_3 = block_2_conv(up6_2, 8)
outputs = Conv2D(1, (1, 1), activation='sigmoid')(up6_3)
model = Model(inputs=inputs, outputs=outputs)
#model.compile(optimizer='sgd', loss=DiceCoefLoss, metrics=[DiceCoef])
return model
#========= Load settings from Config file
config = configparser.RawConfigParser()
config.read('configuration.txt')
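# configuration.txt is expected to provide at least the following keys
# (inferred from the config.get calls below):
#   [data paths]         path_local, train_imgs_original, train_groundTruth
#   [experiment name]    name
#   [training settings]  N_epochs, batch_size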
# path to the datasets
path_data = config.get('data paths', 'path_local')
#Experiment name
name_experiment = config.get('experiment name', 'name')
#training settings
N_epochs = int(config.get('training settings', 'N_epochs'))
_batchSize = int(config.get('training settings', 'batch_size'))
n_ch = 1
frame = 4
patch_height = 512
patch_width = 512
model = get_unet3D_new_4_fram_2_new(n_ch, frame, patch_height, patch_width)  # the U-net model
## data parallel
#parallel_model=multi_gpu_model(model,gpus=2)
parallel_model = model
sgd = optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
parallel_model.compile(optimizer=sgd, loss=DiceCoefLoss, metrics=[DiceCoef])
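# DiceCoefLoss / DiceCoef (defined earlier in this script) give the Dice overlap
# loss and metric, the usual choice for binary segmentation masks.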
#parallel_model.compile(optimizer='sgd', loss=[focal_loss(gamma=2,alpha=0.25)], metrics=[DiceCoef])
print ("Check: final output of the network:")
print (parallel_model.output_shape)
#plot(model, to_file='./'+name_experiment+'/'+name_experiment + '_model.png') #check how the model looks like
json_string = model.to_json()
with open('./'+name_experiment+'/'+name_experiment +'_architecture.json', 'w') as f:
    f.write(json_string)
new_train_imgs_original = path_data + config.get('data paths', 'train_imgs_original')
new_train_imgs_groundTruth = path_data + config.get('data paths', 'train_groundTruth')
train_data_ori = h5py.File(new_train_imgs_original, 'r')
train_data_gt = h5py.File(new_train_imgs_groundTruth, 'r')
train_imgs_original = np.array(train_data_ori['image'])
train_groundTruth = np.array(train_data_gt['image'])
train_imgs = train_imgs_original / np.max(train_imgs_original)
train_masks = train_groundTruth / np.max(train_groundTruth)
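# Normalizing by the per-array maximum rescales both images and masks to [0, 1].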
#check masks are within 0-1
#assert(np.min(train_masks)==0 and np.max(train_masks)==1)
print("imgs max value:")
print(np.max(train_imgs))
print("imgs min value")
print(np.min(train_imgs))
print("label max value")
print(np.max(train_masks))
print("label min value")
print(np.min(train_masks))
print ("\ntrain images/masks shape:")
print (train_imgs.shape)
print ("train images range (min-max): " +str(np.min(train_imgs)) +' - '+str(np.max(train_imgs)))
print ("train masks are within 0-1\n")
#============ Training ==================================
checkpoint_test = ModelCheckpoint(filepath='./'+name_experiment+'/'+name_experiment +'_best_weights.h5', monitor='val_loss', save_best_only=True, save_weights_only=True) # save weights whenever the validation loss improves
checkpoint = ModelCheckpoint(filepath='./'+name_experiment+'/'+name_experiment + "bestTrainWeight" + ".h5", monitor='loss', save_best_only=True, save_weights_only=True)
def step_decay(epoch):
    # step decay: start from 0.01 (the Keras SGD default) and divide by 10 every 200 epochs
    lrate = 0.01 * (0.1 ** (epoch // 200))
    return lrate
lrate_drop = LearningRateScheduler(step_decay)
keepPctOriginal = 0.5
hflip = True
vflip = True
iter_times = 250
num = train_imgs_original.shape[0]
np.random.seed(0)
index = list(np.random.permutation(num))
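# Fixed seed + permutation: a reproducible shuffle before the 174/45
# train/validation split below.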
_X_train = train_imgs[index][0:174]
_Y_train = train_masks[index][0:174]
print(_X_train.shape)
print(_Y_train.shape)
_X_vali = train_imgs[index][174:219]
_Y_vali = train_masks[index][174:219]
print(_X_vali.shape)
print(_Y_vali.shape)
def ImgGenerator():
    for image in train_generator(_X_train, _Y_train, _batchSize, iter_times, _keepPctOriginal=keepPctOriginal,
                                  _intensity=INTENSITY_FACTOR, _hflip=hflip, _vflip=vflip):
        yield image
def valiGenerator():
    for image in validation_generator(_X_vali, _Y_vali, _batchSize):
        yield image
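# fit_generator needs generators that yield indefinitely; both wrappers delegate
# to train_generator / validation_generator (presumably imported from ./lib/),
# which handle augmentation and batching.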
# derive steps from the actual 174/45 split above
stepsPerEpoch = math.ceil(_X_train.shape[0] / _batchSize)
validationSteps = math.ceil(_X_vali.shape[0] / _batchSize)
history = parallel_model.fit_generator(ImgGenerator(), verbose=2, workers=1,
validation_data=valiGenerator(),
steps_per_epoch=stepsPerEpoch, epochs=N_epochs,
validation_steps=validationSteps,
callbacks=[lrate_drop,checkpoint,checkpoint_test])
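# Callbacks: the step-decay LR schedule plus two checkpoints, tracking the best
# training loss and the best validation loss respectively.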
model.summary()
#========== Save and test the last model ===================
model.save_weights('./'+name_experiment+'/'+name_experiment +'_last_weights.h5', overwrite=True)
|
[
"numpy.random.seed",
"tensorflow.zeros_like",
"keras.models.Model",
"keras.layers.Input",
"keras.callbacks.LearningRateScheduler",
"keras.layers.concatenate",
"sys.setrecursionlimit",
"keras.layers.Reshape",
"keras.backend.pow",
"keras.optimizers.SGD",
"keras.backend.flatten",
"configparser.RawConfigParser",
"keras.layers.core.Activation",
"keras.layers.GlobalAveragePooling2D",
"numpy.max",
"keras.layers.GlobalMaxPooling3D",
"keras.layers.Multiply",
"tensorflow.equal",
"h5py.File",
"math.ceil",
"keras.callbacks.ModelCheckpoint",
"keras.backend.backend",
"tensorflow.ones_like",
"numpy.min",
"keras.layers.Conv2D",
"keras.layers.UpSampling2D",
"numpy.random.permutation",
"keras.layers.SpatialDropout3D",
"keras.layers.BatchNormalization",
"keras.backend.sum",
"sys.path.insert",
"keras.layers.Add",
"keras.layers.Conv3D",
"keras.backend.log",
"numpy.array"
]
data_format='channels_first')\n", (17423, 17492), False, 'from keras.layers import Input, concatenate, Conv2D, MaxPooling2D, UpSampling2D, Reshape, core, Dropout, Add, Convolution2D, merge, Conv3D, MaxPooling3D, Multiply\n'), ((17511, 17537), 'keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': '(1)'}), '(axis=1)\n', (17529, 17537), False, 'from keras.layers import BatchNormalization, SpatialDropout3D, Reshape, GlobalMaxPooling3D, GlobalAveragePooling2D\n'), ((17558, 17576), 'keras.layers.core.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (17568, 17576), False, 'from keras.layers.core import Dropout, Activation\n'), ((17757, 17782), 'keras.layers.UpSampling2D', 'UpSampling2D', ([], {'size': '(2, 2)'}), '(size=(2, 2))\n', (17769, 17782), False, 'from keras.layers import Input, concatenate, Conv2D, MaxPooling2D, UpSampling2D, Reshape, core, Dropout, Add, Convolution2D, merge, Conv3D, MaxPooling3D, Multiply\n'), ((17803, 17877), 'keras.layers.Conv2D', 'Conv2D', (['(8)', '(2, 2)'], {'strides': '(1)', 'padding': '"""same"""', 'data_format': '"""channels_first"""'}), "(8, (2, 2), strides=1, padding='same', data_format='channels_first')\n", (17809, 17877), False, 'from keras.layers import Input, concatenate, Conv2D, MaxPooling2D, UpSampling2D, Reshape, core, Dropout, Add, Convolution2D, merge, Conv3D, MaxPooling3D, Multiply\n'), ((17896, 17922), 'keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': '(1)'}), '(axis=1)\n', (17914, 17922), False, 'from keras.layers import BatchNormalization, SpatialDropout3D, Reshape, GlobalMaxPooling3D, GlobalAveragePooling2D\n'), ((17943, 17961), 'keras.layers.core.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (17953, 17961), False, 'from keras.layers.core import Dropout, Activation\n'), ((18144, 18183), 'keras.layers.Conv2D', 'Conv2D', (['(1)', '(1, 1)'], {'activation': '"""sigmoid"""'}), "(1, (1, 1), activation='sigmoid')\n", (18150, 18183), False, 'from keras.layers import Input, concatenate, Conv2D, MaxPooling2D, UpSampling2D, Reshape, core, Dropout, Add, Convolution2D, merge, Conv3D, MaxPooling3D, Multiply\n'), ((1447, 1466), 'tensorflow.equal', 'tf.equal', (['y_true', '(1)'], {}), '(y_true, 1)\n', (1455, 1466), True, 'import tensorflow as tf\n'), ((1473, 1493), 'tensorflow.ones_like', 'tf.ones_like', (['y_pred'], {}), '(y_pred)\n', (1485, 1493), True, 'import tensorflow as tf\n'), ((1518, 1537), 'tensorflow.equal', 'tf.equal', (['y_true', '(0)'], {}), '(y_true, 0)\n', (1526, 1537), True, 'import tensorflow as tf\n'), ((1544, 1565), 'tensorflow.zeros_like', 'tf.zeros_like', (['y_pred'], {}), '(y_pred)\n', (1557, 1565), True, 'import tensorflow as tf\n'), ((20563, 20581), 'numpy.max', 'np.max', (['train_imgs'], {}), '(train_imgs)\n', (20569, 20581), True, 'import numpy as np\n'), ((5214, 5229), 'keras.backend.sum', 'K.sum', (['y_true_f'], {}), '(y_true_f)\n', (5219, 5229), True, 'from keras import backend as K\n'), ((5232, 5247), 'keras.backend.sum', 'K.sum', (['y_pred_f'], {}), '(y_pred_f)\n', (5237, 5247), True, 'from keras import backend as K\n'), ((1664, 1681), 'keras.backend.log', 'K.log', (['(1.0 - pt_0)'], {}), '(1.0 - pt_0)\n', (1669, 1681), True, 'from keras import backend as K\n'), ((20532, 20550), 'numpy.min', 'np.min', (['train_imgs'], {}), '(train_imgs)\n', (20538, 20550), True, 'import numpy as np\n'), ((1617, 1628), 'keras.backend.log', 'K.log', (['pt_1'], {}), '(pt_1)\n', (1622, 1628), True, 'from keras import backend as K\n'), ((1646, 1664), 'keras.backend.pow', 'K.pow', 
(['pt_0', 'gamma'], {}), '(pt_0, gamma)\n', (1651, 1664), True, 'from keras import backend as K\n'), ((1596, 1620), 'keras.backend.pow', 'K.pow', (['(1.0 - pt_1)', 'gamma'], {}), '(1.0 - pt_1, gamma)\n', (1601, 1620), True, 'from keras import backend as K\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import os
from loglizer import InvariantsMiner, PCA, IsolationForest, OneClassSVM, LogClustering, LR
from loglizer import dataloader, preprocessing
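# Usage sketch (script and path names here are illustrative, not from the repo):
#   python benchmark.py --output_dir=~/data/processed/ --dataset_name=HDFS --baselines=pca_iforest_lr
# Note that --baselines takes an underscore-separated list of model names.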
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--output_dir", metavar="DIR", help="output directory")
parser.add_argument("--dataset_name", help="which dataset to use")
    parser.add_argument('--baselines', type=str, help='underscore-separated list of: im pca iforest svm logcluster lr')
args = parser.parse_args()
print("select baselines", args)
selected_baselines = args.baselines.split('_')
    output_dir = os.path.expanduser(args.output_dir + args.dataset_name + "/")
    (x_train, y_train), (x_test, y_test) = dataloader.load_data(data_dir=output_dir)
feature_extractor = preprocessing.FeatureExtractor()
x_train = feature_extractor.fit_transform(x_train)
x_test = feature_extractor.transform(x_test)
if 'im' in selected_baselines:
print("="*20 + " Model: InvariantsMiner " + "="*20)
epsilon = 0.5 # threshold for estimating invariant space
model = InvariantsMiner(epsilon=epsilon)
model.fit(x_train)
print('Train validation:')
precision, recall, f1 = model.evaluate(x_train, y_train)
print('Test validation:')
precision, recall, f1 = model.evaluate(x_test, y_test)
if 'pca' in selected_baselines:
print("="*20 + " Model: PCA " + "="*20)
model = PCA(n_components=0.95, threshold=50, c_alpha=3.2905)
model.fit(x_train)
print('Train validation:')
precision, recall, f1 = model.evaluate(x_train, y_train)
print('Test validation:')
precision, recall, f1 = model.evaluate(x_test, y_test)
if 'iforest' in selected_baselines:
print("="*20 + " Model: IsolationForest " + "="*20)
model = IsolationForest(n_estimators=100, max_samples='auto', contamination='auto', random_state=88)
model.fit(x_train)
print('Train validation:')
precision, recall, f1 = model.evaluate(x_train, y_train)
print('Test validation:')
precision, recall, f1 = model.evaluate(x_test, y_test)
if 'svm' in selected_baselines:
print("="*20 + " Model: SVM " + "="*20)
model = OneClassSVM()
model.fit(x_train, y_train)
print('Train validation:')
precision, recall, f1 = model.evaluate(x_train, y_train)
print('Test validation:')
precision, recall, f1 = model.evaluate(x_test, y_test)
if 'logcluster' in selected_baselines:
print("="*20 + " Model: LogClustering " + "="*20)
max_dist = 0.3 # the threshold to stop the clustering process
anomaly_threshold = 0.3 # the threshold for anomaly detection
model = LogClustering(max_dist=max_dist, anomaly_threshold=anomaly_threshold)
model.fit(x_train[y_train == 0, :]) # Use only normal samples for training
print('Train validation:')
precision, recall, f1 = model.evaluate(x_train, y_train)
print('Test validation:')
precision, recall, f1 = model.evaluate(x_test, y_test)
if 'lr' in selected_baselines:
print("="*20 + " Model: LR " + "="*20)
model = LR()
model.fit(x_train, y_train)
print('Train validation:')
precision, recall, f1 = model.evaluate(x_train, y_train)
print('Test validation:')
precision, recall, f1 = model.evaluate(x_test, y_test)
if __name__ == "__main__":
main()
|
[
"argparse.ArgumentParser",
"loglizer.IsolationForest",
"loglizer.InvariantsMiner",
"loglizer.LR",
"loglizer.preprocessing.FeatureExtractor",
"loglizer.dataloader.load_data",
"loglizer.PCA",
"loglizer.LogClustering",
"os.path.expanduser",
"loglizer.OneClassSVM"
] |
[((238, 263), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (261, 263), False, 'import argparse\n'), ((651, 712), 'os.path.expanduser', 'os.path.expanduser', (["(args.output_dir + args.dataset_name + '/')"], {}), "(args.output_dir + args.dataset_name + '/')\n", (669, 712), False, 'import os\n'), ((756, 796), 'loglizer.dataloader.load_data', 'dataloader.load_data', ([], {'data_dir': 'ouput_dir'}), '(data_dir=ouput_dir)\n', (776, 796), False, 'from loglizer import dataloader, preprocessing\n'), ((822, 854), 'loglizer.preprocessing.FeatureExtractor', 'preprocessing.FeatureExtractor', ([], {}), '()\n', (852, 854), False, 'from loglizer import dataloader, preprocessing\n'), ((1137, 1169), 'loglizer.InvariantsMiner', 'InvariantsMiner', ([], {'epsilon': 'epsilon'}), '(epsilon=epsilon)\n', (1152, 1169), False, 'from loglizer import InvariantsMiner, PCA, IsolationForest, OneClassSVM, LogClustering, LR\n'), ((1495, 1547), 'loglizer.PCA', 'PCA', ([], {'n_components': '(0.95)', 'threshold': '(50)', 'c_alpha': '(3.2905)'}), '(n_components=0.95, threshold=50, c_alpha=3.2905)\n', (1498, 1547), False, 'from loglizer import InvariantsMiner, PCA, IsolationForest, OneClassSVM, LogClustering, LR\n'), ((1889, 1985), 'loglizer.IsolationForest', 'IsolationForest', ([], {'n_estimators': '(100)', 'max_samples': '"""auto"""', 'contamination': '"""auto"""', 'random_state': '(88)'}), "(n_estimators=100, max_samples='auto', contamination='auto',\n random_state=88)\n", (1904, 1985), False, 'from loglizer import InvariantsMiner, PCA, IsolationForest, OneClassSVM, LogClustering, LR\n'), ((2307, 2320), 'loglizer.OneClassSVM', 'OneClassSVM', ([], {}), '()\n', (2318, 2320), False, 'from loglizer import InvariantsMiner, PCA, IsolationForest, OneClassSVM, LogClustering, LR\n'), ((2814, 2883), 'loglizer.LogClustering', 'LogClustering', ([], {'max_dist': 'max_dist', 'anomaly_threshold': 'anomaly_threshold'}), '(max_dist=max_dist, anomaly_threshold=anomaly_threshold)\n', (2827, 2883), False, 'from loglizer import InvariantsMiner, PCA, IsolationForest, OneClassSVM, LogClustering, LR\n'), ((3264, 3268), 'loglizer.LR', 'LR', ([], {}), '()\n', (3266, 3268), False, 'from loglizer import InvariantsMiner, PCA, IsolationForest, OneClassSVM, LogClustering, LR\n')]
|
#
# Classes representing available types and their P4 equivalent.
#
from typing import Dict, List, Tuple
import ctypes
from functools import lru_cache
class KnownType:
    """
    Base class of available types. Subclasses act as namespaces: their methods
    are called on the class itself (never on instances), so they take no
    ``self`` parameter.
    """
    pass
class uint8_t(KnownType):
def get_p4_type() -> str:
return 'bit<8>'
def get_size() -> int:
return 1
def to_p4_literal(v):
return str(uint8_t.cast_value(v))
def cast_value(v):
if type(v)==int:
return ctypes.c_uint8(v).value
        raise Exception('Cannot recognize value {}'.format(v))
class uint16_t(KnownType):
def get_p4_type() -> str:
return 'bit<16>'
def get_size() -> int:
return 2
def to_p4_literal(v):
return str(uint16_t.cast_value(v))
def cast_value(v):
if type(v)==int:
return ctypes.c_uint16(v).value
        raise Exception('Cannot recognize value {}'.format(v))
class uint32_t(KnownType):
def get_p4_type() -> str:
return 'bit<32>'
def get_size() -> int:
return 4
def to_p4_literal(v):
return str(uint32_t.cast_value(v))
def cast_value(v):
if type(v)==int:
return ctypes.c_uint32(v).value
        raise Exception('Cannot recognize value {}'.format(v))
class uint64_t(KnownType):
def get_p4_type() -> str:
return 'bit<64>'
def get_size() -> int:
return 8
def to_p4_literal(v):
return str(uint64_t.cast_value(v))
def cast_value(v):
if type(v)==int:
return ctypes.c_uint64(v).value
        raise Exception('Cannot recognize value {}'.format(v))
class bool_t(KnownType):
def get_p4_type() -> str:
return 'BOOL_T' # defined as bit<8>
def get_size() -> int:
return 1
def to_p4_literal(v):
return str(bool_t.cast_value(v))
def cast_value(v):
if v==True:
return 1
if v==False:
return 0
        raise Exception('Cannot recognize value {}'.format(v))
def padding_t(width:int):
if width<=0:
raise ValueError('width must be positive')
class hidden_padding_t(KnownType):
def get_p4_type() -> str:
return 'bit<{}>'.format(width*8)
def get_size() -> int:
return width
def to_p4_literal(v):
            raise Exception('Padding variables cannot be translated to literals.')
def cast_value(v):
raise Exception('Padding variables do not have values.')
return hidden_padding_t
@lru_cache(maxsize=None)
def string_t(width:int):
if width<=0:
raise ValueError('width must be positive')
class hidden_string_t(KnownType):
def get_p4_type() -> str:
return 'bit<{}>'.format(width*8)
def get_size() -> int:
return width
def to_p4_literal(v: bytes):
return '0x'+v.hex()
def cast_value(v: bytes):
return v[-width:]
return hidden_string_t
def hdr_len(description: List[Tuple[str, KnownType]]):
'''
Returns the length of the described header in bytes. It does NOT consider alignments.
'''
return sum(map(lambda p: p[1].get_size(), description))
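

# Illustrative usage sketch (an assumption for demonstration, not part of the module API):
if __name__ == '__main__':
    # An 8-byte header: two 16-bit ports followed by a 4-byte string field.
    description = [('src_port', uint16_t), ('dst_port', uint16_t), ('tag', string_t(4))]
    assert hdr_len(description) == 8
    assert uint16_t.cast_value(70000) == 4464  # wraps modulo 2**16
    assert string_t(4).to_p4_literal(b'abcd') == '0x61626364'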
|
[
"ctypes.c_uint8",
"ctypes.c_uint16",
"functools.lru_cache",
"ctypes.c_uint64",
"ctypes.c_uint32"
] |
[((2624, 2647), 'functools.lru_cache', 'lru_cache', ([], {'maxsize': 'None'}), '(maxsize=None)\n', (2633, 2647), False, 'from functools import lru_cache\n'), ((498, 515), 'ctypes.c_uint8', 'ctypes.c_uint8', (['v'], {}), '(v)\n', (512, 515), False, 'import ctypes\n'), ((866, 884), 'ctypes.c_uint16', 'ctypes.c_uint16', (['v'], {}), '(v)\n', (881, 884), False, 'import ctypes\n'), ((1235, 1253), 'ctypes.c_uint32', 'ctypes.c_uint32', (['v'], {}), '(v)\n', (1250, 1253), False, 'import ctypes\n'), ((1604, 1622), 'ctypes.c_uint64', 'ctypes.c_uint64', (['v'], {}), '(v)\n', (1619, 1622), False, 'import ctypes\n')]
|
"""Tests for gdrive_sync tasks"""
from datetime import datetime
import pytest
import pytz
from gdrive_sync import tasks
from gdrive_sync.conftest import LIST_FILE_RESPONSES, LIST_VIDEO_RESPONSES
from gdrive_sync.constants import (
DRIVE_API_FILES,
DRIVE_FILE_FIELDS,
DRIVE_FOLDER_FILES_FINAL,
DRIVE_FOLDER_VIDEOS_FINAL,
)
from gdrive_sync.factories import DriveApiQueryTrackerFactory, DriveFileFactory
from gdrive_sync.models import DriveFile
from gdrive_sync.tasks import (
create_resource_from_gdrive,
import_recent_files,
import_website_files,
transcode_drive_file_video,
)
from websites.factories import WebsiteFactory
pytestmark = pytest.mark.django_db
@pytest.mark.parametrize("shared_id", [None, "testDrive"])
@pytest.mark.parametrize("drive_creds", [None, '{"key": "value"}'])
def test_stream_drive_file_to_s3(settings, mocker, shared_id, drive_creds):
""" File should be streamed only if required settings are present"""
settings.DRIVE_SHARED_ID = shared_id
settings.DRIVE_SERVICE_ACCOUNT_CREDS = drive_creds
mock_stream = mocker.patch("gdrive_sync.tasks.api.stream_to_s3")
drive_file = DriveFileFactory.create()
tasks.stream_drive_file_to_s3.delay(drive_file.file_id)
assert mock_stream.call_count == (1 if shared_id and drive_creds else 0)
@pytest.mark.parametrize("shared_id", [None, "testDrive"])
@pytest.mark.parametrize("drive_creds", [None, '{"key": "value"}'])
def test_create_gdrive_folders(settings, mocker, shared_id, drive_creds):
""" Folder should be created if settings are present"""
settings.DRIVE_SHARED_ID = shared_id
settings.DRIVE_SERVICE_ACCOUNT_CREDS = drive_creds
mock_create_folder = mocker.patch("gdrive_sync.tasks.api.create_gdrive_folders")
tasks.create_gdrive_folders.delay("test")
assert mock_create_folder.call_count == (1 if shared_id and drive_creds else 0)
def test_transcode_drive_file_video(mocker):
""" transcode_drive_file_video should create Video object and call create_media_convert_job"""
mock_transcode_call = mocker.patch("gdrive_sync.tasks.transcode_gdrive_video")
drive_file = DriveFileFactory.create()
transcode_drive_file_video.delay(drive_file.file_id)
mock_transcode_call.assert_called_once_with(drive_file)
# pylint:disable=too-many-arguments, too-many-locals
@pytest.mark.parametrize(
"arg_last_dt",
[None, datetime.strptime("2021-01-01", "%Y-%m-%d").replace(tzinfo=pytz.UTC)],
)
@pytest.mark.parametrize(
"tracker_last_dt",
[None, datetime.strptime("2021-02-02", "%Y-%m-%d").replace(tzinfo=pytz.UTC)],
)
@pytest.mark.parametrize(
"parent_folder,parent_folder_in_ancestors",
[(None, False), ("parent", True), ("parent", False)],
)
@pytest.mark.parametrize("same_checksum", [True, False])
def test_import_recent_files_videos(
settings,
mocker,
mocked_celery,
arg_last_dt,
tracker_last_dt,
parent_folder,
parent_folder_in_ancestors,
same_checksum,
):
"""import_recent_files should created expected video objects and call s3 tasks"""
mocker.patch("gdrive_sync.tasks.is_gdrive_enabled", return_value=True)
settings.DRIVE_SHARED_ID = "test_drive"
settings.DRIVE_UPLOADS_PARENT_FOLDER_ID = parent_folder
website = WebsiteFactory.create()
DriveFileFactory.create(
file_id=LIST_VIDEO_RESPONSES[1]["files"][0]["id"],
name=LIST_VIDEO_RESPONSES[1]["files"][0]["name"],
checksum=(
LIST_VIDEO_RESPONSES[1]["files"][0]["md5Checksum"]
if same_checksum is True
else "differentmd5"
),
)
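    # Simulated ancestor chains for gdrive_sync.api.get_parent_tree (mocked below):
    # only files whose ancestors include a website short_id and the
    # DRIVE_FOLDER_VIDEOS_FINAL folder should be imported.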
parent_tree_responses = [
[
{
"id": LIST_VIDEO_RESPONSES[0]["files"][0]["parents"][0],
"name": website.short_id,
},
{"id": "abc123", "name": DRIVE_FOLDER_VIDEOS_FINAL},
],
[
{
"id": LIST_VIDEO_RESPONSES[0]["files"][1]["parents"][0],
"name": "no-matching-website",
},
{"id": "xyz987", "name": DRIVE_FOLDER_VIDEOS_FINAL},
],
[
{
"id": LIST_VIDEO_RESPONSES[0]["files"][0]["parents"][0],
"name": website.short_id,
},
{"id": "def456", "name": DRIVE_FOLDER_VIDEOS_FINAL},
],
[
{
"id": LIST_VIDEO_RESPONSES[0]["files"][1]["parents"][0],
"name": "no-matching-website",
},
{"id": "ghi789", "name": DRIVE_FOLDER_VIDEOS_FINAL},
],
]
if parent_folder_in_ancestors:
for response in parent_tree_responses:
response.append(
{
"id": "parent",
"name": "ancestor_exists",
}
)
mocker.patch("gdrive_sync.api.get_parent_tree", side_effect=parent_tree_responses)
mock_list_files = mocker.patch(
"gdrive_sync.tasks.query_files",
return_value=LIST_VIDEO_RESPONSES[0]["files"]
+ LIST_VIDEO_RESPONSES[1]["files"],
)
mock_upload_task = mocker.patch("gdrive_sync.tasks.stream_drive_file_to_s3.s")
mock_transcode_task = mocker.patch(
"gdrive_sync.tasks.transcode_drive_file_video.si"
)
mock_sync_content_task = mocker.patch("gdrive_sync.tasks.sync_website_content.si")
tracker = DriveApiQueryTrackerFactory.create(
api_call=DRIVE_API_FILES, last_dt=tracker_last_dt
)
if parent_folder_in_ancestors or parent_folder is None:
with pytest.raises(mocked_celery.replace_exception_class):
import_recent_files.delay(last_dt=arg_last_dt)
else:
import_recent_files.delay(last_dt=arg_last_dt)
last_dt = arg_last_dt or tracker_last_dt
last_dt_str = last_dt.strftime("%Y-%m-%dT%H:%M:%S.%f") if last_dt else None
base_query = "(not trashed and not mimeType = 'application/vnd.google-apps.folder')"
expected_query = (
f"{base_query} and (modifiedTime > '{last_dt_str}' or createdTime > '{last_dt_str}')"
if last_dt
else base_query
)
mock_list_files.assert_called_once_with(
query=expected_query, fields=DRIVE_FILE_FIELDS
)
tracker.refresh_from_db()
for i in range(2):
if (i == 1 and same_checksum) or (
parent_folder and not parent_folder_in_ancestors
): # chained tasks should not be run (wrong folder, or same checksum & name)
with pytest.raises(AssertionError):
mock_upload_task.assert_any_call(
LIST_VIDEO_RESPONSES[i]["files"][0]["id"]
)
with pytest.raises(AssertionError):
mock_transcode_task.assert_any_call(
LIST_VIDEO_RESPONSES[i]["files"][0]["id"]
)
else: # chained tasks should be run
mock_upload_task.assert_any_call(LIST_VIDEO_RESPONSES[i]["files"][0]["id"])
assert (
tracker.last_dt
== datetime.strptime(
LIST_VIDEO_RESPONSES[0]["files"][0]["modifiedTime"],
"%Y-%m-%dT%H:%M:%S.%fZ",
).replace(tzinfo=pytz.utc)
)
mock_transcode_task.assert_any_call(
LIST_VIDEO_RESPONSES[i]["files"][0]["id"]
)
mock_sync_content_task.assert_any_call(website.name)
if (
not parent_folder or parent_folder_in_ancestors
): # DriveFile should be created
assert DriveFile.objects.filter(
file_id=LIST_VIDEO_RESPONSES[i]["files"][0]["id"]
).exists()
assert (
DriveFile.objects.filter(
file_id=LIST_VIDEO_RESPONSES[i]["files"][1]["id"]
).exists()
is False
)
def test_import_recent_files_nonvideos(settings, mocker, mocked_celery):
"""
import_recent_files should import non-video files
"""
mocker.patch("gdrive_sync.tasks.is_gdrive_enabled", return_value=True)
settings.DRIVE_SHARED_ID = "test_drive"
settings.DRIVE_UPLOADS_PARENT_FOLDER_ID = "parent"
website = WebsiteFactory.create()
parent_tree_responses = [
[
{
"id": "parent",
"name": "ancestor_exists",
},
{
"id": LIST_FILE_RESPONSES[0]["files"][i]["parents"][0],
"name": website.short_id,
},
{"id": "abc123", "name": DRIVE_FOLDER_FILES_FINAL},
]
for i in range(2)
]
mocker.patch("gdrive_sync.api.get_parent_tree", side_effect=parent_tree_responses)
mocker.patch(
"gdrive_sync.tasks.query_files", return_value=LIST_FILE_RESPONSES[0]["files"]
)
mock_upload_task = mocker.patch("gdrive_sync.tasks.stream_drive_file_to_s3.s")
mock_resource_task = mocker.patch(
"gdrive_sync.tasks.create_resource_from_gdrive.si"
)
with pytest.raises(mocked_celery.replace_exception_class):
import_recent_files.delay(
last_dt=datetime.strptime("2021-01-01", "%Y-%m-%d").replace(
tzinfo=pytz.UTC
),
)
with pytest.raises(AssertionError):
mock_upload_task.assert_any_call(LIST_FILE_RESPONSES[1]["files"][0]["id"])
mock_upload_task.assert_any_call(
LIST_VIDEO_RESPONSES[0]["files"][0]["id"],
prefix=website.starter.config["root-url-path"],
)
mock_resource_task.assert_any_call(LIST_VIDEO_RESPONSES[0]["files"][0]["id"])
def test_create_resource_from_gdrive(mocker):
"""create_resource_from_gdrive should call create_gdrive_resource_content"""
mocker.patch(
"gdrive_sync.api.get_s3_content_type", return_value="application/ms-word"
)
mock_create_content = mocker.patch(
"gdrive_sync.tasks.create_gdrive_resource_content"
)
drive_file = DriveFileFactory.create()
create_resource_from_gdrive.delay(drive_file.file_id)
mock_create_content.assert_called_once_with(drive_file)
def test_import_website_files(mocker, mocked_celery):
"""import_website_files should run process_file_result for each drive file and trigger tasks"""
mocker.patch("gdrive_sync.tasks.is_gdrive_enabled", return_value=True)
website = WebsiteFactory.create()
drive_files = DriveFileFactory.create_batch(2, website=website)
mock_process_file_result = mocker.patch(
"gdrive_sync.tasks.process_file_result", side_effect=drive_files
)
mock_stream_task = mocker.patch("gdrive_sync.tasks.stream_drive_file_to_s3.s")
mock_create_resource = mocker.patch(
"gdrive_sync.tasks.create_resource_from_gdrive.si"
)
mock_sync_content = mocker.patch("gdrive_sync.tasks.sync_website_content.si")
mocker.patch(
"gdrive_sync.tasks.query_files",
side_effect=[
[
{
"id": "websiteFolderId",
"name": website.short_id,
},
],
[
{
"id": "websiteVideoFinalFolderId",
"name": DRIVE_FOLDER_VIDEOS_FINAL,
},
],
[
{
"id": "websiteFileFinalFolderId",
"name": DRIVE_FOLDER_FILES_FINAL,
},
],
],
)
mocker.patch(
"gdrive_sync.tasks.walk_gdrive_folder",
side_effect=[[], LIST_FILE_RESPONSES[0]["files"]],
)
with pytest.raises(mocked_celery.replace_exception_class):
import_website_files.delay(website.short_id)
assert mock_process_file_result.call_count == 2
for drive_file in drive_files:
mock_stream_task.assert_any_call(drive_file.file_id)
mock_create_resource.assert_any_call(drive_file.file_id)
mock_sync_content.assert_called_once_with(website.name)
def test_import_website_files_dupe_site_folders(mocker):
"""import_website_files should run process_file_result for each drive file and trigger tasks"""
mocker.patch("gdrive_sync.tasks.is_gdrive_enabled", return_value=True)
website = WebsiteFactory.create()
mocker.patch(
"gdrive_sync.tasks.query_files",
return_value=[
{
"id": "websiteFolderId",
"name": website.short_id,
},
{
"id": "websiteFolderId2",
"name": website.short_id,
},
],
)
with pytest.raises(Exception) as exc:
import_website_files.delay(website.short_id)
assert exc.value.args == (
"Expected 1 drive folder for %s but found %d",
website.short_id,
2,
)
def test_import_website_files_missing_folder(mocker):
"""import_website_files should run process_file_result for each drive file and trigger tasks"""
mocker.patch("gdrive_sync.tasks.is_gdrive_enabled", return_value=True)
website = WebsiteFactory.create()
mock_log = mocker.patch("gdrive_sync.tasks.log.error")
mocker.patch(
"gdrive_sync.tasks.query_files",
side_effect=[
[
{
"id": "websiteFolderId",
"name": website.short_id,
},
],
[],
[],
],
)
import_website_files.delay(website.short_id)
for folder in [DRIVE_FOLDER_VIDEOS_FINAL, DRIVE_FOLDER_FILES_FINAL]:
mock_log.assert_any_call(
"Expected 1 drive folder for %s/%s but found %d",
website.short_id,
folder,
0,
)
|
[
"gdrive_sync.tasks.transcode_drive_file_video.delay",
"gdrive_sync.factories.DriveFileFactory.create_batch",
"gdrive_sync.tasks.stream_drive_file_to_s3.delay",
"gdrive_sync.tasks.import_website_files.delay",
"gdrive_sync.tasks.import_recent_files.delay",
"gdrive_sync.factories.DriveApiQueryTrackerFactory.create",
"pytest.raises",
"datetime.datetime.strptime",
"gdrive_sync.models.DriveFile.objects.filter",
"pytest.mark.parametrize",
"gdrive_sync.tasks.create_gdrive_folders.delay",
"gdrive_sync.factories.DriveFileFactory.create",
"websites.factories.WebsiteFactory.create",
"gdrive_sync.tasks.create_resource_from_gdrive.delay"
] |
[((697, 754), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""shared_id"""', "[None, 'testDrive']"], {}), "('shared_id', [None, 'testDrive'])\n", (720, 754), False, 'import pytest\n'), ((756, 822), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""drive_creds"""', '[None, \'{"key": "value"}\']'], {}), '(\'drive_creds\', [None, \'{"key": "value"}\'])\n', (779, 822), False, 'import pytest\n'), ((1320, 1377), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""shared_id"""', "[None, 'testDrive']"], {}), "('shared_id', [None, 'testDrive'])\n", (1343, 1377), False, 'import pytest\n'), ((1379, 1445), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""drive_creds"""', '[None, \'{"key": "value"}\']'], {}), '(\'drive_creds\', [None, \'{"key": "value"}\'])\n', (1402, 1445), False, 'import pytest\n'), ((2598, 2723), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""parent_folder,parent_folder_in_ancestors"""', "[(None, False), ('parent', True), ('parent', False)]"], {}), "('parent_folder,parent_folder_in_ancestors', [(None,\n False), ('parent', True), ('parent', False)])\n", (2621, 2723), False, 'import pytest\n'), ((2732, 2787), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""same_checksum"""', '[True, False]'], {}), "('same_checksum', [True, False])\n", (2755, 2787), False, 'import pytest\n'), ((1154, 1179), 'gdrive_sync.factories.DriveFileFactory.create', 'DriveFileFactory.create', ([], {}), '()\n', (1177, 1179), False, 'from gdrive_sync.factories import DriveApiQueryTrackerFactory, DriveFileFactory\n'), ((1184, 1239), 'gdrive_sync.tasks.stream_drive_file_to_s3.delay', 'tasks.stream_drive_file_to_s3.delay', (['drive_file.file_id'], {}), '(drive_file.file_id)\n', (1219, 1239), False, 'from gdrive_sync import tasks\n'), ((1765, 1806), 'gdrive_sync.tasks.create_gdrive_folders.delay', 'tasks.create_gdrive_folders.delay', (['"""test"""'], {}), "('test')\n", (1798, 1806), False, 'from gdrive_sync import tasks\n'), ((2137, 2162), 'gdrive_sync.factories.DriveFileFactory.create', 'DriveFileFactory.create', ([], {}), '()\n', (2160, 2162), False, 'from gdrive_sync.factories import DriveApiQueryTrackerFactory, DriveFileFactory\n'), ((2167, 2219), 'gdrive_sync.tasks.transcode_drive_file_video.delay', 'transcode_drive_file_video.delay', (['drive_file.file_id'], {}), '(drive_file.file_id)\n', (2199, 2219), False, 'from gdrive_sync.tasks import create_resource_from_gdrive, import_recent_files, import_website_files, transcode_drive_file_video\n'), ((3260, 3283), 'websites.factories.WebsiteFactory.create', 'WebsiteFactory.create', ([], {}), '()\n', (3281, 3283), False, 'from websites.factories import WebsiteFactory\n'), ((3288, 3532), 'gdrive_sync.factories.DriveFileFactory.create', 'DriveFileFactory.create', ([], {'file_id': "LIST_VIDEO_RESPONSES[1]['files'][0]['id']", 'name': "LIST_VIDEO_RESPONSES[1]['files'][0]['name']", 'checksum': "(LIST_VIDEO_RESPONSES[1]['files'][0]['md5Checksum'] if same_checksum is \n True else 'differentmd5')"}), "(file_id=LIST_VIDEO_RESPONSES[1]['files'][0]['id'],\n name=LIST_VIDEO_RESPONSES[1]['files'][0]['name'], checksum=\n LIST_VIDEO_RESPONSES[1]['files'][0]['md5Checksum'] if same_checksum is \n True else 'differentmd5')\n", (3311, 3532), False, 'from gdrive_sync.factories import DriveApiQueryTrackerFactory, DriveFileFactory\n'), ((5369, 5459), 'gdrive_sync.factories.DriveApiQueryTrackerFactory.create', 'DriveApiQueryTrackerFactory.create', ([], {'api_call': 'DRIVE_API_FILES', 'last_dt': 'tracker_last_dt'}), '(api_call=DRIVE_API_FILES, last_dt=\n tracker_last_dt)\n', (5403, 5459), False, 'from gdrive_sync.factories import DriveApiQueryTrackerFactory, DriveFileFactory\n'), ((8153, 8176), 'websites.factories.WebsiteFactory.create', 'WebsiteFactory.create', ([], {}), '()\n', (8174, 8176), False, 'from websites.factories import WebsiteFactory\n'), ((9926, 9951), 'gdrive_sync.factories.DriveFileFactory.create', 'DriveFileFactory.create', ([], {}), '()\n', (9949, 9951), False, 'from gdrive_sync.factories import DriveApiQueryTrackerFactory, DriveFileFactory\n'), ((9956, 10009), 'gdrive_sync.tasks.create_resource_from_gdrive.delay', 'create_resource_from_gdrive.delay', (['drive_file.file_id'], {}), '(drive_file.file_id)\n', (9989, 10009), False, 'from gdrive_sync.tasks import create_resource_from_gdrive, import_recent_files, import_website_files, transcode_drive_file_video\n'), ((10315, 10338), 'websites.factories.WebsiteFactory.create', 'WebsiteFactory.create', ([], {}), '()\n', (10336, 10338), False, 'from websites.factories import WebsiteFactory\n'), ((10357, 10406), 'gdrive_sync.factories.DriveFileFactory.create_batch', 'DriveFileFactory.create_batch', (['(2)'], {'website': 'website'}), '(2, website=website)\n', (10386, 10406), False, 'from gdrive_sync.factories import DriveApiQueryTrackerFactory, DriveFileFactory\n'), ((12175, 12198), 'websites.factories.WebsiteFactory.create', 'WebsiteFactory.create', ([], {}), '()\n', (12196, 12198), False, 'from websites.factories import WebsiteFactory\n'), ((12992, 13015), 'websites.factories.WebsiteFactory.create', 'WebsiteFactory.create', ([], {}), '()\n', (13013, 13015), False, 'from websites.factories import WebsiteFactory\n'), ((13366, 13410), 'gdrive_sync.tasks.import_website_files.delay', 'import_website_files.delay', (['website.short_id'], {}), '(website.short_id)\n', (13392, 13410), False, 'from gdrive_sync.tasks import create_resource_from_gdrive, import_recent_files, import_website_files, transcode_drive_file_video\n'), ((5674, 5720), 'gdrive_sync.tasks.import_recent_files.delay', 'import_recent_files.delay', ([], {'last_dt': 'arg_last_dt'}), '(last_dt=arg_last_dt)\n', (5699, 5720), False, 'from gdrive_sync.tasks import create_resource_from_gdrive, import_recent_files, import_website_files, transcode_drive_file_video\n'), ((8966, 9018), 'pytest.raises', 'pytest.raises', (['mocked_celery.replace_exception_class'], {}), '(mocked_celery.replace_exception_class)\n', (8979, 9018), False, 'import pytest\n'), ((11547, 11599), 'pytest.raises', 'pytest.raises', (['mocked_celery.replace_exception_class'], {}), '(mocked_celery.replace_exception_class)\n', (11560, 11599), False, 'import pytest\n'), ((11609, 11653), 'gdrive_sync.tasks.import_website_files.delay', 'import_website_files.delay', (['website.short_id'], {}), '(website.short_id)\n', (11635, 11653), False, 'from gdrive_sync.tasks import create_resource_from_gdrive, import_recent_files, import_website_files, transcode_drive_file_video\n'), ((12532, 12556), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (12545, 12556), False, 'import pytest\n'), ((12573, 12617), 'gdrive_sync.tasks.import_website_files.delay', 'import_website_files.delay', (['website.short_id'], {}), '(website.short_id)\n', (12599, 12617), False, 'from gdrive_sync.tasks import create_resource_from_gdrive, import_recent_files, import_website_files, transcode_drive_file_video\n'), ((5543, 5595), 'pytest.raises', 'pytest.raises', (['mocked_celery.replace_exception_class'], {}), '(mocked_celery.replace_exception_class)\n', (5556, 5595), False, 'import pytest\n'), ((5609, 5655), 'gdrive_sync.tasks.import_recent_files.delay', 'import_recent_files.delay', ([], {'last_dt': 'arg_last_dt'}), '(last_dt=arg_last_dt)\n', (5634, 5655), False, 'from gdrive_sync.tasks import create_resource_from_gdrive, import_recent_files, import_website_files, transcode_drive_file_video\n'), ((9198, 9227), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (9211, 9227), False, 'import pytest\n'), ((6469, 6498), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (6482, 6498), False, 'import pytest\n'), ((6647, 6676), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (6660, 6676), False, 'import pytest\n'), ((2391, 2434), 'datetime.datetime.strptime', 'datetime.strptime', (['"""2021-01-01"""', '"""%Y-%m-%d"""'], {}), "('2021-01-01', '%Y-%m-%d')\n", (2408, 2434), False, 'from datetime import datetime\n'), ((2524, 2567), 'datetime.datetime.strptime', 'datetime.strptime', (['"""2021-02-02"""', '"""%Y-%m-%d"""'], {}), "('2021-02-02', '%Y-%m-%d')\n", (2541, 2567), False, 'from datetime import datetime\n'), ((7530, 7605), 'gdrive_sync.models.DriveFile.objects.filter', 'DriveFile.objects.filter', ([], {'file_id': "LIST_VIDEO_RESPONSES[i]['files'][0]['id']"}), "(file_id=LIST_VIDEO_RESPONSES[i]['files'][0]['id'])\n", (7554, 7605), False, 'from gdrive_sync.models import DriveFile\n'), ((7674, 7749), 'gdrive_sync.models.DriveFile.objects.filter', 'DriveFile.objects.filter', ([], {'file_id': "LIST_VIDEO_RESPONSES[i]['files'][1]['id']"}), "(file_id=LIST_VIDEO_RESPONSES[i]['files'][1]['id'])\n", (7698, 7749), False, 'from gdrive_sync.models import DriveFile\n'), ((7016, 7115), 'datetime.datetime.strptime', 'datetime.strptime', (["LIST_VIDEO_RESPONSES[0]['files'][0]['modifiedTime']", '"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {}), "(LIST_VIDEO_RESPONSES[0]['files'][0]['modifiedTime'],\n '%Y-%m-%dT%H:%M:%S.%fZ')\n", (7033, 7115), False, 'from datetime import datetime\n'), ((9075, 9118), 'datetime.datetime.strptime', 'datetime.strptime', (['"""2021-01-01"""', '"""%Y-%m-%d"""'], {}), "('2021-01-01', '%Y-%m-%d')\n", (9092, 9118), False, 'from datetime import datetime\n')]
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup, find_packages
import versioneer
long_description = open("README.md").read()
install_requires = []
setup(
name="exdir",
packages=find_packages(),
include_package_data=True,
version=versioneer.get_version(),
    cmdclass=versioneer.get_cmdclass(),
    long_description=long_description,
    install_requires=install_requires,
data_files=[
# like `jupyter nbextension install --sys-prefix`
("share/jupyter/nbextensions/exdir", [
"exdir/static/index.js",
]),
# like `jupyter nbextension enable --sys-prefix`
("etc/jupyter/nbconfig/notebook.d", [
"jupyter-config/nbconfig/notebook.d/exdir.json"
]),
# like `jupyter serverextension enable --sys-prefix`
("etc/jupyter/jupyter_notebook_config.d", [
"jupyter-config/jupyter_notebook_config.d/exdir.json"
])
],
zip_safe=False
)
|
[
"versioneer.get_version",
"setuptools.find_packages",
"versioneer.get_cmdclass"
] |
[((234, 249), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (247, 249), False, 'from setuptools import setup, find_packages\n'), ((294, 318), 'versioneer.get_version', 'versioneer.get_version', ([], {}), '()\n', (316, 318), False, 'import versioneer\n'), ((333, 358), 'versioneer.get_cmdclass', 'versioneer.get_cmdclass', ([], {}), '()\n', (356, 358), False, 'import versioneer\n')]
|
from discord.ext import commands
from PIL import Image
import discord, datetime, re
import PIL, shutil, os, random
class MineBase:
def __init__(self, member):
self.grid = [[0 for x in range(12)] for y in range(12)]
self.grid_show = [[0 for x in range(12)] for y in range(12)]
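        # The arrays are 12x12 while only 11x11 tiles are playable; the extra
        # zero-filled row/column acts as a sentinel so the neighbor checks in
        # generate_grid (including Python's -1 wraparound to index 11) never
        # see a mine off the board.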
self.file = f"cache/mine_{member.id}.png"
self.tile_size = 32
shutil.copy("assets/mine/board.png", self.file)
self.image = Image.open(self.file)
def __del__(self):
self.image.close()
        try:
            os.remove(self.file)
        except OSError:
            pass
def generate_grid(self):
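        # Plant a mine (value 9) in each cell with probability 1/7, mark every
        # tile as covered (grid_show value 10), then count each cell's
        # adjacent mines.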
for x in range(11):
for y in range(11):
self.grid_show[x][y] = 10
n = random.randint(0,6)
self.grid[x][y] = 9 if n == 0 else 0
for x in range(11):
for y in range(11):
n = 0
if self.grid[x][y] == 9:
continue
if self.grid[x + 1][y] == 9:
n += 1
if self.grid[x][y + 1] == 9:
n += 1
if self.grid[x - 1][y] == 9:
n += 1
if self.grid[x][y - 1] == 9:
n += 1
if self.grid[x + 1][y + 1] == 9:
n += 1
if self.grid[x - 1][y - 1] == 9:
n += 1
if self.grid[x - 1][y + 1] == 9:
n += 1
if self.grid[x + 1][y - 1] == 9:
n += 1
self.grid[x][y] = n
def modify_board(self):
tiles = Image.open("assets/mine/tiles.png")
if not tiles:
return
w = self.tile_size
for x in range(11):
for y in range(11):
# left-top-right-bottom
#print(x, y, self.grid[x][y])
#print((self.grid[x][y] * w, 0, w, w))
tile = tiles.crop((self.grid_show[x][y]*w, 0, self.grid_show[x][y]*w+w, w))
self.image.paste(tile, (24+x*self.tile_size, 24+y*self.tile_size))
self.image.save(self.file)
def translate_move(self, move_code):
n = ''.join(c for c in move_code if c.isdigit()) or None
l = ''.join(c for c in move_code if c.isalpha()) or None
return n, l
class Mine(commands.Cog):
    def __init__(self, bot):
        self.bot = bot
        print("Mine cog loaded")
@commands.command()
async def mine(self, context, *, move: str = ""):
test = MineBase(context.author)
test.generate_grid()
test.modify_board()
with open(test.file, "rb") as fp:
await context.send(file=discord.File(fp=fp))
#print(move)
#n, l = test.translate_move(move)
#await context.send(f'-> {n}, {l}')
del test
"""board = [[0 for x in range(12)] for y in range(12)]
#sboard = board
for x in range(11):
for y in range(11):
if random.randint(0,5) == 0:
board[x][y] = 9
else:
board[x][y] = 0
for x in range(11):
for y in range(11):
n = 0
if board[x][y] == 9:
continue
if board[x + 1][y] == 9:
n += 1
if board[x][y + 1] == 9:
n += 1
if board[x - 1][y] == 9:
n += 1
if board[x][y - 1] == 9:
n += 1
if board[x + 1][y + 1] == 9:
n += 1
if board[x - 1][y - 1] == 9:
n += 1
if board[x - 1][y + 1] == 9:
n += 1
if board[x + 1][y - 1] == 9:
n += 1
board[x][y] = n
output = "```"
for x in range(11):
for y in range(11):
output += f"{board[x][y]} "
output += "\n"
output += "```"
await context.send(output)"""
def setup(bot):
bot.add_cog(Mine(bot))
|
[
"os.remove",
"discord.ext.commands.command",
"random.randint",
"discord.File",
"PIL.Image.open",
"shutil.copy"
] |
[((2463, 2481), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (2479, 2481), False, 'from discord.ext import commands\n'), ((385, 432), 'shutil.copy', 'shutil.copy', (['"""assets/mine/board.png"""', 'self.file'], {}), "('assets/mine/board.png', self.file)\n", (396, 432), False, 'import PIL, shutil, os, random\n'), ((454, 475), 'PIL.Image.open', 'Image.open', (['self.file'], {}), '(self.file)\n', (464, 475), False, 'from PIL import Image\n'), ((1657, 1692), 'PIL.Image.open', 'Image.open', (['"""assets/mine/tiles.png"""'], {}), "('assets/mine/tiles.png')\n", (1667, 1692), False, 'from PIL import Image\n'), ((552, 572), 'os.remove', 'os.remove', (['self.file'], {}), '(self.file)\n', (561, 572), False, 'import PIL, shutil, os, random\n'), ((758, 778), 'random.randint', 'random.randint', (['(0)', '(6)'], {}), '(0, 6)\n', (772, 778), False, 'import PIL, shutil, os, random\n'), ((2711, 2730), 'discord.File', 'discord.File', ([], {'fp': 'fp'}), '(fp=fp)\n', (2723, 2730), False, 'import discord, datetime, re\n')]
|
#!/usr/bin/env python
from setuptools import Command, setup
import sys
class PyPandoc(Command):
description = 'Generates the documentation in reStructuredText format.'
user_options = []
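    # Run as `python setup.py doc` (registered via cmdclass below) to regenerate
    # the reStructuredText files consumed by long_description.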
def initialize_options(self):
pass
def finalize_options(self):
pass
def convert(self, infile, outfile):
import pypandoc
with open(outfile, 'w+') as f:
f.write(pypandoc.convert(infile, 'rst'))
def run(self):
self.convert('README.md', 'rst/README.rst')
self.convert('CHANGELOG.md', 'rst/CHANGELOG.rst')
setup(name='nagios2trac',
version='0.5.1',
description='Let Nagios Create or Comment on Trac Tickets',
long_description=open('rst/README.rst').read() + '\n\n' +
open('rst/CHANGELOG.rst').read(),
author='<NAME>',
author_email='<EMAIL>',
url='https://github.com/Jimdo/nagios2trac',
license='Apache',
scripts=['nagios2trac.py'],
cmdclass={'doc': PyPandoc},
include_package_data=True,
)
|
[
"pypandoc.convert"
] |
[((415, 446), 'pypandoc.convert', 'pypandoc.convert', (['infile', '"""rst"""'], {}), "(infile, 'rst')\n", (431, 446), False, 'import pypandoc\n')]
|
import pytest
from core import WordsRepository
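# Tests for WordsRepository's candidate-elimination logic: forgetting grey
# letters, pinning letters to positions (remember_at), excluding letters from
# positions (remember_not_at), and discarding whole words (forget_word).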
@pytest.mark.parametrize(
"input_words,forget_letters,remaining_words",
[
("urger,ribat,anorn,stram,sofar", "ugo", "ribat,stram")
]
)
def test_forget_letters(input_words, forget_letters, remaining_words):
input_words = tuple(input_words.split(','))
remaining_words = remaining_words.split(',')
word_solver = WordsRepository(input_words)
new_word_solver = word_solver.forget(forget_letters)
assert set(new_word_solver.remaining_words) == set(remaining_words)
@pytest.mark.parametrize(
"input_words",
[
"urger,ribat,anorn,stram,sofar"
]
)
def test_forget_letters_given_no_letters(input_words):
input_words = tuple(input_words.split(','))
word_solver = WordsRepository(input_words)
new_word_solver = word_solver.forget("")
assert set(new_word_solver.remaining_words) == set(input_words)
@pytest.mark.parametrize(
"input_words,remember_positions,remaining_words",
[
("uhuru,morro,frory,fjord", ([3], [], [0], [2]), "fjord,frory")
]
)
def test_remember_letter(input_words, remember_positions, remaining_words):
input_words = tuple(input_words.split(','))
word_solver = WordsRepository(input_words)
remaining_words = remaining_words.split(",")
for word, positions in zip(input_words, remember_positions):
word_solver = word_solver.remember_at(current_word=word, at_positions=positions)
assert set(word_solver.remaining_words) == set(remaining_words)
@pytest.mark.parametrize(
"input_words,not_at_position,remaining_words",
[
("saury,kebab,tunic,hepar,quiff", ([2], [], [3], [], []), "quiff")
]
)
def test_remember_letter_better(input_words, not_at_position, remaining_words):
input_words = tuple(input_words.split(','))
word_solver = WordsRepository(input_words)
remaining_words = remaining_words.split(",")
for current_word, position in zip(input_words, not_at_position):
word_solver = word_solver.remember_not_at(current_word, position)
assert set(word_solver.remaining_words) == set(remaining_words)
@pytest.mark.parametrize(
"input_words,not_at_positions,at_positions,to_forget,remaining_words",
[
("leese,tunic,benab", ([], []), ([1], []), ("e", "i"), "leese,benab")
]
)
def test_already_remembered_letters_cant_be_forgotten(
input_words,
not_at_positions,
at_positions,
to_forget,
remaining_words,
):
input_words = tuple(input_words.split(","))
word_solver = WordsRepository(input_words)
remaining_words = tuple(remaining_words.split(","))
new_word_solver = word_solver
for word, not_at, at, forget in zip(input_words, not_at_positions, at_positions, to_forget):
new_word_solver = word_solver.remember_not_at(word, not_at)
new_word_solver = new_word_solver.remember_at(word, at)
new_word_solver = new_word_solver.forget(grey_letters=forget)
assert set(remaining_words) == set(new_word_solver.remaining_words)
@pytest.mark.parametrize(
"input_words,forget_words,remaining_words",
[
("saury,kebab,tunic,hepar,quiff", "kebab,hepar,quiff", "saury,tunic")
]
)
def test_forget_word(input_words, forget_words, remaining_words):
input_words = tuple(input_words.split(","))
forget_words = tuple(forget_words.split(","))
remaining_words = tuple(remaining_words.split(","))
word_solver = WordsRepository(input_words)
new_word_solver = word_solver
for word in forget_words:
new_word_solver = new_word_solver.forget_word(word)
assert set(remaining_words) == set(new_word_solver.remaining_words)
|
[
"pytest.mark.parametrize",
"core.WordsRepository"
] |
[((51, 184), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""input_words,forget_letters,remaining_words"""', "[('urger,ribat,anorn,stram,sofar', 'ugo', 'ribat,stram')]"], {}), "('input_words,forget_letters,remaining_words', [(\n 'urger,ribat,anorn,stram,sofar', 'ugo', 'ribat,stram')])\n", (74, 184), False, 'import pytest\n'), ((551, 624), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""input_words"""', "['urger,ribat,anorn,stram,sofar']"], {}), "('input_words', ['urger,ribat,anorn,stram,sofar'])\n", (574, 624), False, 'import pytest\n'), ((915, 1060), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""input_words,remember_positions,remaining_words"""', "[('uhuru,morro,frory,fjord', ([3], [], [0], [2]), 'fjord,frory')]"], {}), "('input_words,remember_positions,remaining_words', [\n ('uhuru,morro,frory,fjord', ([3], [], [0], [2]), 'fjord,frory')])\n", (938, 1060), False, 'import pytest\n'), ((1525, 1670), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""input_words,not_at_position,remaining_words"""', "[('saury,kebab,tunic,hepar,quiff', ([2], [], [3], [], []), 'quiff')]"], {}), "('input_words,not_at_position,remaining_words', [(\n 'saury,kebab,tunic,hepar,quiff', ([2], [], [3], [], []), 'quiff')])\n", (1548, 1670), False, 'import pytest\n'), ((2138, 2314), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""input_words,not_at_positions,at_positions,to_forget,remaining_words"""', "[('leese,tunic,benab', ([], []), ([1], []), ('e', 'i'), 'leese,benab')]"], {}), "(\n 'input_words,not_at_positions,at_positions,to_forget,remaining_words',\n [('leese,tunic,benab', ([], []), ([1], []), ('e', 'i'), 'leese,benab')])\n", (2161, 2314), False, 'import pytest\n'), ((3071, 3216), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""input_words,forget_words,remaining_words"""', "[('saury,kebab,tunic,hepar,quiff', 'kebab,hepar,quiff', 'saury,tunic')]"], {}), "('input_words,forget_words,remaining_words', [(\n 'saury,kebab,tunic,hepar,quiff', 'kebab,hepar,quiff', 'saury,tunic')])\n", (3094, 3216), False, 'import pytest\n'), ((390, 418), 'core.WordsRepository', 'WordsRepository', (['input_words'], {}), '(input_words)\n', (405, 418), False, 'from core import WordsRepository\n'), ((770, 798), 'core.WordsRepository', 'WordsRepository', (['input_words'], {}), '(input_words)\n', (785, 798), False, 'from core import WordsRepository\n'), ((1222, 1250), 'core.WordsRepository', 'WordsRepository', (['input_words'], {}), '(input_words)\n', (1237, 1250), False, 'from core import WordsRepository\n'), ((1836, 1864), 'core.WordsRepository', 'WordsRepository', (['input_words'], {}), '(input_words)\n', (1851, 1864), False, 'from core import WordsRepository\n'), ((2578, 2606), 'core.WordsRepository', 'WordsRepository', (['input_words'], {}), '(input_words)\n', (2593, 2606), False, 'from core import WordsRepository\n'), ((3474, 3502), 'core.WordsRepository', 'WordsRepository', (['input_words'], {}), '(input_words)\n', (3489, 3502), False, 'from core import WordsRepository\n')]
|
import os
import sys
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from keras.applications.resnet50 import ResNet50
from keras.applications.inception_v3 import InceptionV3
from keras.applications.xception import Xception
# from efficientnet.keras import EfficientNetB3
from keras_preprocessing.image import ImageDataGenerator
from keras.models import Model
from keras.layers import Conv2D, Dense, Flatten, GlobalAveragePooling2D, GlobalMaxPool2D, Dropout, MaxPooling2D
def build_model(model_name=None, include_top=False, input_shape=(256,256,3), fine_tuning=True, layer_to_freeze=None, load_pretrained : str = None, summary=False):
    pmodel_name = (model_name or '').strip().lower()
print(pmodel_name)
if pmodel_name == 'resnet50': base_model = ResNet50(include_top=include_top, input_shape=input_shape)
elif pmodel_name == 'inception_v3' : base_model = InceptionV3(include_top=include_top, input_shape=input_shape)
elif pmodel_name == 'xception' : base_model = Xception(include_top=include_top, input_shape=input_shape)
# elif pmodel_name == 'efficient_net' : base_model = EfficientNetB3(include_top=include_top, input_shape=input_shape)
    else: raise ValueError(f'Unsupported model_name: {model_name}')
    if fine_tuning:
        # Freeze every layer before `layer_to_freeze`; layers from that one
        # onward stay trainable (the standard fine-tuning pattern).
        assert layer_to_freeze is not None, 'You must define the name of the layer to freeze up to.'
        fr_layer_name = layer_to_freeze
        set_trainable = False
        for layer in base_model.layers:
            if layer.name == fr_layer_name:
                set_trainable = True
            layer.trainable = set_trainable
# change last layers
last_1dconv_1 = Conv2D(1024, 1, activation='relu', kernel_initializer='he_normal')(base_model.output)
last_pool_1 = MaxPooling2D(pool_size=(2, 2), strides=(2, 2))(last_1dconv_1)
global_avg_pool = GlobalAveragePooling2D()(last_pool_1)
last_Dense_1 = Dense(512, activation='relu', kernel_initializer='he_normal')(global_avg_pool)
dropout_1 = Dropout(rate=0.5)(last_Dense_1)
last_Dense_2 = Dense(196, activation='softmax')(dropout_1)
    # assemble the final model (compilation is left to the caller)
model = Model(base_model.input, last_Dense_2)
# summary
if summary:
model.summary()
# load pretrained weights
if load_pretrained:
model.load_weights(load_pretrained)
return model
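# Illustrative usage sketch (the layer name below is hypothetical; pick a real
# boundary layer from base_model.summary() for the chosen architecture):
#
#   model = build_model('resnet50', input_shape=(256, 256, 3), fine_tuning=True,
#                       layer_to_freeze='res5a_branch2a', summary=True)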
|
[
"keras.applications.xception.Xception",
"keras.layers.Dropout",
"keras.models.Model",
"keras.layers.GlobalAveragePooling2D",
"keras.applications.resnet50.ResNet50",
"keras.layers.Dense",
"keras.layers.Conv2D",
"keras.applications.inception_v3.InceptionV3",
"keras.layers.MaxPooling2D"
] |
[((2085, 2122), 'keras.models.Model', 'Model', (['base_model.input', 'last_Dense_2'], {}), '(base_model.input, last_Dense_2)\n', (2090, 2122), False, 'from keras.models import Model\n'), ((774, 832), 'keras.applications.resnet50.ResNet50', 'ResNet50', ([], {'include_top': 'include_top', 'input_shape': 'input_shape'}), '(include_top=include_top, input_shape=input_shape)\n', (782, 832), False, 'from keras.applications.resnet50 import ResNet50\n'), ((1623, 1689), 'keras.layers.Conv2D', 'Conv2D', (['(1024)', '(1)'], {'activation': '"""relu"""', 'kernel_initializer': '"""he_normal"""'}), "(1024, 1, activation='relu', kernel_initializer='he_normal')\n", (1629, 1689), False, 'from keras.layers import Conv2D, Dense, Flatten, GlobalAveragePooling2D, GlobalMaxPool2D, Dropout, MaxPooling2D\n'), ((1727, 1773), 'keras.layers.MaxPooling2D', 'MaxPooling2D', ([], {'pool_size': '(2, 2)', 'strides': '(2, 2)'}), '(pool_size=(2, 2), strides=(2, 2))\n', (1739, 1773), False, 'from keras.layers import Conv2D, Dense, Flatten, GlobalAveragePooling2D, GlobalMaxPool2D, Dropout, MaxPooling2D\n'), ((1811, 1835), 'keras.layers.GlobalAveragePooling2D', 'GlobalAveragePooling2D', ([], {}), '()\n', (1833, 1835), False, 'from keras.layers import Conv2D, Dense, Flatten, GlobalAveragePooling2D, GlobalMaxPool2D, Dropout, MaxPooling2D\n'), ((1868, 1929), 'keras.layers.Dense', 'Dense', (['(512)'], {'activation': '"""relu"""', 'kernel_initializer': '"""he_normal"""'}), "(512, activation='relu', kernel_initializer='he_normal')\n", (1873, 1929), False, 'from keras.layers import Conv2D, Dense, Flatten, GlobalAveragePooling2D, GlobalMaxPool2D, Dropout, MaxPooling2D\n'), ((1963, 1980), 'keras.layers.Dropout', 'Dropout', ([], {'rate': '(0.5)'}), '(rate=0.5)\n', (1970, 1980), False, 'from keras.layers import Conv2D, Dense, Flatten, GlobalAveragePooling2D, GlobalMaxPool2D, Dropout, MaxPooling2D\n'), ((2014, 2046), 'keras.layers.Dense', 'Dense', (['(196)'], {'activation': '"""softmax"""'}), "(196, activation='softmax')\n", (2019, 2046), False, 'from keras.layers import Conv2D, Dense, Flatten, GlobalAveragePooling2D, GlobalMaxPool2D, Dropout, MaxPooling2D\n'), ((887, 948), 'keras.applications.inception_v3.InceptionV3', 'InceptionV3', ([], {'include_top': 'include_top', 'input_shape': 'input_shape'}), '(include_top=include_top, input_shape=input_shape)\n', (898, 948), False, 'from keras.applications.inception_v3 import InceptionV3\n'), ((999, 1057), 'keras.applications.xception.Xception', 'Xception', ([], {'include_top': 'include_top', 'input_shape': 'input_shape'}), '(include_top=include_top, input_shape=input_shape)\n', (1007, 1057), False, 'from keras.applications.xception import Xception\n')]
|
import numpy as np
from copy import copy
from .base import Simplifier
from ... import operations
from ...analyzers import SplitAnalysis
class ConvertBatchNorm(Simplifier):
ANALYSES = {"is_split": SplitAnalysis}
def visit_BatchNormalization(self, operation: operations.BatchNormalization):
input_op = operation.x
if (
isinstance(input_op, operations.Conv)
and not self.analysis["is_split"][input_op]
):
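            # BatchNorm(x) = scale * (x - mean) / sqrt(variance + epsilon) + bias,
            # i.e. an affine map a*x + b with a = scale/std and b = bias - a*mean;
            # it folds into the preceding Conv by rescaling each output channel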
std = np.sqrt(operation.variance + operation.epsilon)
a = operation.scale / std
b = operation.bias - operation.scale * operation.mean / std
weights = input_op.w
a_w = a[:, None, None, None]
weights = a_w * weights
bias = input_op.b
if bias is None:
bias = np.zeros(weights.shape[0], dtype=weights.dtype)
bias = a * bias + b
new_operation = copy(input_op)
new_operation.w = weights
new_operation.b = bias
return new_operation
elif (
isinstance(input_op, operations.Gemm)
and not self.analysis["is_split"][input_op]
):
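            # rather than rewriting the Gemm's weights, append a new Gemm that
            # computes the same affine map a*x + b via a diagonal weight matrix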
std = np.sqrt(operation.variance + operation.epsilon)
a = operation.scale / std
b = operation.bias - operation.mean * a
return operations.Gemm(input_op, np.diag(a), b)
elif isinstance(input_op, operations.Input):
input_shape = input_op.shape
input_dtype = input_op.dtype
if len(input_shape) == 2:
std = np.sqrt(operation.variance + operation.epsilon)
a = operation.scale / std
b = operation.bias - operation.mean * a
return operations.Gemm(input_op, np.diag(a), b)
elif len(input_shape) == 4:
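                # a BatchNorm applied directly to a 4D input is rewritten as a
                # 1x1 Conv whose kernel carries the per-channel scale on its diagonal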
c = operation.mean.shape[0]
std = np.sqrt(operation.variance + operation.epsilon)
                k = np.zeros(
                    (c, c, 1, 1), dtype=input_dtype
                )  # 1x1 identity kernel, laid out (outC, inC, kH, kW)
                for i in range(c):
                    k[i, i, 0, 0] = 1
                # reshape the per-channel scale so it multiplies the output-channel
                # axis; a bare (c,) array would broadcast over the last kernel axis
                W = k * (operation.scale / std)[:, None, None, None]
b = operation.bias - operation.scale * operation.mean / std
op = operations.Conv(input_op, W, b)
return op
        # TODO: in what other scenarios can BatchNorm be converted?
return operation
|
[
"numpy.zeros",
"numpy.diag",
"copy.copy",
"numpy.sqrt"
] |
[((481, 528), 'numpy.sqrt', 'np.sqrt', (['(operation.variance + operation.epsilon)'], {}), '(operation.variance + operation.epsilon)\n', (488, 528), True, 'import numpy as np\n'), ((941, 955), 'copy.copy', 'copy', (['input_op'], {}), '(input_op)\n', (945, 955), False, 'from copy import copy\n'), ((832, 879), 'numpy.zeros', 'np.zeros', (['weights.shape[0]'], {'dtype': 'weights.dtype'}), '(weights.shape[0], dtype=weights.dtype)\n', (840, 879), True, 'import numpy as np\n'), ((1212, 1259), 'numpy.sqrt', 'np.sqrt', (['(operation.variance + operation.epsilon)'], {}), '(operation.variance + operation.epsilon)\n', (1219, 1259), True, 'import numpy as np\n'), ((1395, 1405), 'numpy.diag', 'np.diag', (['a'], {}), '(a)\n', (1402, 1405), True, 'import numpy as np\n'), ((1605, 1652), 'numpy.sqrt', 'np.sqrt', (['(operation.variance + operation.epsilon)'], {}), '(operation.variance + operation.epsilon)\n', (1612, 1652), True, 'import numpy as np\n'), ((1800, 1810), 'numpy.diag', 'np.diag', (['a'], {}), '(a)\n', (1807, 1810), True, 'import numpy as np\n'), ((1921, 1968), 'numpy.sqrt', 'np.sqrt', (['(operation.variance + operation.epsilon)'], {}), '(operation.variance + operation.epsilon)\n', (1928, 1968), True, 'import numpy as np\n'), ((1989, 2030), 'numpy.zeros', 'np.zeros', (['(c, c, 1, 1)'], {'dtype': 'input_dtype'}), '((c, c, 1, 1), dtype=input_dtype)\n', (1997, 2030), True, 'import numpy as np\n')]
|
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
Classifier head and layer factory
Hacked together by / Copyright 2020 <NAME>
"""
from mindspore import nn
from mindspore import ops
def create_pool(pool_type='avg'):
assert pool_type in ["avg", "max"]
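    # ReduceMean/ReduceMax collapse the axes passed at call time; with
    # keep_dims=False the pooled output drops those axes entirely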
if pool_type == 'avg':
global_pool = ops.ReduceMean(keep_dims=False)
elif pool_type == 'max':
global_pool = ops.ReduceMax(keep_dims=False)
return global_pool
class ClassifierHead(nn.Cell):
"""Classifier head w/ configurable global pooling and dropout."""
def __init__(self, in_chs, num_classes, pool_type='avg', drop_rate=0.):
super(ClassifierHead, self).__init__()
self.drop_rate = drop_rate
self.global_pool = create_pool(pool_type)
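        # apply dropout only for a positive rate; Identity keeps construct()
        # uniform when no dropout is wanted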
self.drop_out = nn.Dropout(keep_prob=1 - self.drop_rate) if self.drop_rate > 0. else ops.Identity()
self.fc = nn.Dense(in_chs, num_classes)
def construct(self, x):
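        # assuming NCHW feature maps, reducing over axes (2, 3) pools over H and W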
x = self.global_pool(x, (2, 3))
x = self.drop_out(x)
x = self.fc(x)
return x
|
[
"mindspore.ops.ReduceMean",
"mindspore.ops.ReduceMax",
"mindspore.nn.Dropout",
"mindspore.ops.Identity",
"mindspore.nn.Dense"
] |
[((927, 958), 'mindspore.ops.ReduceMean', 'ops.ReduceMean', ([], {'keep_dims': '(False)'}), '(keep_dims=False)\n', (941, 958), False, 'from mindspore import ops\n'), ((1502, 1531), 'mindspore.nn.Dense', 'nn.Dense', (['in_chs', 'num_classes'], {}), '(in_chs, num_classes)\n', (1510, 1531), False, 'from mindspore import nn\n'), ((1010, 1040), 'mindspore.ops.ReduceMax', 'ops.ReduceMax', ([], {'keep_dims': '(False)'}), '(keep_dims=False)\n', (1023, 1040), False, 'from mindspore import ops\n'), ((1400, 1440), 'mindspore.nn.Dropout', 'nn.Dropout', ([], {'keep_prob': '(1 - self.drop_rate)'}), '(keep_prob=1 - self.drop_rate)\n', (1410, 1440), False, 'from mindspore import nn\n'), ((1469, 1483), 'mindspore.ops.Identity', 'ops.Identity', ([], {}), '()\n', (1481, 1483), False, 'from mindspore import ops\n')]
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'layouts/base_project_main.ui'
#
# Created by: PyQt4 UI code generator 4.12.1
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(792, 600)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth())
MainWindow.setSizePolicy(sizePolicy)
MainWindow.setMinimumSize(QtCore.QSize(0, 0))
MainWindow.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.horizontalLayout = QtGui.QHBoxLayout(self.centralwidget)
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.tabWidget = QtGui.QTabWidget(self.centralwidget)
self.tabWidget.setObjectName(_fromUtf8("tabWidget"))
self.tab_EMG = QtGui.QWidget()
self.tab_EMG.setObjectName(_fromUtf8("tab_EMG"))
self.horizontalLayout_2 = QtGui.QHBoxLayout(self.tab_EMG)
self.horizontalLayout_2.setMargin(0)
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.verticalLayoutGraphStatus = QtGui.QVBoxLayout()
self.verticalLayoutGraphStatus.setObjectName(_fromUtf8("verticalLayoutGraphStatus"))
self.verticalLayoutGraph = QtGui.QVBoxLayout()
self.verticalLayoutGraph.setObjectName(_fromUtf8("verticalLayoutGraph"))
self.label_replace = QtGui.QLabel(self.tab_EMG)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_replace.sizePolicy().hasHeightForWidth())
self.label_replace.setSizePolicy(sizePolicy)
self.label_replace.setObjectName(_fromUtf8("label_replace"))
self.verticalLayoutGraph.addWidget(self.label_replace)
self.verticalLayoutGraphStatus.addLayout(self.verticalLayoutGraph)
self.horizontalLayout_3 = QtGui.QHBoxLayout()
self.horizontalLayout_3.setContentsMargins(-1, 0, -1, -1)
self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3"))
self.lbl_status = QtGui.QLabel(self.tab_EMG)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.lbl_status.sizePolicy().hasHeightForWidth())
self.lbl_status.setSizePolicy(sizePolicy)
self.lbl_status.setObjectName(_fromUtf8("lbl_status"))
self.horizontalLayout_3.addWidget(self.lbl_status)
self.cb_chart_emg_on_off = QtGui.QCheckBox(self.tab_EMG)
self.cb_chart_emg_on_off.setChecked(True)
self.cb_chart_emg_on_off.setObjectName(_fromUtf8("cb_chart_emg_on_off"))
self.horizontalLayout_3.addWidget(self.cb_chart_emg_on_off)
self.verticalLayoutGraphStatus.addLayout(self.horizontalLayout_3)
self.horizontalLayout_2.addLayout(self.verticalLayoutGraphStatus)
self.tabWidget.addTab(self.tab_EMG, _fromUtf8(""))
self.tab_features = QtGui.QWidget()
self.tab_features.setObjectName(_fromUtf8("tab_features"))
self.horizontalLayout_5 = QtGui.QHBoxLayout(self.tab_features)
self.horizontalLayout_5.setMargin(0)
self.horizontalLayout_5.setObjectName(_fromUtf8("horizontalLayout_5"))
self.verticalLayoutGraphStatus_features = QtGui.QVBoxLayout()
self.verticalLayoutGraphStatus_features.setObjectName(_fromUtf8("verticalLayoutGraphStatus_features"))
self.verticalLayoutGraph_features = QtGui.QVBoxLayout()
self.verticalLayoutGraph_features.setObjectName(_fromUtf8("verticalLayoutGraph_features"))
self.horizontalLayout_features = QtGui.QHBoxLayout()
self.horizontalLayout_features.setContentsMargins(-1, 0, -1, -1)
self.horizontalLayout_features.setObjectName(_fromUtf8("horizontalLayout_features"))
self.label_features = QtGui.QLabel(self.tab_features)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_features.sizePolicy().hasHeightForWidth())
self.label_features.setSizePolicy(sizePolicy)
self.label_features.setObjectName(_fromUtf8("label_features"))
self.horizontalLayout_features.addWidget(self.label_features)
self.cb_features = QtGui.QComboBox(self.tab_features)
self.cb_features.setEditable(False)
self.cb_features.setObjectName(_fromUtf8("cb_features"))
self.horizontalLayout_features.addWidget(self.cb_features)
self.checkBox = QtGui.QCheckBox(self.tab_features)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.checkBox.sizePolicy().hasHeightForWidth())
self.checkBox.setSizePolicy(sizePolicy)
self.checkBox.setObjectName(_fromUtf8("checkBox"))
self.horizontalLayout_features.addWidget(self.checkBox)
self.verticalLayoutGraph_features.addLayout(self.horizontalLayout_features)
self.label_replace_features = QtGui.QLabel(self.tab_features)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_replace_features.sizePolicy().hasHeightForWidth())
self.label_replace_features.setSizePolicy(sizePolicy)
self.label_replace_features.setObjectName(_fromUtf8("label_replace_features"))
self.verticalLayoutGraph_features.addWidget(self.label_replace_features)
self.verticalLayoutGraphStatus_features.addLayout(self.verticalLayoutGraph_features)
self.horizontalLayout_status_features = QtGui.QHBoxLayout()
self.horizontalLayout_status_features.setContentsMargins(-1, 0, -1, -1)
self.horizontalLayout_status_features.setObjectName(_fromUtf8("horizontalLayout_status_features"))
self.lbl_status_features = QtGui.QLabel(self.tab_features)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.lbl_status_features.sizePolicy().hasHeightForWidth())
self.lbl_status_features.setSizePolicy(sizePolicy)
self.lbl_status_features.setObjectName(_fromUtf8("lbl_status_features"))
self.horizontalLayout_status_features.addWidget(self.lbl_status_features)
self.cb_chart_features_on_off = QtGui.QCheckBox(self.tab_features)
self.cb_chart_features_on_off.setChecked(True)
self.cb_chart_features_on_off.setObjectName(_fromUtf8("cb_chart_features_on_off"))
self.horizontalLayout_status_features.addWidget(self.cb_chart_features_on_off)
self.verticalLayoutGraphStatus_features.addLayout(self.horizontalLayout_status_features)
self.horizontalLayout_5.addLayout(self.verticalLayoutGraphStatus_features)
self.tabWidget.addTab(self.tab_features, _fromUtf8(""))
self.tab_classification = QtGui.QWidget()
self.tab_classification.setObjectName(_fromUtf8("tab_classification"))
self.horizontalLayout_6 = QtGui.QHBoxLayout(self.tab_classification)
self.horizontalLayout_6.setMargin(0)
self.horizontalLayout_6.setObjectName(_fromUtf8("horizontalLayout_6"))
self.graphicsView_classification = QtGui.QGraphicsView(self.tab_classification)
self.graphicsView_classification.setObjectName(_fromUtf8("graphicsView_classification"))
self.horizontalLayout_6.addWidget(self.graphicsView_classification)
self.tabWidget.addTab(self.tab_classification, _fromUtf8(""))
self.tab_controle_manuel = QtGui.QWidget()
self.tab_controle_manuel.setObjectName(_fromUtf8("tab_controle_manuel"))
self.horizontalLayout_4 = QtGui.QHBoxLayout(self.tab_controle_manuel)
self.horizontalLayout_4.setMargin(0)
self.horizontalLayout_4.setObjectName(_fromUtf8("horizontalLayout_4"))
self.verticalLayout_5 = QtGui.QVBoxLayout()
self.verticalLayout_5.setObjectName(_fromUtf8("verticalLayout_5"))
self.verticalLayout_6 = QtGui.QVBoxLayout()
self.verticalLayout_6.setContentsMargins(-1, 0, -1, 0)
self.verticalLayout_6.setObjectName(_fromUtf8("verticalLayout_6"))
self.groupBox_f1 = QtGui.QGroupBox(self.tab_controle_manuel)
self.groupBox_f1.setObjectName(_fromUtf8("groupBox_f1"))
self.verticalLayout_11 = QtGui.QVBoxLayout(self.groupBox_f1)
self.verticalLayout_11.setObjectName(_fromUtf8("verticalLayout_11"))
self.h_slider_1 = QtGui.QSlider(self.groupBox_f1)
self.h_slider_1.setMaximum(90)
self.h_slider_1.setOrientation(QtCore.Qt.Horizontal)
self.h_slider_1.setObjectName(_fromUtf8("h_slider_1"))
self.verticalLayout_11.addWidget(self.h_slider_1)
self.verticalLayout_6.addWidget(self.groupBox_f1)
self.groupBox_f2 = QtGui.QGroupBox(self.tab_controle_manuel)
self.groupBox_f2.setObjectName(_fromUtf8("groupBox_f2"))
self.verticalLayout_8 = QtGui.QVBoxLayout(self.groupBox_f2)
self.verticalLayout_8.setObjectName(_fromUtf8("verticalLayout_8"))
self.h_slider_2 = QtGui.QSlider(self.groupBox_f2)
self.h_slider_2.setMaximum(90)
self.h_slider_2.setOrientation(QtCore.Qt.Horizontal)
self.h_slider_2.setObjectName(_fromUtf8("h_slider_2"))
self.verticalLayout_8.addWidget(self.h_slider_2)
self.verticalLayout_6.addWidget(self.groupBox_f2)
self.groupBox_f3 = QtGui.QGroupBox(self.tab_controle_manuel)
self.groupBox_f3.setObjectName(_fromUtf8("groupBox_f3"))
self.verticalLayout_9 = QtGui.QVBoxLayout(self.groupBox_f3)
self.verticalLayout_9.setObjectName(_fromUtf8("verticalLayout_9"))
self.h_slider_3 = QtGui.QSlider(self.groupBox_f3)
self.h_slider_3.setMaximum(90)
self.h_slider_3.setOrientation(QtCore.Qt.Horizontal)
self.h_slider_3.setObjectName(_fromUtf8("h_slider_3"))
self.verticalLayout_9.addWidget(self.h_slider_3)
self.verticalLayout_6.addWidget(self.groupBox_f3)
self.groupBox_f4 = QtGui.QGroupBox(self.tab_controle_manuel)
self.groupBox_f4.setObjectName(_fromUtf8("groupBox_f4"))
self.verticalLayout_10 = QtGui.QVBoxLayout(self.groupBox_f4)
self.verticalLayout_10.setObjectName(_fromUtf8("verticalLayout_10"))
self.h_slider_4 = QtGui.QSlider(self.groupBox_f4)
self.h_slider_4.setMaximum(90)
self.h_slider_4.setOrientation(QtCore.Qt.Horizontal)
self.h_slider_4.setObjectName(_fromUtf8("h_slider_4"))
self.verticalLayout_10.addWidget(self.h_slider_4)
self.verticalLayout_6.addWidget(self.groupBox_f4)
self.groupBox_f5 = QtGui.QGroupBox(self.tab_controle_manuel)
self.groupBox_f5.setObjectName(_fromUtf8("groupBox_f5"))
self.verticalLayout_4 = QtGui.QVBoxLayout(self.groupBox_f5)
self.verticalLayout_4.setObjectName(_fromUtf8("verticalLayout_4"))
self.h_slider_5 = QtGui.QSlider(self.groupBox_f5)
self.h_slider_5.setMaximum(90)
self.h_slider_5.setOrientation(QtCore.Qt.Horizontal)
self.h_slider_5.setObjectName(_fromUtf8("h_slider_5"))
self.verticalLayout_4.addWidget(self.h_slider_5)
self.verticalLayout_6.addWidget(self.groupBox_f5)
self.groupBox_4 = QtGui.QGroupBox(self.tab_controle_manuel)
self.groupBox_4.setObjectName(_fromUtf8("groupBox_4"))
self.horizontalLayout_10 = QtGui.QHBoxLayout(self.groupBox_4)
self.horizontalLayout_10.setObjectName(_fromUtf8("horizontalLayout_10"))
self.btn_laser = QtGui.QPushButton(self.groupBox_4)
self.btn_laser.setObjectName(_fromUtf8("btn_laser"))
self.horizontalLayout_10.addWidget(self.btn_laser)
self.btn_reset_position = QtGui.QPushButton(self.groupBox_4)
self.btn_reset_position.setObjectName(_fromUtf8("btn_reset_position"))
self.horizontalLayout_10.addWidget(self.btn_reset_position)
self.btn_close_position = QtGui.QPushButton(self.groupBox_4)
self.btn_close_position.setObjectName(_fromUtf8("btn_close_position"))
self.horizontalLayout_10.addWidget(self.btn_close_position)
self.btn_send_position = QtGui.QPushButton(self.groupBox_4)
self.btn_send_position.setObjectName(_fromUtf8("btn_send_position"))
self.horizontalLayout_10.addWidget(self.btn_send_position)
self.verticalLayout_6.addWidget(self.groupBox_4)
self.groupBox_3 = QtGui.QGroupBox(self.tab_controle_manuel)
self.groupBox_3.setObjectName(_fromUtf8("groupBox_3"))
self.verticalLayout_14 = QtGui.QVBoxLayout(self.groupBox_3)
self.verticalLayout_14.setContentsMargins(-1, -1, -1, 9)
self.verticalLayout_14.setObjectName(_fromUtf8("verticalLayout_14"))
self.horizontalLayout_14 = QtGui.QHBoxLayout()
self.horizontalLayout_14.setContentsMargins(-1, 0, -1, -1)
self.horizontalLayout_14.setObjectName(_fromUtf8("horizontalLayout_14"))
self.pushButton_forhonnor = QtGui.QPushButton(self.groupBox_3)
self.pushButton_forhonnor.setObjectName(_fromUtf8("pushButton_forhonnor"))
self.horizontalLayout_14.addWidget(self.pushButton_forhonnor)
self.pushButton_pointer = QtGui.QPushButton(self.groupBox_3)
self.pushButton_pointer.setObjectName(_fromUtf8("pushButton_pointer"))
self.horizontalLayout_14.addWidget(self.pushButton_pointer)
self.pushButton_spiderman = QtGui.QPushButton(self.groupBox_3)
self.pushButton_spiderman.setObjectName(_fromUtf8("pushButton_spiderman"))
self.horizontalLayout_14.addWidget(self.pushButton_spiderman)
self.pushButton_comein = QtGui.QPushButton(self.groupBox_3)
self.pushButton_comein.setObjectName(_fromUtf8("pushButton_comein"))
self.horizontalLayout_14.addWidget(self.pushButton_comein)
self.verticalLayout_14.addLayout(self.horizontalLayout_14)
self.horizontalLayout_16 = QtGui.QHBoxLayout()
self.horizontalLayout_16.setContentsMargins(-1, -1, -1, 0)
self.horizontalLayout_16.setObjectName(_fromUtf8("horizontalLayout_16"))
self.pushButton_v = QtGui.QPushButton(self.groupBox_3)
self.pushButton_v.setObjectName(_fromUtf8("pushButton_v"))
self.horizontalLayout_16.addWidget(self.pushButton_v)
self.pushButton_extra6 = QtGui.QPushButton(self.groupBox_3)
self.pushButton_extra6.setObjectName(_fromUtf8("pushButton_extra6"))
self.horizontalLayout_16.addWidget(self.pushButton_extra6)
self.pushButton_extra7 = QtGui.QPushButton(self.groupBox_3)
self.pushButton_extra7.setObjectName(_fromUtf8("pushButton_extra7"))
self.horizontalLayout_16.addWidget(self.pushButton_extra7)
self.comboBox_movimentfinal = QtGui.QComboBox(self.groupBox_3)
self.comboBox_movimentfinal.setObjectName(_fromUtf8("comboBox_movimentfinal"))
self.horizontalLayout_16.addWidget(self.comboBox_movimentfinal)
self.verticalLayout_14.addLayout(self.horizontalLayout_16)
self.verticalLayout_6.addWidget(self.groupBox_3)
self.verticalLayout_5.addLayout(self.verticalLayout_6)
self.horizontalLayout_4.addLayout(self.verticalLayout_5)
self.tabWidget.addTab(self.tab_controle_manuel, _fromUtf8(""))
self.tab_2 = QtGui.QWidget()
self.tab_2.setObjectName(_fromUtf8("tab_2"))
self.tabWidget.addTab(self.tab_2, _fromUtf8(""))
self.tab_settings = QtGui.QWidget()
self.tab_settings.setObjectName(_fromUtf8("tab_settings"))
self.verticalLayout_7 = QtGui.QVBoxLayout(self.tab_settings)
self.verticalLayout_7.setMargin(0)
self.verticalLayout_7.setObjectName(_fromUtf8("verticalLayout_7"))
self.horizontalLayout_11 = QtGui.QHBoxLayout()
self.horizontalLayout_11.setContentsMargins(0, -1, -1, -1)
self.horizontalLayout_11.setObjectName(_fromUtf8("horizontalLayout_11"))
self.groupBox_10 = QtGui.QGroupBox(self.tab_settings)
self.groupBox_10.setObjectName(_fromUtf8("groupBox_10"))
self.horizontalLayout_13 = QtGui.QHBoxLayout(self.groupBox_10)
self.horizontalLayout_13.setObjectName(_fromUtf8("horizontalLayout_13"))
self.cb_in_serial_port = QtGui.QComboBox(self.groupBox_10)
self.cb_in_serial_port.setObjectName(_fromUtf8("cb_in_serial_port"))
self.horizontalLayout_13.addWidget(self.cb_in_serial_port)
self.horizontalLayout_11.addWidget(self.groupBox_10)
self.groupBox_8 = QtGui.QGroupBox(self.tab_settings)
self.groupBox_8.setObjectName(_fromUtf8("groupBox_8"))
self.horizontalLayout_12 = QtGui.QHBoxLayout(self.groupBox_8)
self.horizontalLayout_12.setObjectName(_fromUtf8("horizontalLayout_12"))
self.cb_out_serial_port = QtGui.QComboBox(self.groupBox_8)
self.cb_out_serial_port.setObjectName(_fromUtf8("cb_out_serial_port"))
self.horizontalLayout_12.addWidget(self.cb_out_serial_port)
self.horizontalLayout_11.addWidget(self.groupBox_8)
self.verticalLayout_7.addLayout(self.horizontalLayout_11)
spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout_7.addItem(spacerItem)
self.btn_go = QtGui.QPushButton(self.tab_settings)
font = QtGui.QFont()
font.setPointSize(29)
self.btn_go.setFont(font)
self.btn_go.setObjectName(_fromUtf8("btn_go"))
self.verticalLayout_7.addWidget(self.btn_go)
spacerItem1 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout_7.addItem(spacerItem1)
self.pushButton = QtGui.QPushButton(self.tab_settings)
self.pushButton.setObjectName(_fromUtf8("pushButton"))
self.verticalLayout_7.addWidget(self.pushButton)
self.tabWidget.addTab(self.tab_settings, _fromUtf8(""))
self.horizontalLayout.addWidget(self.tabWidget)
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtGui.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 792, 25))
self.menubar.setObjectName(_fromUtf8("menubar"))
self.menuArquivo = QtGui.QMenu(self.menubar)
self.menuArquivo.setObjectName(_fromUtf8("menuArquivo"))
self.menuHelp = QtGui.QMenu(self.menubar)
self.menuHelp.setObjectName(_fromUtf8("menuHelp"))
self.menuFunctions = QtGui.QMenu(self.menubar)
self.menuFunctions.setObjectName(_fromUtf8("menuFunctions"))
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtGui.QStatusBar(MainWindow)
self.statusbar.setObjectName(_fromUtf8("statusbar"))
MainWindow.setStatusBar(self.statusbar)
self.dockWidget = QtGui.QDockWidget(MainWindow)
self.dockWidget.setObjectName(_fromUtf8("dockWidget"))
self.dockWidgetContents = QtGui.QWidget()
self.dockWidgetContents.setObjectName(_fromUtf8("dockWidgetContents"))
self.verticalLayout = QtGui.QVBoxLayout(self.dockWidgetContents)
self.verticalLayout.setMargin(0)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.verticalLayoutOptions = QtGui.QVBoxLayout()
self.verticalLayoutOptions.setContentsMargins(-1, 0, -1, -1)
self.verticalLayoutOptions.setObjectName(_fromUtf8("verticalLayoutOptions"))
self.lbl_options = QtGui.QLabel(self.dockWidgetContents)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.lbl_options.sizePolicy().hasHeightForWidth())
self.lbl_options.setSizePolicy(sizePolicy)
self.lbl_options.setMaximumSize(QtCore.QSize(16777215, 50))
font = QtGui.QFont()
font.setBold(True)
font.setItalic(False)
font.setWeight(75)
font.setStrikeOut(False)
font.setKerning(True)
self.lbl_options.setFont(font)
self.lbl_options.setAlignment(QtCore.Qt.AlignCenter)
self.lbl_options.setObjectName(_fromUtf8("lbl_options"))
self.verticalLayoutOptions.addWidget(self.lbl_options)
self.verticalLayoutThreshould = QtGui.QVBoxLayout()
self.verticalLayoutThreshould.setObjectName(_fromUtf8("verticalLayoutThreshould"))
self.groupBox = QtGui.QGroupBox(self.dockWidgetContents)
self.groupBox.setObjectName(_fromUtf8("groupBox"))
self.verticalLayout_2 = QtGui.QVBoxLayout(self.groupBox)
self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
self.btn_record_raw_emg = QtGui.QPushButton(self.groupBox)
self.btn_record_raw_emg.setObjectName(_fromUtf8("btn_record_raw_emg"))
self.verticalLayout_2.addWidget(self.btn_record_raw_emg)
self.label_file_name = QtGui.QLabel(self.groupBox)
self.label_file_name.setObjectName(_fromUtf8("label_file_name"))
self.verticalLayout_2.addWidget(self.label_file_name)
self.horizontalLayout_7 = QtGui.QHBoxLayout()
self.horizontalLayout_7.setContentsMargins(-1, 0, -1, -1)
self.horizontalLayout_7.setObjectName(_fromUtf8("horizontalLayout_7"))
self.lbl_output_name = QtGui.QLabel(self.groupBox)
self.lbl_output_name.setObjectName(_fromUtf8("lbl_output_name"))
self.horizontalLayout_7.addWidget(self.lbl_output_name)
self.lbl_output_value = QtGui.QLabel(self.groupBox)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.lbl_output_value.sizePolicy().hasHeightForWidth())
self.lbl_output_value.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.lbl_output_value.setFont(font)
self.lbl_output_value.setObjectName(_fromUtf8("lbl_output_value"))
self.horizontalLayout_7.addWidget(self.lbl_output_value)
self.verticalLayout_2.addLayout(self.horizontalLayout_7)
self.btn_generate_training_file = QtGui.QPushButton(self.groupBox)
self.btn_generate_training_file.setObjectName(_fromUtf8("btn_generate_training_file"))
self.verticalLayout_2.addWidget(self.btn_generate_training_file)
self.btn_load_training_file = QtGui.QPushButton(self.groupBox)
self.btn_load_training_file.setObjectName(_fromUtf8("btn_load_training_file"))
self.verticalLayout_2.addWidget(self.btn_load_training_file)
self.label = QtGui.QLabel(self.groupBox)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label.setFont(font)
self.label.setObjectName(_fromUtf8("label"))
self.verticalLayout_2.addWidget(self.label)
self.verticalLayoutThreshould.addWidget(self.groupBox)
self.groupBox_2 = QtGui.QGroupBox(self.dockWidgetContents)
self.groupBox_2.setObjectName(_fromUtf8("groupBox_2"))
self.verticalLayout_3 = QtGui.QVBoxLayout(self.groupBox_2)
self.verticalLayout_3.setObjectName(_fromUtf8("verticalLayout_3"))
self.checkBox_simulation = QtGui.QCheckBox(self.groupBox_2)
self.checkBox_simulation.setChecked(False)
self.checkBox_simulation.setObjectName(_fromUtf8("checkBox_simulation"))
self.verticalLayout_3.addWidget(self.checkBox_simulation)
self.checkBox_simple_mode = QtGui.QCheckBox(self.groupBox_2)
self.checkBox_simple_mode.setObjectName(_fromUtf8("checkBox_simple_mode"))
self.verticalLayout_3.addWidget(self.checkBox_simple_mode)
self.horizontalLayout_8 = QtGui.QHBoxLayout()
self.horizontalLayout_8.setContentsMargins(0, 0, -1, -1)
self.horizontalLayout_8.setObjectName(_fromUtf8("horizontalLayout_8"))
self.horizontalSlider_threshold = QtGui.QSlider(self.groupBox_2)
self.horizontalSlider_threshold.setMaximum(250)
self.horizontalSlider_threshold.setProperty("value", 250)
self.horizontalSlider_threshold.setOrientation(QtCore.Qt.Horizontal)
self.horizontalSlider_threshold.setObjectName(_fromUtf8("horizontalSlider_threshold"))
self.horizontalLayout_8.addWidget(self.horizontalSlider_threshold)
self.label_2 = QtGui.QLabel(self.groupBox_2)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.horizontalLayout_8.addWidget(self.label_2)
self.verticalLayout_3.addLayout(self.horizontalLayout_8)
self.verticalLayoutThreshould.addWidget(self.groupBox_2)
self.verticalLayoutCheckBoxes = QtGui.QVBoxLayout()
self.verticalLayoutCheckBoxes.setObjectName(_fromUtf8("verticalLayoutCheckBoxes"))
self.label_channels = QtGui.QLabel(self.dockWidgetContents)
self.label_channels.setObjectName(_fromUtf8("label_channels"))
self.verticalLayoutCheckBoxes.addWidget(self.label_channels)
self.cb_ch1 = QtGui.QCheckBox(self.dockWidgetContents)
self.cb_ch1.setObjectName(_fromUtf8("cb_ch1"))
self.verticalLayoutCheckBoxes.addWidget(self.cb_ch1)
self.cb_ch3 = QtGui.QCheckBox(self.dockWidgetContents)
self.cb_ch3.setObjectName(_fromUtf8("cb_ch3"))
self.verticalLayoutCheckBoxes.addWidget(self.cb_ch3)
self.cb_ch4 = QtGui.QCheckBox(self.dockWidgetContents)
self.cb_ch4.setObjectName(_fromUtf8("cb_ch4"))
self.verticalLayoutCheckBoxes.addWidget(self.cb_ch4)
self.cb_ch2 = QtGui.QCheckBox(self.dockWidgetContents)
self.cb_ch2.setObjectName(_fromUtf8("cb_ch2"))
self.verticalLayoutCheckBoxes.addWidget(self.cb_ch2)
self.verticalLayoutThreshould.addLayout(self.verticalLayoutCheckBoxes)
self.verticalLayoutOptions.addLayout(self.verticalLayoutThreshould)
self.verticalLayout.addLayout(self.verticalLayoutOptions)
self.dockWidget.setWidget(self.dockWidgetContents)
MainWindow.addDockWidget(QtCore.Qt.DockWidgetArea(1), self.dockWidget)
self.actionSimples = QtGui.QAction(MainWindow)
self.actionSimples.setObjectName(_fromUtf8("actionSimples"))
self.actionIn_Plotter = QtGui.QAction(MainWindow)
self.actionIn_Plotter.setObjectName(_fromUtf8("actionIn_Plotter"))
self.actionThread = QtGui.QAction(MainWindow)
self.actionThread.setObjectName(_fromUtf8("actionThread"))
self.actionDesativado = QtGui.QAction(MainWindow)
self.actionDesativado.setObjectName(_fromUtf8("actionDesativado"))
self.actionStartAcquisition = QtGui.QAction(MainWindow)
self.actionStartAcquisition.setCheckable(False)
self.actionStartAcquisition.setObjectName(_fromUtf8("actionStartAcquisition"))
self.actionStart_Recording = QtGui.QAction(MainWindow)
self.actionStart_Recording.setObjectName(_fromUtf8("actionStart_Recording"))
self.actionStop_Recording = QtGui.QAction(MainWindow)
self.actionStop_Recording.setObjectName(_fromUtf8("actionStop_Recording"))
self.actionAbout = QtGui.QAction(MainWindow)
self.actionAbout.setObjectName(_fromUtf8("actionAbout"))
self.actionFind_Serial_Port = QtGui.QAction(MainWindow)
self.actionFind_Serial_Port.setObjectName(_fromUtf8("actionFind_Serial_Port"))
self.menuArquivo.addAction(self.actionStart_Recording)
self.menuArquivo.addAction(self.actionStop_Recording)
self.menuHelp.addAction(self.actionAbout)
self.menuFunctions.addAction(self.actionStartAcquisition)
self.menuFunctions.addSeparator()
self.menuFunctions.addAction(self.actionFind_Serial_Port)
self.menubar.addAction(self.menuArquivo.menuAction())
self.menubar.addAction(self.menuHelp.menuAction())
self.menubar.addAction(self.menuFunctions.menuAction())
self.retranslateUi(MainWindow)
self.tabWidget.setCurrentIndex(3)
self.cb_features.setCurrentIndex(-1)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(_translate("MainWindow", "Project Main", None))
self.label_replace.setText(_translate("MainWindow", "Here will be the chart", None))
self.lbl_status.setText(_translate("MainWindow", "Status:", None))
self.cb_chart_emg_on_off.setText(_translate("MainWindow", "Chart On/Off", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_EMG), _translate("MainWindow", "EMG", None))
self.label_features.setText(_translate("MainWindow", "Feature:", None))
self.checkBox.setText(_translate("MainWindow", "Visible", None))
self.label_replace_features.setText(_translate("MainWindow", "Here will be another the chart", None))
self.lbl_status_features.setText(_translate("MainWindow", "Status:", None))
self.cb_chart_features_on_off.setText(_translate("MainWindow", "Chart On/Off", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_features), _translate("MainWindow", "Features", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_classification), _translate("MainWindow", "Classification", None))
self.groupBox_f1.setTitle(_translate("MainWindow", "Finger 1: 0º", None))
self.groupBox_f2.setTitle(_translate("MainWindow", "Finger 2: 0º", None))
self.groupBox_f3.setTitle(_translate("MainWindow", "Finger 3: 0º", None))
self.groupBox_f4.setTitle(_translate("MainWindow", "Finger 4: 0º", None))
self.groupBox_f5.setTitle(_translate("MainWindow", "Finger 5: 0º", None))
self.groupBox_4.setTitle(_translate("MainWindow", "General", None))
self.btn_laser.setText(_translate("MainWindow", "Laser", None))
self.btn_reset_position.setText(_translate("MainWindow", "Open Hand", None))
self.btn_close_position.setText(_translate("MainWindow", "Close Hand", None))
self.btn_send_position.setText(_translate("MainWindow", "Send", None))
self.groupBox_3.setTitle(_translate("MainWindow", "Extra", None))
self.pushButton_forhonnor.setText(_translate("MainWindow", "For Honnor", None))
self.pushButton_pointer.setText(_translate("MainWindow", "Pointer", None))
self.pushButton_spiderman.setText(_translate("MainWindow", "Spider-Man", None))
self.pushButton_comein.setText(_translate("MainWindow", "Come In", None))
self.pushButton_v.setText(_translate("MainWindow", "V", None))
self.pushButton_extra6.setText(_translate("MainWindow", "Extra 6", None))
self.pushButton_extra7.setText(_translate("MainWindow", "Extra 7", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_controle_manuel), _translate("MainWindow", "Controle Manuel", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_2), _translate("MainWindow", "Processing", None))
self.groupBox_10.setTitle(_translate("MainWindow", "Input Serial Port", None))
self.groupBox_8.setTitle(_translate("MainWindow", "Output Serial Port", None))
self.btn_go.setText(_translate("MainWindow", "Go!", None))
self.pushButton.setText(_translate("MainWindow", "...", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_settings), _translate("MainWindow", "General Settings", None))
self.menuArquivo.setTitle(_translate("MainWindow", "File", None))
self.menuHelp.setTitle(_translate("MainWindow", "Help", None))
self.menuFunctions.setTitle(_translate("MainWindow", "Options", None))
self.lbl_options.setText(_translate("MainWindow", "Options", None))
self.groupBox.setTitle(_translate("MainWindow", "Training", None))
self.btn_record_raw_emg.setText(_translate("MainWindow", "Record raw EMG file", None))
self.label_file_name.setText(_translate("MainWindow", "File: None", None))
self.lbl_output_name.setText(_translate("MainWindow", "Output:", None))
self.lbl_output_value.setText(_translate("MainWindow", "None", None))
self.btn_generate_training_file.setText(_translate("MainWindow", "Generate Training File", None))
self.btn_load_training_file.setText(_translate("MainWindow", "Load Training File", None))
self.label.setText(_translate("MainWindow", "Not Trained", None))
self.groupBox_2.setTitle(_translate("MainWindow", "Status", None))
self.checkBox_simulation.setText(_translate("MainWindow", "Using Simulation", None))
self.checkBox_simple_mode.setText(_translate("MainWindow", "Threshold", None))
self.label_2.setText(_translate("MainWindow", "2,5", None))
self.label_channels.setText(_translate("MainWindow", "Channels:", None))
self.cb_ch1.setText(_translate("MainWindow", "CH1", None))
self.cb_ch3.setText(_translate("MainWindow", "CH2", None))
self.cb_ch4.setText(_translate("MainWindow", "CH3", None))
self.cb_ch2.setText(_translate("MainWindow", "CH4", None))
self.actionSimples.setText(_translate("MainWindow", "Processamento", None))
self.actionIn_Plotter.setText(_translate("MainWindow", "In Plotter", None))
self.actionThread.setText(_translate("MainWindow", "Thread", None))
self.actionDesativado.setText(_translate("MainWindow", "Desativado", None))
self.actionStartAcquisition.setText(_translate("MainWindow", "Start Acquisition", None))
self.actionStart_Recording.setText(_translate("MainWindow", "Start Recording", None))
self.actionStop_Recording.setText(_translate("MainWindow", "Stop Recording", None))
self.actionAbout.setText(_translate("MainWindow", "About", None))
self.actionFind_Serial_Port.setText(_translate("MainWindow", "Find Serial Port", None))
|
[
"PyQt4.QtGui.QWidget",
"PyQt4.QtGui.QLabel",
"PyQt4.QtGui.QCheckBox",
"PyQt4.QtGui.QVBoxLayout",
"PyQt4.QtGui.QSizePolicy",
"PyQt4.QtGui.QFont",
"PyQt4.QtGui.QGroupBox",
"PyQt4.QtGui.QApplication.translate",
"PyQt4.QtGui.QSlider",
"PyQt4.QtGui.QMenu",
"PyQt4.QtGui.QDockWidget",
"PyQt4.QtGui.QAction",
"PyQt4.QtCore.QSize",
"PyQt4.QtCore.Qt.DockWidgetArea",
"PyQt4.QtGui.QComboBox",
"PyQt4.QtGui.QPushButton",
"PyQt4.QtCore.QRect",
"PyQt4.QtGui.QHBoxLayout",
"PyQt4.QtGui.QSpacerItem",
"PyQt4.QtCore.QMetaObject.connectSlotsByName",
"PyQt4.QtGui.QStatusBar",
"PyQt4.QtGui.QTabWidget",
"PyQt4.QtGui.QGraphicsView",
"PyQt4.QtGui.QMenuBar"
] |
[((467, 531), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['context', 'text', 'disambig', '_encoding'], {}), '(context, text, disambig, _encoding)\n', (495, 531), False, 'from PyQt4 import QtCore, QtGui\n'), ((849, 924), 'PyQt4.QtGui.QSizePolicy', 'QtGui.QSizePolicy', (['QtGui.QSizePolicy.Preferred', 'QtGui.QSizePolicy.Preferred'], {}), '(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)\n', (866, 924), False, 'from PyQt4 import QtCore, QtGui\n'), ((1287, 1312), 'PyQt4.QtGui.QWidget', 'QtGui.QWidget', (['MainWindow'], {}), '(MainWindow)\n', (1300, 1312), False, 'from PyQt4 import QtCore, QtGui\n'), ((1414, 1451), 'PyQt4.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1431, 1451), False, 'from PyQt4 import QtCore, QtGui\n'), ((1552, 1588), 'PyQt4.QtGui.QTabWidget', 'QtGui.QTabWidget', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1568, 1588), False, 'from PyQt4 import QtCore, QtGui\n'), ((1673, 1688), 'PyQt4.QtGui.QWidget', 'QtGui.QWidget', ([], {}), '()\n', (1686, 1688), False, 'from PyQt4 import QtCore, QtGui\n'), ((1780, 1811), 'PyQt4.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', (['self.tab_EMG'], {}), '(self.tab_EMG)\n', (1797, 1811), False, 'from PyQt4 import QtCore, QtGui\n'), ((1977, 1996), 'PyQt4.QtGui.QVBoxLayout', 'QtGui.QVBoxLayout', ([], {}), '()\n', (1994, 1996), False, 'from PyQt4 import QtCore, QtGui\n'), ((2125, 2144), 'PyQt4.QtGui.QVBoxLayout', 'QtGui.QVBoxLayout', ([], {}), '()\n', (2142, 2144), False, 'from PyQt4 import QtCore, QtGui\n'), ((2255, 2281), 'PyQt4.QtGui.QLabel', 'QtGui.QLabel', (['self.tab_EMG'], {}), '(self.tab_EMG)\n', (2267, 2281), False, 'from PyQt4 import QtCore, QtGui\n'), ((2303, 2378), 'PyQt4.QtGui.QSizePolicy', 'QtGui.QSizePolicy', (['QtGui.QSizePolicy.Expanding', 'QtGui.QSizePolicy.Expanding'], {}), '(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)\n', (2320, 2378), False, 'from PyQt4 import QtCore, QtGui\n'), ((2847, 2866), 'PyQt4.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', ([], {}), '()\n', (2864, 2866), False, 'from PyQt4 import QtCore, QtGui\n'), ((3038, 3064), 'PyQt4.QtGui.QLabel', 'QtGui.QLabel', (['self.tab_EMG'], {}), '(self.tab_EMG)\n', (3050, 3064), False, 'from PyQt4 import QtCore, QtGui\n'), ((3086, 3161), 'PyQt4.QtGui.QSizePolicy', 'QtGui.QSizePolicy', (['QtGui.QSizePolicy.Expanding', 'QtGui.QSizePolicy.Preferred'], {}), '(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Preferred)\n', (3103, 3161), False, 'from PyQt4 import QtCore, QtGui\n'), ((3540, 3569), 'PyQt4.QtGui.QCheckBox', 'QtGui.QCheckBox', (['self.tab_EMG'], {}), '(self.tab_EMG)\n', (3555, 3569), False, 'from PyQt4 import QtCore, QtGui\n'), ((4004, 4019), 'PyQt4.QtGui.QWidget', 'QtGui.QWidget', ([], {}), '()\n', (4017, 4019), False, 'from PyQt4 import QtCore, QtGui\n'), ((4121, 4157), 'PyQt4.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', (['self.tab_features'], {}), '(self.tab_features)\n', (4138, 4157), False, 'from PyQt4 import QtCore, QtGui\n'), ((4332, 4351), 'PyQt4.QtGui.QVBoxLayout', 'QtGui.QVBoxLayout', ([], {}), '()\n', (4349, 4351), False, 'from PyQt4 import QtCore, QtGui\n'), ((4507, 4526), 'PyQt4.QtGui.QVBoxLayout', 'QtGui.QVBoxLayout', ([], {}), '()\n', (4524, 4526), False, 'from PyQt4 import QtCore, QtGui\n'), ((4667, 4686), 'PyQt4.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', ([], {}), '()\n', (4684, 4686), False, 'from PyQt4 import QtCore, QtGui\n'), ((4883, 4914), 'PyQt4.QtGui.QLabel', 'QtGui.QLabel', (['self.tab_features'], {}), '(self.tab_features)\n', (4895, 4914), False, 'from PyQt4 import QtCore, QtGui\n'), ((4936, 5007), 'PyQt4.QtGui.QSizePolicy', 'QtGui.QSizePolicy', (['QtGui.QSizePolicy.Fixed', 'QtGui.QSizePolicy.Preferred'], {}), '(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Preferred)\n', (4953, 5007), False, 'from PyQt4 import QtCore, QtGui\n'), ((5405, 5439), 'PyQt4.QtGui.QComboBox', 'QtGui.QComboBox', (['self.tab_features'], {}), '(self.tab_features)\n', (5420, 5439), False, 'from PyQt4 import QtCore, QtGui\n'), ((5640, 5674), 'PyQt4.QtGui.QCheckBox', 'QtGui.QCheckBox', (['self.tab_features'], {}), '(self.tab_features)\n', (5655, 5674), False, 'from PyQt4 import QtCore, QtGui\n'), ((5696, 5763), 'PyQt4.QtGui.QSizePolicy', 'QtGui.QSizePolicy', (['QtGui.QSizePolicy.Fixed', 'QtGui.QSizePolicy.Fixed'], {}), '(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)\n', (5713, 5763), False, 'from PyQt4 import QtCore, QtGui\n'), ((6226, 6257), 'PyQt4.QtGui.QLabel', 'QtGui.QLabel', (['self.tab_features'], {}), '(self.tab_features)\n', (6238, 6257), False, 'from PyQt4 import QtCore, QtGui\n'), ((6279, 6354), 'PyQt4.QtGui.QSizePolicy', 'QtGui.QSizePolicy', (['QtGui.QSizePolicy.Expanding', 'QtGui.QSizePolicy.Expanding'], {}), '(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)\n', (6296, 6354), False, 'from PyQt4 import QtCore, QtGui\n'), ((6909, 6928), 'PyQt4.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', ([], {}), '()\n', (6926, 6928), False, 'from PyQt4 import QtCore, QtGui\n'), ((7151, 7182), 'PyQt4.QtGui.QLabel', 'QtGui.QLabel', (['self.tab_features'], {}), '(self.tab_features)\n', (7163, 7182), False, 'from PyQt4 import QtCore, QtGui\n'), ((7204, 7279), 'PyQt4.QtGui.QSizePolicy', 'QtGui.QSizePolicy', (['QtGui.QSizePolicy.Expanding', 'QtGui.QSizePolicy.Preferred'], {}), '(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Preferred)\n', (7221, 7279), False, 'from PyQt4 import QtCore, QtGui\n'), ((7722, 7756), 'PyQt4.QtGui.QCheckBox', 'QtGui.QCheckBox', (['self.tab_features'], {}), '(self.tab_features)\n', (7737, 7756), False, 'from PyQt4 import QtCore, QtGui\n'), ((8268, 8283), 'PyQt4.QtGui.QWidget', 'QtGui.QWidget', ([], {}), '()\n', (8281, 8283), False, 'from PyQt4 import QtCore, QtGui\n'), ((8397, 8439), 'PyQt4.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', (['self.tab_classification'], {}), '(self.tab_classification)\n', (8414, 8439), False, 'from PyQt4 import QtCore, QtGui\n'), ((8607, 8651), 'PyQt4.QtGui.QGraphicsView', 'QtGui.QGraphicsView', (['self.tab_classification'], {}), '(self.tab_classification)\n', (8626, 8651), False, 'from PyQt4 import QtCore, QtGui\n'), ((8930, 8945), 'PyQt4.QtGui.QWidget', 'QtGui.QWidget', ([], {}), '()\n', (8943, 8945), False, 'from PyQt4 import QtCore, QtGui\n'), ((9061, 9104), 'PyQt4.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', (['self.tab_controle_manuel'], {}), '(self.tab_controle_manuel)\n', (9078, 9104), False, 'from PyQt4 import QtCore, QtGui\n'), ((9261, 9280), 'PyQt4.QtGui.QVBoxLayout', 'QtGui.QVBoxLayout', ([], {}), '()\n', (9278, 9280), False, 'from PyQt4 import QtCore, QtGui\n'), ((9388, 9407), 'PyQt4.QtGui.QVBoxLayout', 'QtGui.QVBoxLayout', ([], {}), '()\n', (9405, 9407), False, 'from PyQt4 import QtCore, QtGui\n'), ((9573, 9614), 'PyQt4.QtGui.QGroupBox', 'QtGui.QGroupBox', (['self.tab_controle_manuel'], {}), '(self.tab_controle_manuel)\n', (9588, 9614), False, 'from PyQt4 import QtCore, QtGui\n'), ((9713, 9748), 'PyQt4.QtGui.QVBoxLayout', 'QtGui.QVBoxLayout', (['self.groupBox_f1'], {}), '(self.groupBox_f1)\n', (9730, 9748), False, 'from PyQt4 import QtCore, QtGui\n'), ((9852, 9883), 'PyQt4.QtGui.QSlider', 'QtGui.QSlider', (['self.groupBox_f1'], {}), '(self.groupBox_f1)\n', (9865, 9883), False, 'from PyQt4 import QtCore, QtGui\n'), ((10190, 10231), 'PyQt4.QtGui.QGroupBox', 'QtGui.QGroupBox', (['self.tab_controle_manuel'], {}), '(self.tab_controle_manuel)\n', (10205, 10231), False, 'from PyQt4 import QtCore, QtGui\n'), ((10329, 10364), 'PyQt4.QtGui.QVBoxLayout', 'QtGui.QVBoxLayout', (['self.groupBox_f2'], {}), '(self.groupBox_f2)\n', (10346, 10364), False, 'from PyQt4 import QtCore, QtGui\n'), ((10466, 10497), 'PyQt4.QtGui.QSlider', 'QtGui.QSlider', (['self.groupBox_f2'], {}), '(self.groupBox_f2)\n', (10479, 10497), False, 'from PyQt4 import QtCore, QtGui\n'), ((10803, 10844), 'PyQt4.QtGui.QGroupBox', 'QtGui.QGroupBox', (['self.tab_controle_manuel'], {}), '(self.tab_controle_manuel)\n', (10818, 10844), False, 'from PyQt4 import QtCore, QtGui\n'), ((10942, 10977), 'PyQt4.QtGui.QVBoxLayout', 'QtGui.QVBoxLayout', (['self.groupBox_f3'], {}), '(self.groupBox_f3)\n', (10959, 10977), False, 'from PyQt4 import QtCore, QtGui\n'), ((11079, 11110), 'PyQt4.QtGui.QSlider', 'QtGui.QSlider', (['self.groupBox_f3'], {}), '(self.groupBox_f3)\n', (11092, 11110), False, 'from PyQt4 import QtCore, QtGui\n'), ((11416, 11457), 'PyQt4.QtGui.QGroupBox', 'QtGui.QGroupBox', (['self.tab_controle_manuel'], {}), '(self.tab_controle_manuel)\n', (11431, 11457), False, 'from PyQt4 import QtCore, QtGui\n'), ((11556, 11591), 'PyQt4.QtGui.QVBoxLayout', 'QtGui.QVBoxLayout', (['self.groupBox_f4'], {}), '(self.groupBox_f4)\n', (11573, 11591), False, 'from PyQt4 import QtCore, QtGui\n'), ((11695, 11726), 'PyQt4.QtGui.QSlider', 'QtGui.QSlider', (['self.groupBox_f4'], {}), '(self.groupBox_f4)\n', (11708, 11726), False, 'from PyQt4 import QtCore, QtGui\n'), ((12033, 12074), 'PyQt4.QtGui.QGroupBox', 'QtGui.QGroupBox', (['self.tab_controle_manuel'], {}), '(self.tab_controle_manuel)\n', (12048, 12074), False, 'from PyQt4 import QtCore, QtGui\n'), ((12172, 12207), 'PyQt4.QtGui.QVBoxLayout', 'QtGui.QVBoxLayout', (['self.groupBox_f5'], {}), '(self.groupBox_f5)\n', (12189, 12207), False, 'from PyQt4 import QtCore, QtGui\n'), ((12309, 12340), 'PyQt4.QtGui.QSlider', 'QtGui.QSlider', (['self.groupBox_f5'], {}), '(self.groupBox_f5)\n', (12322, 12340), False, 'from PyQt4 import QtCore, QtGui\n'), ((12645, 12686), 'PyQt4.QtGui.QGroupBox', 'QtGui.QGroupBox', (['self.tab_controle_manuel'], {}), '(self.tab_controle_manuel)\n', (12660, 12686), False, 'from PyQt4 import QtCore, QtGui\n'), ((12785, 12819), 'PyQt4.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', (['self.groupBox_4'], {}), '(self.groupBox_4)\n', (12802, 12819), False, 'from PyQt4 import QtCore, QtGui\n'), ((12926, 12960), 'PyQt4.QtGui.QPushButton', 'QtGui.QPushButton', (['self.groupBox_4'], {}), '(self.groupBox_4)\n', (12943, 12960), False, 'from PyQt4 import QtCore, QtGui\n'), ((13115, 13149), 'PyQt4.QtGui.QPushButton', 'QtGui.QPushButton', (['self.groupBox_4'], {}), '(self.groupBox_4)\n', (13132, 13149), False, 'from PyQt4 import QtCore, QtGui\n'), ((13331, 13365), 'PyQt4.QtGui.QPushButton', 'QtGui.QPushButton', (['self.groupBox_4'], {}), '(self.groupBox_4)\n', (13348, 13365), False, 'from PyQt4 import QtCore, QtGui\n'), ((13546, 13580), 'PyQt4.QtGui.QPushButton', 'QtGui.QPushButton', (['self.groupBox_4'], {}), '(self.groupBox_4)\n', (13563, 13580), False, 'from PyQt4 import QtCore, QtGui\n'), ((13808, 13849), 'PyQt4.QtGui.QGroupBox', 'QtGui.QGroupBox', (['self.tab_controle_manuel'], {}), '(self.tab_controle_manuel)\n', (13823, 13849), False, 'from PyQt4 import QtCore, QtGui\n'), ((13946, 13980), 'PyQt4.QtGui.QVBoxLayout', 'QtGui.QVBoxLayout', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (13963, 13980), False, 'from PyQt4 import QtCore, QtGui\n'), ((14158, 14177), 'PyQt4.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', ([], {}), '()\n', (14175, 14177), False, 'from PyQt4 import QtCore, QtGui\n'), ((14362, 14396), 'PyQt4.QtGui.QPushButton', 'QtGui.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (14379, 14396), False, 'from PyQt4 import QtCore, QtGui\n'), ((14584, 14618), 'PyQt4.QtGui.QPushButton', 'QtGui.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (14601, 14618), False, 'from PyQt4 import QtCore, QtGui\n'), ((14802, 14836), 'PyQt4.QtGui.QPushButton', 'QtGui.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (14819, 14836), False, 'from PyQt4 import QtCore, QtGui\n'), ((15023, 15057), 'PyQt4.QtGui.QPushButton', 'QtGui.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (15040, 15057), False, 'from PyQt4 import QtCore, QtGui\n'), ((15304, 15323), 'PyQt4.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', ([], {}), '()\n', (15321, 15323), False, 'from PyQt4 import QtCore, QtGui\n'), ((15500, 15534), 'PyQt4.QtGui.QPushButton', 'QtGui.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (15517, 15534), False, 'from PyQt4 import QtCore, QtGui\n'), ((15697, 15731), 'PyQt4.QtGui.QPushButton', 'QtGui.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (15714, 15731), False, 'from PyQt4 import QtCore, QtGui\n'), ((15909, 15943), 'PyQt4.QtGui.QPushButton', 'QtGui.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (15926, 15943), False, 'from PyQt4 import QtCore, QtGui\n'), ((16126, 16158), 'PyQt4.QtGui.QComboBox', 'QtGui.QComboBox', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (16141, 16158), False, 'from PyQt4 import QtCore, QtGui\n'), ((16662, 16677), 'PyQt4.QtGui.QWidget', 'QtGui.QWidget', ([], {}), '()\n', (16675, 16677), False, 'from PyQt4 import QtCore, QtGui\n'), ((16816, 16831), 'PyQt4.QtGui.QWidget', 'QtGui.QWidget', ([], {}), '()\n', (16829, 16831), False, 'from PyQt4 import QtCore, QtGui\n'), ((16931, 16967), 'PyQt4.QtGui.QVBoxLayout', 'QtGui.QVBoxLayout', (['self.tab_settings'], {}), '(self.tab_settings)\n', (16948, 16967), False, 'from PyQt4 import QtCore, QtGui\n'), ((17121, 17140), 'PyQt4.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', ([], {}), '()\n', (17138, 17140), False, 'from PyQt4 import QtCore, QtGui\n'), ((17316, 17350), 'PyQt4.QtGui.QGroupBox', 'QtGui.QGroupBox', (['self.tab_settings'], {}), '(self.tab_settings)\n', (17331, 17350), False, 'from PyQt4 import QtCore, QtGui\n'), ((17451, 17486), 'PyQt4.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', (['self.groupBox_10'], {}), '(self.groupBox_10)\n', (17468, 17486), False, 'from PyQt4 import QtCore, QtGui\n'), ((17601, 17634), 'PyQt4.QtGui.QComboBox', 'QtGui.QComboBox', (['self.groupBox_10'], {}), '(self.groupBox_10)\n', (17616, 17634), False, 'from PyQt4 import QtCore, QtGui\n'), ((17866, 17900), 'PyQt4.QtGui.QGroupBox', 'QtGui.QGroupBox', (['self.tab_settings'], {}), '(self.tab_settings)\n', (17881, 17900), False, 'from PyQt4 import QtCore, QtGui\n'), ((17999, 18033), 'PyQt4.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', (['self.groupBox_8'], {}), '(self.groupBox_8)\n', (18016, 18033), False, 'from PyQt4 import QtCore, QtGui\n'), ((18149, 18181), 'PyQt4.QtGui.QComboBox', 'QtGui.QComboBox', (['self.groupBox_8'], {}), '(self.groupBox_8)\n', (18164, 18181), False, 'from PyQt4 import QtCore, QtGui\n'), ((18476, 18562), 'PyQt4.QtGui.QSpacerItem', 'QtGui.QSpacerItem', (['(20)', '(40)', 'QtGui.QSizePolicy.Minimum', 'QtGui.QSizePolicy.Expanding'], {}), '(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.\n Expanding)\n', (18493, 18562), False, 'from PyQt4 import QtCore, QtGui\n'), ((18630, 18666), 'PyQt4.QtGui.QPushButton', 'QtGui.QPushButton', (['self.tab_settings'], {}), '(self.tab_settings)\n', (18647, 18666), False, 'from PyQt4 import QtCore, QtGui\n'), ((18682, 18695), 'PyQt4.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (18693, 18695), False, 'from PyQt4 import QtCore, QtGui\n'), ((18890, 18976), 'PyQt4.QtGui.QSpacerItem', 'QtGui.QSpacerItem', (['(20)', '(40)', 'QtGui.QSizePolicy.Minimum', 'QtGui.QSizePolicy.Expanding'], {}), '(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.\n Expanding)\n', (18907, 18976), False, 'from PyQt4 import QtCore, QtGui\n'), ((19049, 19085), 'PyQt4.QtGui.QPushButton', 'QtGui.QPushButton', (['self.tab_settings'], {}), '(self.tab_settings)\n', (19066, 19085), False, 'from PyQt4 import QtCore, QtGui\n'), ((19405, 19431), 'PyQt4.QtGui.QMenuBar', 'QtGui.QMenuBar', (['MainWindow'], {}), '(MainWindow)\n', (19419, 19431), False, 'from PyQt4 import QtCore, QtGui\n'), ((19578, 19603), 'PyQt4.QtGui.QMenu', 'QtGui.QMenu', (['self.menubar'], {}), '(self.menubar)\n', (19589, 19603), False, 'from PyQt4 import QtCore, QtGui\n'), ((19693, 19718), 'PyQt4.QtGui.QMenu', 'QtGui.QMenu', (['self.menubar'], {}), '(self.menubar)\n', (19704, 19718), False, 'from PyQt4 import QtCore, QtGui\n'), ((19807, 19832), 'PyQt4.QtGui.QMenu', 'QtGui.QMenu', (['self.menubar'], {}), '(self.menubar)\n', (19818, 19832), False, 'from PyQt4 import QtCore, QtGui\n'), ((19971, 19999), 'PyQt4.QtGui.QStatusBar', 'QtGui.QStatusBar', (['MainWindow'], {}), '(MainWindow)\n', (19987, 19999), False, 'from PyQt4 import QtCore, QtGui\n'), ((20135, 20164), 'PyQt4.QtGui.QDockWidget', 'QtGui.QDockWidget', (['MainWindow'], {}), '(MainWindow)\n', (20152, 20164), False, 'from PyQt4 import QtCore, QtGui\n'), ((20262, 20277), 'PyQt4.QtGui.QWidget', 'QtGui.QWidget', ([], {}), '()\n', (20275, 20277), False, 'from PyQt4 import QtCore, QtGui\n'), ((20387, 20429), 'PyQt4.QtGui.QVBoxLayout', 'QtGui.QVBoxLayout', (['self.dockWidgetContents'], {}), '(self.dockWidgetContents)\n', (20404, 20429), False, 'from PyQt4 import QtCore, QtGui\n'), ((20579, 20598), 'PyQt4.QtGui.QVBoxLayout', 'QtGui.QVBoxLayout', ([], {}), '()\n', (20596, 20598), False, 'from PyQt4 import QtCore, QtGui\n'), ((20780, 20817), 'PyQt4.QtGui.QLabel', 'QtGui.QLabel', (['self.dockWidgetContents'], {}), '(self.dockWidgetContents)\n', (20792, 20817), False, 'from PyQt4 import QtCore, QtGui\n'), ((20839, 20914), 'PyQt4.QtGui.QSizePolicy', 'QtGui.QSizePolicy', (['QtGui.QSizePolicy.Preferred', 'QtGui.QSizePolicy.Preferred'], {}), '(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)\n', (20856, 20914), False, 'from PyQt4 import QtCore, QtGui\n'), ((21221, 21234), 'PyQt4.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (21232, 21234), False, 'from PyQt4 import QtCore, QtGui\n'), ((21650, 21669), 'PyQt4.QtGui.QVBoxLayout', 'QtGui.QVBoxLayout', ([], {}), '()\n', (21667, 21669), False, 'from PyQt4 import QtCore, QtGui\n'), ((21785, 21825), 'PyQt4.QtGui.QGroupBox', 'QtGui.QGroupBox', (['self.dockWidgetContents'], {}), '(self.dockWidgetContents)\n', (21800, 21825), False, 'from PyQt4 import QtCore, QtGui\n'), ((21917, 21949), 'PyQt4.QtGui.QVBoxLayout', 'QtGui.QVBoxLayout', (['self.groupBox'], {}), '(self.groupBox)\n', (21934, 21949), False, 'from PyQt4 import QtCore, QtGui\n'), ((22059, 22091), 'PyQt4.QtGui.QPushButton', 'QtGui.QPushButton', (['self.groupBox'], {}), '(self.groupBox)\n', (22076, 22091), False, 'from PyQt4 import QtCore, QtGui\n'), ((22267, 22294), 'PyQt4.QtGui.QLabel', 'QtGui.QLabel', (['self.groupBox'], {}), '(self.groupBox)\n', (22279, 22294), False, 'from PyQt4 import QtCore, QtGui\n'), ((22464, 22483), 'PyQt4.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', ([], {}), '()\n', (22481, 22483), False, 'from PyQt4 import QtCore, QtGui\n'), ((22660, 22687), 'PyQt4.QtGui.QLabel', 'QtGui.QLabel', (['self.groupBox'], {}), '(self.groupBox)\n', (22672, 22687), False, 'from PyQt4 import QtCore, QtGui\n'), ((22857, 22884), 'PyQt4.QtGui.QLabel', 'QtGui.QLabel', (['self.groupBox'], {}), '(self.groupBox)\n', (22869, 22884), False, 'from PyQt4 import QtCore, QtGui\n'), ((22906, 22981), 'PyQt4.QtGui.QSizePolicy', 'QtGui.QSizePolicy', (['QtGui.QSizePolicy.Expanding', 'QtGui.QSizePolicy.Preferred'], {}), '(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Preferred)\n', (22923, 22981), False, 'from PyQt4 import QtCore, QtGui\n'), ((23230, 23243), 'PyQt4.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (23241, 23243), False, 'from PyQt4 import QtCore, QtGui\n'), ((23589, 23621), 'PyQt4.QtGui.QPushButton', 'QtGui.QPushButton', (['self.groupBox'], {}), '(self.groupBox)\n', (23606, 23621), False, 'from PyQt4 import QtCore, QtGui\n'), ((23828, 23860), 'PyQt4.QtGui.QPushButton', 'QtGui.QPushButton', (['self.groupBox'], {}), '(self.groupBox)\n', (23845, 23860), False, 'from PyQt4 import QtCore, QtGui\n'), ((24038, 24065), 'PyQt4.QtGui.QLabel', 'QtGui.QLabel', (['self.groupBox'], {}), '(self.groupBox)\n', (24050, 24065), False, 'from PyQt4 import QtCore, QtGui\n'), ((24081, 24094), 'PyQt4.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (24092, 24094), False, 'from PyQt4 import QtCore, QtGui\n'), ((24376, 24416), 'PyQt4.QtGui.QGroupBox', 'QtGui.QGroupBox', (['self.dockWidgetContents'], {}), '(self.dockWidgetContents)\n', (24391, 24416), False, 'from PyQt4 import QtCore, QtGui\n'), ((24512, 24546), 'PyQt4.QtGui.QVBoxLayout', 'QtGui.QVBoxLayout', (['self.groupBox_2'], {}), '(self.groupBox_2)\n', (24529, 24546), False, 'from PyQt4 import QtCore, QtGui\n'), ((24657, 24689), 'PyQt4.QtGui.QCheckBox', 'QtGui.QCheckBox', (['self.groupBox_2'], {}), '(self.groupBox_2)\n', (24672, 24689), False, 'from PyQt4 import QtCore, QtGui\n'), ((24924, 24956), 'PyQt4.QtGui.QCheckBox', 'QtGui.QCheckBox', (['self.groupBox_2'], {}), '(self.groupBox_2)\n', (24939, 24956), False, 'from PyQt4 import QtCore, QtGui\n'), ((25141, 25160), 'PyQt4.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', ([], {}), '()\n', (25158, 25160), False, 'from PyQt4 import QtCore, QtGui\n'), ((25347, 25377), 'PyQt4.QtGui.QSlider', 'QtGui.QSlider', (['self.groupBox_2'], {}), '(self.groupBox_2)\n', (25360, 25377), False, 'from PyQt4 import QtCore, QtGui\n'), ((25770, 25799), 'PyQt4.QtGui.QLabel', 'QtGui.QLabel', (['self.groupBox_2'], {}), '(self.groupBox_2)\n', (25782, 25799), False, 'from PyQt4 import QtCore, QtGui\n'), ((26083, 26102), 'PyQt4.QtGui.QVBoxLayout', 'QtGui.QVBoxLayout', ([], {}), '()\n', (26100, 26102), False, 'from PyQt4 import QtCore, QtGui\n'), ((26224, 26261), 'PyQt4.QtGui.QLabel', 'QtGui.QLabel', (['self.dockWidgetContents'], {}), '(self.dockWidgetContents)\n', (26236, 26261), False, 'from PyQt4 import QtCore, QtGui\n'), ((26424, 26464), 'PyQt4.QtGui.QCheckBox', 'QtGui.QCheckBox', (['self.dockWidgetContents'], {}), '(self.dockWidgetContents)\n', (26439, 26464), False, 'from PyQt4 import QtCore, QtGui\n'), ((26603, 26643), 'PyQt4.QtGui.QCheckBox', 'QtGui.QCheckBox', (['self.dockWidgetContents'], {}), '(self.dockWidgetContents)\n', (26618, 26643), False, 'from PyQt4 import QtCore, QtGui\n'), ((26782, 26822), 'PyQt4.QtGui.QCheckBox', 'QtGui.QCheckBox', (['self.dockWidgetContents'], {}), '(self.dockWidgetContents)\n', (26797, 26822), False, 'from PyQt4 import QtCore, QtGui\n'), ((26961, 27001), 'PyQt4.QtGui.QCheckBox', 'QtGui.QCheckBox', (['self.dockWidgetContents'], {}), '(self.dockWidgetContents)\n', (26976, 27001), False, 'from PyQt4 import QtCore, QtGui\n'), ((27506, 27531), 'PyQt4.QtGui.QAction', 'QtGui.QAction', (['MainWindow'], {}), '(MainWindow)\n', (27519, 27531), False, 'from PyQt4 import QtCore, QtGui\n'), ((27633, 27658), 'PyQt4.QtGui.QAction', 'QtGui.QAction', (['MainWindow'], {}), '(MainWindow)\n', (27646, 27658), False, 'from PyQt4 import QtCore, QtGui\n'), ((27762, 27787), 'PyQt4.QtGui.QAction', 'QtGui.QAction', (['MainWindow'], {}), '(MainWindow)\n', (27775, 27787), False, 'from PyQt4 import QtCore, QtGui\n'), ((27887, 27912), 'PyQt4.QtGui.QAction', 'QtGui.QAction', (['MainWindow'], {}), '(MainWindow)\n', (27900, 27912), False, 'from PyQt4 import QtCore, QtGui\n'), ((28026, 28051), 'PyQt4.QtGui.QAction', 'QtGui.QAction', (['MainWindow'], {}), '(MainWindow)\n', (28039, 28051), False, 'from PyQt4 import QtCore, QtGui\n'), ((28232, 28257), 'PyQt4.QtGui.QAction', 'QtGui.QAction', (['MainWindow'], {}), '(MainWindow)\n', (28245, 28257), False, 'from PyQt4 import QtCore, QtGui\n'), ((28379, 28404), 'PyQt4.QtGui.QAction', 'QtGui.QAction', (['MainWindow'], {}), '(MainWindow)\n', (28392, 28404), False, 'from PyQt4 import QtCore, QtGui\n'), ((28515, 28540), 'PyQt4.QtGui.QAction', 'QtGui.QAction', (['MainWindow'], {}), '(MainWindow)\n', (28528, 28540), False, 'from PyQt4 import QtCore, QtGui\n'), ((28644, 28669), 'PyQt4.QtGui.QAction', 'QtGui.QAction', (['MainWindow'], {}), '(MainWindow)\n', (28657, 28669), False, 'from PyQt4 import QtCore, QtGui\n'), ((29426, 29475), 'PyQt4.QtCore.QMetaObject.connectSlotsByName', 'QtCore.QMetaObject.connectSlotsByName', (['MainWindow'], {}), '(MainWindow)\n', (29463, 29475), False, 'from PyQt4 import QtCore, QtGui\n'), ((615, 668), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['context', 'text', 'disambig'], {}), '(context, text, disambig)\n', (643, 668), False, 'from PyQt4 import QtCore, QtGui\n'), ((1170, 1188), 'PyQt4.QtCore.QSize', 'QtCore.QSize', (['(0)', '(0)'], {}), '(0, 0)\n', (1182, 1188), False, 'from PyQt4 import QtCore, QtGui\n'), ((1224, 1256), 'PyQt4.QtCore.QSize', 'QtCore.QSize', (['(16777215)', '(16777215)'], {}), '(16777215, 16777215)\n', (1236, 1256), False, 'from PyQt4 import QtCore, QtGui\n'), ((19465, 19492), 'PyQt4.QtCore.QRect', 'QtCore.QRect', (['(0)', '(0)', '(792)', '(25)'], {}), '(0, 0, 792, 25)\n', (19477, 19492), False, 'from PyQt4 import QtCore, QtGui\n'), ((21178, 21204), 'PyQt4.QtCore.QSize', 'QtCore.QSize', (['(16777215)', '(50)'], {}), '(16777215, 50)\n', (21190, 21204), False, 'from PyQt4 import QtCore, QtGui\n'), ((27431, 27458), 'PyQt4.QtCore.Qt.DockWidgetArea', 'QtCore.Qt.DockWidgetArea', (['(1)'], {}), '(1)\n', (27455, 27458), False, 'from PyQt4 import QtCore, QtGui\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Aug 29 14:54:12 2018
@author: maximov
"""
import torch
import torch.nn as nn
import torch.utils.data
from torch.nn import functional as F
from arch.base_network import BaseNetwork
from arch.normalization import get_nonspade_norm_layer
from arch.architecture import ResnetBlock as ResnetBlock
from arch.architecture import SPADEResnetBlock as SPADEResnetBlock
# from base_network import BaseNetwork
# from normalization import get_nonspade_norm_layer
# from architecture import ResnetBlock as ResnetBlock
# from architecture import SPADEResnetBlock as SPADEResnetBlock
class Args():
num_upsampling_layers = 'normal'
ngf = 64
norm_G = 'spectralspadesyncbatch3x3'
semantic_nc = 11
ndf = 64
output_nc = 3
label_nc = 11
no_instance = True
# main architecture. use concatenation
class Generator(nn.Module):
def __init__(self, input_nc=11, num_classes=1200, encode_one_hot = True, img_size=128, **kwargs):
super(Generator, self).__init__()
self.in_dim = input_nc
self.encode_one_hot = encode_one_hot
self.img_size = img_size
        opt = Args()
# align the back ground with semantic
self.align_bg_conv = nn.Conv2d(3, input_nc, 3, padding=1)
input_ch = input_nc
# follow SPADE ResNet
if img_size==128:
self.conv0 = SPADEResnetBlock(input_ch, 32, opt)
input_ch = 32
self.conv1 = SPADEResnetBlock(input_ch, 64, opt)
self.conv2 = SPADEResnetBlock(64, 128, opt)
        self.conv3 = SPADEResnetBlock(128, 256, opt)
self.conv4 = SPADEResnetBlock(256, 256, opt)
self.res1 = ResidualBlock(256)
self.res2 = ResidualBlock(256)
self.res3 = ResidualBlock(256)
self.res4 = ResidualBlock(256)
# embed onehot with image
self.embed = nn.Sequential(
ConvLayer(512, 256, kernel_size=3, stride=1),
nn.InstanceNorm2d(256, affine=True),
)
self.up = nn.Upsample(scale_factor=2)
self.deconv4 = SPADEResnetBlock(256, 256, opt)
self.deconv3 = SPADEResnetBlock(256, 128, opt)
self.deconv2 = SPADEResnetBlock(128, 64, opt)
self.deconv1 = SPADEResnetBlock(64, 32, opt)
if img_size == 128:
self.deconv0 = SPADEResnetBlock(32, 16, opt)
self.conv_end = nn.Sequential(nn.Conv2d(16, 3, kernel_size=3, stride=1, padding=1),)
self.flag_onehot = encode_one_hot
if encode_one_hot:
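            # MLP lifts the class one-hot to 2048 features; forward() reshapes
            # that to a (32, 8, 8) map and encode_noise raises it to 256
            # channels before concatenation with the image features.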
self.encode_one_hot = nn.Sequential(
nn.Linear(num_classes, 256), nn.LeakyReLU(0.2, inplace=True),
nn.Linear(256, 256), nn.LeakyReLU(0.2, inplace=True),
nn.Linear(256, 256), nn.LeakyReLU(0.2, inplace=True),
nn.Linear(256, 256), nn.LeakyReLU(0.2, inplace=True),
nn.Linear(256, 512), nn.LeakyReLU(0.2, inplace=True),
nn.Linear(512, 1024), nn.LeakyReLU(0.2, inplace=True),
nn.Linear(1024, 2048), nn.LeakyReLU(0.2, inplace=True),
#nn.LeakyReLU(0.2, inplace=True),
)
self.encode_noise = nn.Sequential(
ConvLayer(32, 64, kernel_size=3, stride=1),
nn.LeakyReLU(0.2, inplace=True),
nn.InstanceNorm2d(64, affine=True),
ConvLayer(64, 128, kernel_size=3, stride=1),
nn.LeakyReLU(0.2, inplace=True),
nn.InstanceNorm2d(128, affine=True),
ConvLayer(128, 256, kernel_size=3, stride=1),
nn.LeakyReLU(0.2, inplace=True),
nn.InstanceNorm2d(256, affine=True),
)
else:
self.encode_one_hot = None
def convblock(self, in_ch,out_ch, krn_sz = 3):
block = nn.Sequential(
nn.Conv2d(in_ch, out_ch, kernel_size=krn_sz, stride=1, padding=int(krn_sz/2)),
#nn.BatchNorm2d(out_ch),
nn.LeakyReLU(0.2, inplace=True),
)
return block
def forward(self, seg, bg, onehot=None, high_res=0):
# step 1: encode bg to align semantic
# step 2: encode semantic?
# step 3: extract latent with bg and semantic
# step 4: upsampling
bg = self.align_bg_conv(bg)
# Encode
if self.img_size==128:
out = self.conv0(bg, seg)
# print(out.size(), seg.size())
out = self.conv1(out, seg) # [B, 64, 32, 32]
out = F.avg_pool2d(out, 2)
# seg = F.avg_pool2d(seg, 2)
# print(out.size(), seg.size())
out = self.conv2(out, seg) # [B, 128, 16, 16]
out = F.avg_pool2d(out, 2)
# seg = F.avg_pool2d(seg, 2)
# print(out.size(), seg.size())
out = self.conv3(out, seg) # [B, 256, 8, 8]
out = F.avg_pool2d(out, 2)
# seg = F.avg_pool2d(seg, 2)
# print(out.size(), seg.size())
out = self.conv4(out, seg) # [B, 256, 4, 4]
out = F.avg_pool2d(out, 2)
# seg = F.avg_pool2d(seg, 2)
# print(out.size(), seg.size())
# Embedding
if onehot is not None and self.flag_onehot:
noise = self.encode_one_hot(onehot)
noise = noise.view(-1, 32, 8, 8)
noise = self.encode_noise(noise)
# print(noise.size(), out.size())
out = torch.cat((out, noise), 1)
out = self.embed(out)
# Residual layers
out = self.res1(out)
out = self.res2(out)
out = self.res3(out)
out = self.res4(out)
# Decode
out = self.up(out)
out = self.deconv4(out,seg) # [B, 256, 8, 8]
out = self.up(out)
out = self.deconv3(out,seg) # [B, 128, 16, 16]
out = self.up(out)
out = self.deconv2(out,seg) # [B, 64, 32, 32]
out = self.deconv1(out,seg) # [B, 32, 64, 64]
# print(out.size())
if self.img_size==128:
out = self.deconv0(out, seg)
out = self.up(out)
# print(out.size())
# print(self.img_size, out.size())
out = self.conv_end(out) # [B, 3, 64, 64]
#out = torch.sigmoid(out)
# print(out.size())
return out
class Discriminator(nn.Module):
def __init__(self, input_nc=3, num_classes=1200, img_size=64, **kwargs):
super(Discriminator, self).__init__()
self.img_size = img_size
self.conv1 = ResidualBlockDown(input_nc, 64)
self.conv2 = ResidualBlockDown(64, 128)
self.conv3 = ResidualBlockDown(128, 256)
self.conv4 = ResidualBlockDown(256, 512)
if img_size==128:
self.conv5 = ResidualBlockDown(512, 512)
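        # 8192 = 512 channels * 4 * 4 spatial positions after downsampling.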
self.dense0 = nn.Linear(8192, 1024)
self.dense1 = nn.Linear(1024, 1)
def forward(self, x, high_res=0):
out = x # [B, 6, 64, 64]
# Encode
out_0 = (self.conv1(out)) # [B, 64, 32, 32]
out_1 = (self.conv2(out_0)) # [B, 128, 16, 16]
out_3 = (self.conv3(out_1)) # [B, 256, 8, 8]
out = (self.conv4(out_3)) # [B, 512, 4, 4]
if self.img_size==128:
out = (self.conv5(out)) # [B, 512, 4, 4]
out = out.view(out.size(0), -1)
out = F.leaky_relu(self.dense0(out), 0.2, inplace=True)
out = F.leaky_relu(self.dense1(out), 0.2, inplace=True)
return out
# region Residual Blocks
class ResidualBlockDown(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size=3, stride=1, padding=None):
super(ResidualBlockDown, self).__init__()
# Right Side
self.conv_r1 = ConvLayer(in_channels, out_channels, kernel_size, stride, padding)
self.conv_r2 = ConvLayer(out_channels, out_channels, kernel_size, stride, padding)
# Left Side
self.conv_l = ConvLayer(in_channels, out_channels, 1, 1)
def forward(self, x):
residual = x
# Right Side
out = F.relu(x)
out = self.conv_r1(out)
out = F.relu(out)
out = self.conv_r2(out)
out = F.avg_pool2d(out, 2)
# Left Side
residual = self.conv_l(residual)
residual = F.avg_pool2d(residual, 2)
# Merge
out = residual + out
return out
class ResidualBlockUp(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size=3, stride=1, upsample=2):
super(ResidualBlockUp, self).__init__()
# General
self.upsample = nn.Upsample(scale_factor=upsample, mode='nearest')
# Right Side
self.norm_r1 = nn.InstanceNorm2d(in_channels, affine=True)
self.conv_r1 = ConvLayer(in_channels, out_channels, kernel_size, stride)
self.norm_r2 = nn.InstanceNorm2d(out_channels, affine=True)
self.conv_r2 = ConvLayer(out_channels, out_channels, kernel_size, stride)
# Left Side
self.conv_l = ConvLayer(in_channels, out_channels, 1, 1)
def forward(self, x):
residual = x
# Right Side
out = self.norm_r1(x)
out = F.relu(out)
out = self.upsample(out)
out = self.conv_r1(out)
out = self.norm_r2(out)
out = F.relu(out)
out = self.conv_r2(out)
# Left Side
residual = self.upsample(residual)
residual = self.conv_l(residual)
# Merge
out = residual + out
return out
class ResidualBlock(nn.Module):
def __init__(self, channels):
super(ResidualBlock, self).__init__()
self.conv1 = ConvLayer(channels, channels, kernel_size=3, stride=1)
self.in1 = nn.InstanceNorm2d(channels, affine=True)
self.conv2 = ConvLayer(channels, channels, kernel_size=3, stride=1)
self.in2 = nn.InstanceNorm2d(channels, affine=True)
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.in1(out)
out = F.relu(out)
out = self.conv2(out)
out = self.in2(out)
out = out + residual
return out
class ConvLayer(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size, stride, padding=None):
super(ConvLayer, self).__init__()
if padding is None:
padding = kernel_size // 2
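        # Reflection padding avoids border artifacts, and spectral norm on the
        # conv weight helps stabilize GAN training.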
self.reflection_pad = nn.ReflectionPad2d(padding)
self.conv2d = nn.utils.spectral_norm(nn.Conv2d(in_channels, out_channels, kernel_size, stride))
def forward(self, x):
out = self.reflection_pad(x)
out = self.conv2d(out)
return out
# endregion
# if __name__ == '__main__':
# gnet = Generator()
# semantic_input = torch.ones(( 2, 11, 128, 128 ))
# bg_input = torch.ones(( 2,3,128,128 ))
# onehot_input = torch.zeros((2, 1200))
# output = gnet(semantic_input, bg_input, onehot_input)
|
[
"torch.nn.ReflectionPad2d",
"torch.nn.functional.avg_pool2d",
"torch.nn.Conv2d",
"torch.nn.InstanceNorm2d",
"torch.cat",
"torch.nn.Upsample",
"arch.architecture.SPADEResnetBlock",
"torch.nn.Linear",
"torch.nn.functional.relu",
"torch.nn.LeakyReLU"
] |
[((1275, 1311), 'torch.nn.Conv2d', 'nn.Conv2d', (['(3)', 'input_nc', '(3)'], {'padding': '(1)'}), '(3, input_nc, 3, padding=1)\n', (1284, 1311), True, 'import torch.nn as nn\n'), ((1506, 1541), 'arch.architecture.SPADEResnetBlock', 'SPADEResnetBlock', (['input_ch', '(64)', 'opt'], {}), '(input_ch, 64, opt)\n', (1522, 1541), True, 'from arch.architecture import SPADEResnetBlock as SPADEResnetBlock\n'), ((1563, 1593), 'arch.architecture.SPADEResnetBlock', 'SPADEResnetBlock', (['(64)', '(128)', 'opt'], {}), '(64, 128, opt)\n', (1579, 1593), True, 'from arch.architecture import SPADEResnetBlock as SPADEResnetBlock\n'), ((1615, 1646), 'arch.architecture.SPADEResnetBlock', 'SPADEResnetBlock', (['(128)', '(256)', 'opt'], {}), '(128, 256, opt)\n', (1631, 1646), True, 'from arch.architecture import SPADEResnetBlock as SPADEResnetBlock\n'), ((1667, 1698), 'arch.architecture.SPADEResnetBlock', 'SPADEResnetBlock', (['(256)', '(256)', 'opt'], {}), '(256, 256, opt)\n', (1683, 1698), True, 'from arch.architecture import SPADEResnetBlock as SPADEResnetBlock\n'), ((2063, 2090), 'torch.nn.Upsample', 'nn.Upsample', ([], {'scale_factor': '(2)'}), '(scale_factor=2)\n', (2074, 2090), True, 'import torch.nn as nn\n'), ((2114, 2145), 'arch.architecture.SPADEResnetBlock', 'SPADEResnetBlock', (['(256)', '(256)', 'opt'], {}), '(256, 256, opt)\n', (2130, 2145), True, 'from arch.architecture import SPADEResnetBlock as SPADEResnetBlock\n'), ((2169, 2200), 'arch.architecture.SPADEResnetBlock', 'SPADEResnetBlock', (['(256)', '(128)', 'opt'], {}), '(256, 128, opt)\n', (2185, 2200), True, 'from arch.architecture import SPADEResnetBlock as SPADEResnetBlock\n'), ((2224, 2254), 'arch.architecture.SPADEResnetBlock', 'SPADEResnetBlock', (['(128)', '(64)', 'opt'], {}), '(128, 64, opt)\n', (2240, 2254), True, 'from arch.architecture import SPADEResnetBlock as SPADEResnetBlock\n'), ((2278, 2307), 'arch.architecture.SPADEResnetBlock', 'SPADEResnetBlock', (['(64)', '(32)', 'opt'], {}), '(64, 32, opt)\n', (2294, 2307), True, 'from arch.architecture import SPADEResnetBlock as SPADEResnetBlock\n'), ((4552, 4572), 'torch.nn.functional.avg_pool2d', 'F.avg_pool2d', (['out', '(2)'], {}), '(out, 2)\n', (4564, 4572), True, 'from torch.nn import functional as F\n'), ((4721, 4741), 'torch.nn.functional.avg_pool2d', 'F.avg_pool2d', (['out', '(2)'], {}), '(out, 2)\n', (4733, 4741), True, 'from torch.nn import functional as F\n'), ((4888, 4908), 'torch.nn.functional.avg_pool2d', 'F.avg_pool2d', (['out', '(2)'], {}), '(out, 2)\n', (4900, 4908), True, 'from torch.nn import functional as F\n'), ((5055, 5075), 'torch.nn.functional.avg_pool2d', 'F.avg_pool2d', (['out', '(2)'], {}), '(out, 2)\n', (5067, 5075), True, 'from torch.nn import functional as F\n'), ((6782, 6803), 'torch.nn.Linear', 'nn.Linear', (['(8192)', '(1024)'], {}), '(8192, 1024)\n', (6791, 6803), True, 'import torch.nn as nn\n'), ((6826, 6844), 'torch.nn.Linear', 'nn.Linear', (['(1024)', '(1)'], {}), '(1024, 1)\n', (6835, 6844), True, 'import torch.nn as nn\n'), ((7999, 8008), 'torch.nn.functional.relu', 'F.relu', (['x'], {}), '(x)\n', (8005, 8008), True, 'from torch.nn import functional as F\n'), ((8055, 8066), 'torch.nn.functional.relu', 'F.relu', (['out'], {}), '(out)\n', (8061, 8066), True, 'from torch.nn import functional as F\n'), ((8113, 8133), 'torch.nn.functional.avg_pool2d', 'F.avg_pool2d', (['out', '(2)'], {}), '(out, 2)\n', (8125, 8133), True, 'from torch.nn import functional as F\n'), ((8215, 8240), 'torch.nn.functional.avg_pool2d', 'F.avg_pool2d', (['residual', '(2)'], 
{}), '(residual, 2)\n', (8227, 8240), True, 'from torch.nn import functional as F\n'), ((8521, 8571), 'torch.nn.Upsample', 'nn.Upsample', ([], {'scale_factor': 'upsample', 'mode': '"""nearest"""'}), "(scale_factor=upsample, mode='nearest')\n", (8532, 8571), True, 'import torch.nn as nn\n'), ((8617, 8660), 'torch.nn.InstanceNorm2d', 'nn.InstanceNorm2d', (['in_channels'], {'affine': '(True)'}), '(in_channels, affine=True)\n', (8634, 8660), True, 'import torch.nn as nn\n'), ((8766, 8810), 'torch.nn.InstanceNorm2d', 'nn.InstanceNorm2d', (['out_channels'], {'affine': '(True)'}), '(out_channels, affine=True)\n', (8783, 8810), True, 'import torch.nn as nn\n'), ((9093, 9104), 'torch.nn.functional.relu', 'F.relu', (['out'], {}), '(out)\n', (9099, 9104), True, 'from torch.nn import functional as F\n'), ((9216, 9227), 'torch.nn.functional.relu', 'F.relu', (['out'], {}), '(out)\n', (9222, 9227), True, 'from torch.nn import functional as F\n'), ((9639, 9679), 'torch.nn.InstanceNorm2d', 'nn.InstanceNorm2d', (['channels'], {'affine': '(True)'}), '(channels, affine=True)\n', (9656, 9679), True, 'import torch.nn as nn\n'), ((9775, 9815), 'torch.nn.InstanceNorm2d', 'nn.InstanceNorm2d', (['channels'], {'affine': '(True)'}), '(channels, affine=True)\n', (9792, 9815), True, 'import torch.nn as nn\n'), ((9935, 9946), 'torch.nn.functional.relu', 'F.relu', (['out'], {}), '(out)\n', (9941, 9946), True, 'from torch.nn import functional as F\n'), ((10309, 10336), 'torch.nn.ReflectionPad2d', 'nn.ReflectionPad2d', (['padding'], {}), '(padding)\n', (10327, 10336), True, 'import torch.nn as nn\n'), ((1421, 1456), 'arch.architecture.SPADEResnetBlock', 'SPADEResnetBlock', (['input_ch', '(32)', 'opt'], {}), '(input_ch, 32, opt)\n', (1437, 1456), True, 'from arch.architecture import SPADEResnetBlock as SPADEResnetBlock\n'), ((1997, 2032), 'torch.nn.InstanceNorm2d', 'nn.InstanceNorm2d', (['(256)'], {'affine': '(True)'}), '(256, affine=True)\n', (2014, 2032), True, 'import torch.nn as nn\n'), ((2373, 2402), 'arch.architecture.SPADEResnetBlock', 'SPADEResnetBlock', (['(32)', '(16)', 'opt'], {}), '(32, 16, opt)\n', (2389, 2402), True, 'from arch.architecture import SPADEResnetBlock as SPADEResnetBlock\n'), ((2442, 2494), 'torch.nn.Conv2d', 'nn.Conv2d', (['(16)', '(3)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(16, 3, kernel_size=3, stride=1, padding=1)\n', (2451, 2494), True, 'import torch.nn as nn\n'), ((4007, 4038), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['(0.2)'], {'inplace': '(True)'}), '(0.2, inplace=True)\n', (4019, 4038), True, 'import torch.nn as nn\n'), ((5429, 5455), 'torch.cat', 'torch.cat', (['(out, noise)', '(1)'], {}), '((out, noise), 1)\n', (5438, 5455), False, 'import torch\n'), ((10382, 10439), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_channels', 'out_channels', 'kernel_size', 'stride'], {}), '(in_channels, out_channels, kernel_size, stride)\n', (10391, 10439), True, 'import torch.nn as nn\n'), ((2632, 2659), 'torch.nn.Linear', 'nn.Linear', (['num_classes', '(256)'], {}), '(num_classes, 256)\n', (2641, 2659), True, 'import torch.nn as nn\n'), ((2661, 2692), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['(0.2)'], {'inplace': '(True)'}), '(0.2, inplace=True)\n', (2673, 2692), True, 'import torch.nn as nn\n'), ((2710, 2729), 'torch.nn.Linear', 'nn.Linear', (['(256)', '(256)'], {}), '(256, 256)\n', (2719, 2729), True, 'import torch.nn as nn\n'), ((2731, 2762), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['(0.2)'], {'inplace': '(True)'}), '(0.2, inplace=True)\n', (2743, 2762), True, 'import torch.nn as nn\n'), 
((2780, 2799), 'torch.nn.Linear', 'nn.Linear', (['(256)', '(256)'], {}), '(256, 256)\n', (2789, 2799), True, 'import torch.nn as nn\n'), ((2801, 2832), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['(0.2)'], {'inplace': '(True)'}), '(0.2, inplace=True)\n', (2813, 2832), True, 'import torch.nn as nn\n'), ((2850, 2869), 'torch.nn.Linear', 'nn.Linear', (['(256)', '(256)'], {}), '(256, 256)\n', (2859, 2869), True, 'import torch.nn as nn\n'), ((2871, 2902), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['(0.2)'], {'inplace': '(True)'}), '(0.2, inplace=True)\n', (2883, 2902), True, 'import torch.nn as nn\n'), ((2920, 2939), 'torch.nn.Linear', 'nn.Linear', (['(256)', '(512)'], {}), '(256, 512)\n', (2929, 2939), True, 'import torch.nn as nn\n'), ((2941, 2972), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['(0.2)'], {'inplace': '(True)'}), '(0.2, inplace=True)\n', (2953, 2972), True, 'import torch.nn as nn\n'), ((2990, 3010), 'torch.nn.Linear', 'nn.Linear', (['(512)', '(1024)'], {}), '(512, 1024)\n', (2999, 3010), True, 'import torch.nn as nn\n'), ((3012, 3043), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['(0.2)'], {'inplace': '(True)'}), '(0.2, inplace=True)\n', (3024, 3043), True, 'import torch.nn as nn\n'), ((3061, 3082), 'torch.nn.Linear', 'nn.Linear', (['(1024)', '(2048)'], {}), '(1024, 2048)\n', (3070, 3082), True, 'import torch.nn as nn\n'), ((3084, 3115), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['(0.2)'], {'inplace': '(True)'}), '(0.2, inplace=True)\n', (3096, 3115), True, 'import torch.nn as nn\n'), ((3304, 3335), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['(0.2)'], {'inplace': '(True)'}), '(0.2, inplace=True)\n', (3316, 3335), True, 'import torch.nn as nn\n'), ((3353, 3387), 'torch.nn.InstanceNorm2d', 'nn.InstanceNorm2d', (['(64)'], {'affine': '(True)'}), '(64, affine=True)\n', (3370, 3387), True, 'import torch.nn as nn\n'), ((3466, 3497), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['(0.2)'], {'inplace': '(True)'}), '(0.2, inplace=True)\n', (3478, 3497), True, 'import torch.nn as nn\n'), ((3515, 3550), 'torch.nn.InstanceNorm2d', 'nn.InstanceNorm2d', (['(128)'], {'affine': '(True)'}), '(128, affine=True)\n', (3532, 3550), True, 'import torch.nn as nn\n'), ((3630, 3661), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['(0.2)'], {'inplace': '(True)'}), '(0.2, inplace=True)\n', (3642, 3661), True, 'import torch.nn as nn\n'), ((3679, 3714), 'torch.nn.InstanceNorm2d', 'nn.InstanceNorm2d', (['(256)'], {'affine': '(True)'}), '(256, affine=True)\n', (3696, 3714), True, 'import torch.nn as nn\n')]
|
import logging
from pkg.compiler import compile_template
logger = logging.getLogger()
logger.setLevel(logging.INFO)
def lambda_handler(event: dict, context: object) -> dict:
# event = {
# "requestId": "1234567890",
# "fragment": {...}
# }
ret = event.copy()
try:
ret["fragment"] = compile_template(event["fragment"])
ret["status"] = "success"
except Exception as e:
# https://stackoverflow.com/questions/55190232/aws-cloudformation-transform-how-do-i-properly-return-an-error-message
logger.exception("failed: ")
ret["status"] = "failure"
ret["errorMessage"] = str(e)
finally:
return ret
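# Local sanity check (sketch): the event shape mirrors the comment above; what
# "fragment" must actually contain depends on compile_template.
if __name__ == "__main__":
    print(lambda_handler({"requestId": "1234567890", "fragment": {}}, None))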
|
[
"pkg.compiler.compile_template",
"logging.getLogger"
] |
[((68, 87), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (85, 87), False, 'import logging\n'), ((323, 358), 'pkg.compiler.compile_template', 'compile_template', (["event['fragment']"], {}), "(event['fragment'])\n", (339, 358), False, 'from pkg.compiler import compile_template\n')]
|
import tensorflow as tf
import numpy as np
import chess
#load the saved model
model=tf.keras.models.load_model('openlock_model')
#rest explained in nntest.py
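# Wrapping the trained model with a Softmax head turns its raw logits into
# class probabilities for the np.argmax call at the bottom.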
probmodel=tf.keras.Sequential([
model,
tf.keras.layers.Softmax()
])
PieceNum = {'p': 0, 'n': 1, 'b': 2, 'r': 3, 'q': 4, 'k': 5, '.': 6}
def numreprgen(repres):
    # Split the 64-character board string into 8 ranks, map each piece
    # letter to its class index, and scale to [0, 1] by dividing by 6.
    numsplted = []
    for row in range(8):
        rank = repres[row * 8:(row + 1) * 8]
        numsplted.append([PieceNum[k.lower()] / 6.0 for k in rank])
    return numsplted
def reprgener(fen):
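    # Rebuild a 64-square piece string from the FEN: piece_map() only lists
    # occupied squares, so every missing index becomes '.'.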
brd=chess.Board()
brd.set_fen(fen)
bb=chess.BaseBoard()
bb.set_board_fen(brd.board_fen())
pcmap=bb.piece_map()
repres=[]
for i in range(64):
if i in pcmap:
repres.append(pcmap[i].symbol())
else:
repres.append('.')
strrepres=''.join([elem for elem in repres])
return strrepres
testfen='r4r1k/p5p1/1pRq2np/5p2/7P/P4BP1/1P2QP2/2K1R3 b - - 0 1'
probs=probmodel(np.array([numreprgen(reprgener(testfen))]))
print(np.argmax(probs))
print(probs)
|
[
"tensorflow.keras.models.load_model",
"numpy.argmax",
"tensorflow.keras.layers.Softmax",
"chess.Board",
"chess.BaseBoard"
] |
[((85, 129), 'tensorflow.keras.models.load_model', 'tf.keras.models.load_model', (['"""openlock_model"""'], {}), "('openlock_model')\n", (111, 129), True, 'import tensorflow as tf\n'), ((778, 791), 'chess.Board', 'chess.Board', ([], {}), '()\n', (789, 791), False, 'import chess\n'), ((820, 837), 'chess.BaseBoard', 'chess.BaseBoard', ([], {}), '()\n', (835, 837), False, 'import chess\n'), ((1259, 1275), 'numpy.argmax', 'np.argmax', (['probs'], {}), '(probs)\n', (1268, 1275), True, 'import numpy as np\n'), ((203, 228), 'tensorflow.keras.layers.Softmax', 'tf.keras.layers.Softmax', ([], {}), '()\n', (226, 228), True, 'import tensorflow as tf\n')]
|
import json
import functools
from os import path, mkdir, getcwd
from flask import Flask, request
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import TypeDecorator, Unicode
from sqlalchemy_media import Image, ImageValidator, ImageProcessor, ImageAnalyzer, StoreManager, \
FileSystemStore
from sqlalchemy_media.constants import MB, KB
WORKING_DIR = path.abspath(getcwd())
TEMP_PATH = path.join(WORKING_DIR, 'static', 'avatars')
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///demo.db'
db = SQLAlchemy(app)
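# Register a filesystem store so uploaded avatars are written under TEMP_PATH
# and served back from http://localhost:5000/static/avatars.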
StoreManager.register(
'fs',
functools.partial(FileSystemStore, TEMP_PATH, 'http://localhost:5000/static/avatars'),
default=True
)
class MasterPageView(object):
header = '<!DOCTYPE html><head><meta charset="utf-8"><title>%s</title></head><body>'
footer = '</body>'
def __init__(self, title='demo', body=''):
self.title = title
self.body = body
def __str__(self):
return (self.header % self.title) + self.body + self.footer
def __iadd__(self, other):
self.body += other if isinstance(other, str) else str(other)
return self
def __iter__(self):
return iter(str(self).splitlines())
class Json(TypeDecorator):
impl = Unicode
def process_bind_param(self, value, engine):
return json.dumps(value)
def process_result_value(self, value, engine):
if value is None:
return None
return json.loads(value)
class Avatar(Image):
__auto_coercion__ = True
__pre_processors__ = [
ImageAnalyzer(),
ImageValidator(
minimum=(10, 10),
maximum=(3840, 3840),
content_types=('image/jpeg', 'image/png', 'image/gif'),
min_aspect_ratio=1,
max_aspect_ratio=1
),
ImageProcessor(fmt='jpeg', width=128)
]
__max_length__ = 6*MB
__min_length__ = 10*KB
class Person(db.Model):
__tablename__ = 'person'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(Unicode)
avatar = db.Column(Avatar.as_mutable(Json))
@app.errorhandler(500)
def internal_error(exception):
app.logger.error(exception)
return "500"
@app.route("/", methods=['GET', 'POST'])
def index():
page = MasterPageView('Index')
page += '<form method="POST" action="/" enctype="multipart/form-data">'
page += '<input type="text" name="name" value="Your Name here"/>'
page += '<input type="file" name="avatar" />'
page += '<input type="submit" />'
page += '</form>'
page += '<hr />'
with StoreManager(db.session()):
if request.method == 'POST':
new_person = Person(name=request.form['name'], avatar=request.files['avatar'])
db.session.add(new_person)
db.session.commit()
page += '<ul>'
for p in db.session.query(Person):
page += '<li>'
page += '<img src="%s" alt="%s">' % (p.avatar.locate(), p.name)
page += '<h2>%s</h2>' % p.name
page += '<h2>ID: %s</h2>' % p.id
page += '</li>'
page += '</ul>'
return str(page)
if __name__ == "__main__":
if not path.exists(TEMP_PATH):
mkdir(TEMP_PATH)
db.create_all()
app.run()
|
[
"functools.partial",
"os.mkdir",
"json.loads",
"os.getcwd",
"flask.Flask",
"os.path.exists",
"json.dumps",
"flask_sqlalchemy.SQLAlchemy",
"sqlalchemy_media.ImageProcessor",
"sqlalchemy_media.ImageAnalyzer",
"sqlalchemy_media.ImageValidator",
"os.path.join"
] |
[((401, 444), 'os.path.join', 'path.join', (['WORKING_DIR', '"""static"""', '"""avatars"""'], {}), "(WORKING_DIR, 'static', 'avatars')\n", (410, 444), False, 'from os import path, mkdir, getcwd\n'), ((452, 467), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (457, 467), False, 'from flask import Flask, request\n'), ((533, 548), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', (['app'], {}), '(app)\n', (543, 548), False, 'from flask_sqlalchemy import SQLAlchemy\n'), ((379, 387), 'os.getcwd', 'getcwd', ([], {}), '()\n', (385, 387), False, 'from os import path, mkdir, getcwd\n'), ((588, 677), 'functools.partial', 'functools.partial', (['FileSystemStore', 'TEMP_PATH', '"""http://localhost:5000/static/avatars"""'], {}), "(FileSystemStore, TEMP_PATH,\n 'http://localhost:5000/static/avatars')\n", (605, 677), False, 'import functools\n'), ((1333, 1350), 'json.dumps', 'json.dumps', (['value'], {}), '(value)\n', (1343, 1350), False, 'import json\n'), ((1468, 1485), 'json.loads', 'json.loads', (['value'], {}), '(value)\n', (1478, 1485), False, 'import json\n'), ((1573, 1588), 'sqlalchemy_media.ImageAnalyzer', 'ImageAnalyzer', ([], {}), '()\n', (1586, 1588), False, 'from sqlalchemy_media import Image, ImageValidator, ImageProcessor, ImageAnalyzer, StoreManager, FileSystemStore\n'), ((1598, 1757), 'sqlalchemy_media.ImageValidator', 'ImageValidator', ([], {'minimum': '(10, 10)', 'maximum': '(3840, 3840)', 'content_types': "('image/jpeg', 'image/png', 'image/gif')", 'min_aspect_ratio': '(1)', 'max_aspect_ratio': '(1)'}), "(minimum=(10, 10), maximum=(3840, 3840), content_types=(\n 'image/jpeg', 'image/png', 'image/gif'), min_aspect_ratio=1,\n max_aspect_ratio=1)\n", (1612, 1757), False, 'from sqlalchemy_media import Image, ImageValidator, ImageProcessor, ImageAnalyzer, StoreManager, FileSystemStore\n'), ((1828, 1865), 'sqlalchemy_media.ImageProcessor', 'ImageProcessor', ([], {'fmt': '"""jpeg"""', 'width': '(128)'}), "(fmt='jpeg', width=128)\n", (1842, 1865), False, 'from sqlalchemy_media import Image, ImageValidator, ImageProcessor, ImageAnalyzer, StoreManager, FileSystemStore\n'), ((3192, 3214), 'os.path.exists', 'path.exists', (['TEMP_PATH'], {}), '(TEMP_PATH)\n', (3203, 3214), False, 'from os import path, mkdir, getcwd\n'), ((3224, 3240), 'os.mkdir', 'mkdir', (['TEMP_PATH'], {}), '(TEMP_PATH)\n', (3229, 3240), False, 'from os import path, mkdir, getcwd\n')]
|
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import torch.nn as nn
import torch.nn.functional as F
from foundations import hparams
from lottery.desc import LotteryDesc
from models import base
from pruning import sparse_global
class Model(base.Model):
"""A MobileNet-V1 as originally designed for CIFAR-10."""
class Block(nn.Module):
"""A MobileNet-V1 block."""
# def __init__(self, f_in: int, f_out: int, downsample=False):
def __init__(self, f_in: int, f_out: int, stride=1):
super(Model.Block, self).__init__()
self.conv1 = nn.Conv2d(f_in, f_in, kernel_size=3, stride=stride, padding=1, groups=f_in, bias=False)
self.bn1 = nn.BatchNorm2d(f_in)
self.conv2 = nn.Conv2d(f_in, f_out, kernel_size=1, stride=1, padding=0, bias=False)
self.bn2 = nn.BatchNorm2d(f_out)
def forward(self, x):
out = F.relu(self.bn1(self.conv1(x)))
out = F.relu(self.bn2(self.conv2(out)))
return F.relu(out)
def __init__(self, initializer, num_512_blocks=5, outputs=None):
super(Model, self).__init__()
outputs = outputs or 10
# (128,2) means conv planes=128, conv stride=2, by default conv stride=1
# cfg = [64, (128,2), 128, (256,2), 256, (512,2), 512, 512, 512, 512, 512, (1024,2), 1024]
cfg_part1 = [64, (128,2), 128, (256,2), 256, (512,2)]
cfg_part2 = [512] * num_512_blocks
cfg_part3 = [(1024,2), 1024]
# Initial convolution.
self.conv = nn.Conv2d(3, 32, kernel_size=3, stride=1, padding=1, bias=False)
self.bn = nn.BatchNorm2d(32)
# The subsequent layers of MobileNet-V1.
self.layers_part1 = self._make_layers(in_planes=32 , config=cfg_part1)
self.layers_part2 = self._make_layers(in_planes=512, config=cfg_part2)
self.layers_part3 = self._make_layers(in_planes=512, config=cfg_part3)
# Final fc layer. Size = number of filters in last segment.
self.fc = nn.Linear(1024, outputs)
self.criterion = nn.CrossEntropyLoss()
# Initialize.
self.apply(initializer)
def _make_layers(self, in_planes, config):
layers = []
for x in config:
out_planes = x if isinstance(x, int) else x[0]
stride = 1 if isinstance(x, int) else x[1]
layers.append(Model.Block(in_planes, out_planes, stride))
in_planes = out_planes
return nn.Sequential(*layers)
def forward(self, x):
out = F.relu(self.bn(self.conv(x)))
out = self.layers_part1(out)
out = self.layers_part2(out)
out = self.layers_part3(out)
out = F.avg_pool2d(out, out.size()[3])
out = out.view(out.size(0), -1)
out = self.fc(out)
return out
@property
def output_layer_names(self):
return ['fc.weight', 'fc.bias']
@staticmethod
def is_valid_model_name(model_name):
return (model_name.startswith('cifar_mobilenetv1') and
3 >= len(model_name.split('_')) >= 2 and
all([x.isdigit() and int(x) > 0 for x in model_name.split('_')[2:]]) and
(len(model_name.split('_')) == 2 or int(model_name.split('_')[2]) >= 1))
@staticmethod
def get_model_from_name(model_name, initializer, outputs=10):
"""The naming scheme for a MobileNetV1 is 'cifar_mobilenetv1[_N]'.
The name of a MobileNetV1 is 'cifar_mobilenetv1[_N]'.
N is the total number of blocks with 512 input and output channels and stride 1.
The default value of W is 5 if it isn't provided.
"""
if not Model.is_valid_model_name(model_name):
raise ValueError('Invalid model name: {}'.format(model_name))
name = model_name.split('_')
N = 5 if len(name) == 2 else int(name[2])
return Model(initializer, N, outputs)
@property
def loss_criterion(self):
return self.criterion
@staticmethod
def default_hparams():
model_hparams = hparams.ModelHparams(
model_name='cifar_mobilenetv1',
model_init='kaiming_normal',
batchnorm_init='uniform',
)
dataset_hparams = hparams.DatasetHparams(
dataset_name='cifar10',
batch_size=128,
)
training_hparams = hparams.TrainingHparams(
optimizer_name='sgd',
momentum=0.9,
milestone_steps='80ep,120ep',
lr=0.1,
gamma=0.1,
weight_decay=1e-4,
training_steps='160ep',
)
pruning_hparams = sparse_global.PruningHparams(
pruning_strategy='sparse_global',
pruning_fraction=0.2
)
return LotteryDesc(model_hparams, dataset_hparams, training_hparams, pruning_hparams)
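# Usage sketch (initializer is whatever init function the framework normally
# supplies, e.g. a Kaiming-normal initializer):
# model = Model.get_model_from_name('cifar_mobilenetv1_7', initializer)
# builds the variant with seven 512-channel stride-1 blocks.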
|
[
"torch.nn.Sequential",
"foundations.hparams.ModelHparams",
"lottery.desc.LotteryDesc",
"foundations.hparams.TrainingHparams",
"torch.nn.Conv2d",
"torch.nn.CrossEntropyLoss",
"torch.nn.BatchNorm2d",
"pruning.sparse_global.PruningHparams",
"torch.nn.Linear",
"torch.nn.functional.relu",
"foundations.hparams.DatasetHparams"
] |
[((1672, 1736), 'torch.nn.Conv2d', 'nn.Conv2d', (['(3)', '(32)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)', 'bias': '(False)'}), '(3, 32, kernel_size=3, stride=1, padding=1, bias=False)\n', (1681, 1736), True, 'import torch.nn as nn\n'), ((1755, 1773), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(32)'], {}), '(32)\n', (1769, 1773), True, 'import torch.nn as nn\n'), ((2148, 2172), 'torch.nn.Linear', 'nn.Linear', (['(1024)', 'outputs'], {}), '(1024, outputs)\n', (2157, 2172), True, 'import torch.nn as nn\n'), ((2198, 2219), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (2217, 2219), True, 'import torch.nn as nn\n'), ((2602, 2624), 'torch.nn.Sequential', 'nn.Sequential', (['*layers'], {}), '(*layers)\n', (2615, 2624), True, 'import torch.nn as nn\n'), ((4179, 4291), 'foundations.hparams.ModelHparams', 'hparams.ModelHparams', ([], {'model_name': '"""cifar_mobilenetv1"""', 'model_init': '"""kaiming_normal"""', 'batchnorm_init': '"""uniform"""'}), "(model_name='cifar_mobilenetv1', model_init=\n 'kaiming_normal', batchnorm_init='uniform')\n", (4199, 4291), False, 'from foundations import hparams\n'), ((4361, 4423), 'foundations.hparams.DatasetHparams', 'hparams.DatasetHparams', ([], {'dataset_name': '"""cifar10"""', 'batch_size': '(128)'}), "(dataset_name='cifar10', batch_size=128)\n", (4383, 4423), False, 'from foundations import hparams\n'), ((4487, 4650), 'foundations.hparams.TrainingHparams', 'hparams.TrainingHparams', ([], {'optimizer_name': '"""sgd"""', 'momentum': '(0.9)', 'milestone_steps': '"""80ep,120ep"""', 'lr': '(0.1)', 'gamma': '(0.1)', 'weight_decay': '(0.0001)', 'training_steps': '"""160ep"""'}), "(optimizer_name='sgd', momentum=0.9, milestone_steps\n ='80ep,120ep', lr=0.1, gamma=0.1, weight_decay=0.0001, training_steps=\n '160ep')\n", (4510, 4650), False, 'from foundations import hparams\n'), ((4761, 4849), 'pruning.sparse_global.PruningHparams', 'sparse_global.PruningHparams', ([], {'pruning_strategy': '"""sparse_global"""', 'pruning_fraction': '(0.2)'}), "(pruning_strategy='sparse_global',\n pruning_fraction=0.2)\n", (4789, 4849), False, 'from pruning import sparse_global\n'), ((4896, 4974), 'lottery.desc.LotteryDesc', 'LotteryDesc', (['model_hparams', 'dataset_hparams', 'training_hparams', 'pruning_hparams'], {}), '(model_hparams, dataset_hparams, training_hparams, pruning_hparams)\n', (4907, 4974), False, 'from lottery.desc import LotteryDesc\n'), ((720, 811), 'torch.nn.Conv2d', 'nn.Conv2d', (['f_in', 'f_in'], {'kernel_size': '(3)', 'stride': 'stride', 'padding': '(1)', 'groups': 'f_in', 'bias': '(False)'}), '(f_in, f_in, kernel_size=3, stride=stride, padding=1, groups=f_in,\n bias=False)\n', (729, 811), True, 'import torch.nn as nn\n'), ((831, 851), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['f_in'], {}), '(f_in)\n', (845, 851), True, 'import torch.nn as nn\n'), ((877, 947), 'torch.nn.Conv2d', 'nn.Conv2d', (['f_in', 'f_out'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)', 'bias': '(False)'}), '(f_in, f_out, kernel_size=1, stride=1, padding=0, bias=False)\n', (886, 947), True, 'import torch.nn as nn\n'), ((971, 992), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['f_out'], {}), '(f_out)\n', (985, 992), True, 'import torch.nn as nn\n'), ((1145, 1156), 'torch.nn.functional.relu', 'F.relu', (['out'], {}), '(out)\n', (1151, 1156), True, 'import torch.nn.functional as F\n')]
|
# Utilities
import pickle
from math import pi, cos, sin, asin, sqrt
def saveFile(filename, data):
with open(filename, "wb") as f:
pickle.dump(data, f)
def loadFile(filename):
with open(filename, "rb") as f:
return pickle.load(f)
def coordToDeg(coord):
return coord[0] + coord[1] / 60 + coord[2] / 3600
def deg2rad(deg):
return deg * pi / 180
def rad2deg(rad):
return 180 * rad / pi
'''
Uses Haversine formula to calculate
distance between 2 points on a sphere
(approximation of Earth).
Input:
-point P given as (longitude, latitude),
where each is given as (degrees, minutes, seconds)
-point Q
Output:
-distance in kilometers
'''
def getDistance(P, Q): # point = (longitude, latitude)
x1, y1 = P; x1 = deg2rad(coordToDeg(x1)); y1 = deg2rad(coordToDeg(y1))
x2, y2 = Q; x2 = deg2rad(coordToDeg(x2)); y2 = deg2rad(coordToDeg(y2))
f1 = sin((y2-y1)/2)**2
f2 = cos(y1)
f3 = cos(y2)
f4 = sin((x2-x1)/2)**2
f = sqrt(f1 + f2*f3*f4)
R = (6356.752 + 6378.137) / 2 # Average of "Earth radius" at the poles and at the equator
return 2 * R * asin(f)
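# Example with assumed coordinates: Paris (2°21'3" E, 48°51'24" N) to
# Berlin (13°24'18" E, 52°31'12" N). Both are east of Greenwich, so the
# simple DMS-to-degrees conversion above applies; expect roughly 880 km.
if __name__ == "__main__":
    paris = ((2, 21, 3), (48, 51, 24))
    berlin = ((13, 24, 18), (52, 31, 12))
    print(getDistance(paris, berlin))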
|
[
"pickle.dump",
"math.asin",
"math.sqrt",
"math.sin",
"pickle.load",
"math.cos"
] |
[((937, 944), 'math.cos', 'cos', (['y1'], {}), '(y1)\n', (940, 944), False, 'from math import pi, cos, sin, asin, sqrt\n'), ((954, 961), 'math.cos', 'cos', (['y2'], {}), '(y2)\n', (957, 961), False, 'from math import pi, cos, sin, asin, sqrt\n'), ((998, 1021), 'math.sqrt', 'sqrt', (['(f1 + f2 * f3 * f4)'], {}), '(f1 + f2 * f3 * f4)\n', (1002, 1021), False, 'from math import pi, cos, sin, asin, sqrt\n'), ((144, 164), 'pickle.dump', 'pickle.dump', (['data', 'f'], {}), '(data, f)\n', (155, 164), False, 'import pickle\n'), ((241, 255), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (252, 255), False, 'import pickle\n'), ((910, 928), 'math.sin', 'sin', (['((y2 - y1) / 2)'], {}), '((y2 - y1) / 2)\n', (913, 928), False, 'from math import pi, cos, sin, asin, sqrt\n'), ((971, 989), 'math.sin', 'sin', (['((x2 - x1) / 2)'], {}), '((x2 - x1) / 2)\n', (974, 989), False, 'from math import pi, cos, sin, asin, sqrt\n'), ((1132, 1139), 'math.asin', 'asin', (['f'], {}), '(f)\n', (1136, 1139), False, 'from math import pi, cos, sin, asin, sqrt\n')]
|
#!/usr/bin/env python3
from navicatGA.selfies_solver import SelfiesGenAlgSolver
from navicatGA.score_modifiers import score_modifier
from navicatGA.wrappers_selfies import (
sc2smiles,
sc2mol_structure,
mol_structure2depictions,
)
from navicatGA.quantum_wrappers_selfies import sc2gap
from navicatGA.wrappers_selfies import sc2logp, sc2mw
# In this test, we don't use a chimera scalarizer; we simply define a combined fitness function.
def fitness_function_wrapper(target_1, target_2, target_3):
return (
lambda chromosome: (
0.4 * score_modifier(sc2gap(chromosome, lot=0), target_1, 3)
+ 0.4 * score_modifier(sc2logp(chromosome), target_2, 1)
+ 0.2 * score_modifier(sc2mw(chromosome), target_3, 3)
)
/ 3
)
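# The three score_modifier terms are weighted 0.4/0.4/0.2; the trailing /3 in
# the lambda above only rescales the combined fitness, it does not average.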
def test_real_application_16():
starting_selfies = ["[C][O][=C][C][=N][Ring_1]"]
solver = SelfiesGenAlgSolver(
n_genes=15,
pop_size=10,
max_gen=10,
fitness_function=fitness_function_wrapper(
target_1=0.05, target_2=0.1, target_3=65
), # homo-lumo gap, logp, mw
starting_selfies=starting_selfies,
starting_stoned=True,
prune_duplicates=True,
mutation_rate=0.05,
selection_rate=0.4,
random_state=666,
n_crossover_points=1,
verbose=False,
progress_bars=True,
to_file=True,
selection_strategy="boltzmann",
to_stdout=False,
logger_level="INFO",
logger_file="real_application.log",
show_stats=True,
)
solver.solve()
print(
"After optimization, the corresponding SMILES is : {0}".format(
sc2smiles(solver.best_individual_)
)
)
print(
"It has properties: \n HOMO-LUMO gap : {0} \n LogP : {1} \n Molecular weight : {2}".format(
sc2gap(solver.best_individual_),
sc2logp(solver.best_individual_),
sc2mw(solver.best_individual_),
)
)
mol = sc2mol_structure(solver.best_individual_)
mol_structure2depictions(mol, "real_application")
solver.close_solver_logger()
if __name__ == "__main__":
test_real_application_16()
|
[
"navicatGA.wrappers_selfies.sc2mw",
"navicatGA.quantum_wrappers_selfies.sc2gap",
"navicatGA.wrappers_selfies.sc2smiles",
"navicatGA.wrappers_selfies.sc2logp",
"navicatGA.wrappers_selfies.mol_structure2depictions",
"navicatGA.wrappers_selfies.sc2mol_structure"
] |
[((2011, 2052), 'navicatGA.wrappers_selfies.sc2mol_structure', 'sc2mol_structure', (['solver.best_individual_'], {}), '(solver.best_individual_)\n', (2027, 2052), False, 'from navicatGA.wrappers_selfies import sc2smiles, sc2mol_structure, mol_structure2depictions\n'), ((2057, 2106), 'navicatGA.wrappers_selfies.mol_structure2depictions', 'mol_structure2depictions', (['mol', '"""real_application"""'], {}), "(mol, 'real_application')\n", (2081, 2106), False, 'from navicatGA.wrappers_selfies import sc2smiles, sc2mol_structure, mol_structure2depictions\n'), ((1688, 1722), 'navicatGA.wrappers_selfies.sc2smiles', 'sc2smiles', (['solver.best_individual_'], {}), '(solver.best_individual_)\n', (1697, 1722), False, 'from navicatGA.wrappers_selfies import sc2smiles, sc2mol_structure, mol_structure2depictions\n'), ((1862, 1893), 'navicatGA.quantum_wrappers_selfies.sc2gap', 'sc2gap', (['solver.best_individual_'], {}), '(solver.best_individual_)\n', (1868, 1893), False, 'from navicatGA.quantum_wrappers_selfies import sc2gap\n'), ((1907, 1939), 'navicatGA.wrappers_selfies.sc2logp', 'sc2logp', (['solver.best_individual_'], {}), '(solver.best_individual_)\n', (1914, 1939), False, 'from navicatGA.wrappers_selfies import sc2logp, sc2mw\n'), ((1953, 1983), 'navicatGA.wrappers_selfies.sc2mw', 'sc2mw', (['solver.best_individual_'], {}), '(solver.best_individual_)\n', (1958, 1983), False, 'from navicatGA.wrappers_selfies import sc2logp, sc2mw\n'), ((732, 749), 'navicatGA.wrappers_selfies.sc2mw', 'sc2mw', (['chromosome'], {}), '(chromosome)\n', (737, 749), False, 'from navicatGA.wrappers_selfies import sc2logp, sc2mw\n'), ((588, 613), 'navicatGA.quantum_wrappers_selfies.sc2gap', 'sc2gap', (['chromosome'], {'lot': '(0)'}), '(chromosome, lot=0)\n', (594, 613), False, 'from navicatGA.quantum_wrappers_selfies import sc2gap\n'), ((663, 682), 'navicatGA.wrappers_selfies.sc2logp', 'sc2logp', (['chromosome'], {}), '(chromosome)\n', (670, 682), False, 'from navicatGA.wrappers_selfies import sc2logp, sc2mw\n')]
|
import madlib
for i in range(0, 100):
print(madlib.get_madlib())
|
[
"madlib.get_madlib"
] |
[((49, 68), 'madlib.get_madlib', 'madlib.get_madlib', ([], {}), '()\n', (66, 68), False, 'import madlib\n')]
|
from typing import List
from karabo.simulation.coordinate_helper import east_north_to_long_lat
from karabo.simulation.east_north_coordinate import EastNorthCoordinate
class Station:
def __init__(self, position: EastNorthCoordinate,
parent_longitude: float = 0,
parent_latitude: float = 0,
parent_altitude: float = 0):
"""
        :param position: Position of the station relative to the telescope centre.
"""
self.position: EastNorthCoordinate = position
        self.antennas: List[EastNorthCoordinate] = []
long, lat = east_north_to_long_lat(position.x, position.y, parent_longitude, parent_latitude)
self.longitude: float = long
self.latitude: float = lat
self.altitude: float = position.z
def add_station_antenna(self, antenna: EastNorthCoordinate):
self.antennas.append(antenna)
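# Usage sketch (hypothetical values, assuming EastNorthCoordinate takes
# x/y/z east/north/up offsets in metres):
# station = Station(EastNorthCoordinate(100.0, 0.0, 0.0))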
|
[
"karabo.simulation.coordinate_helper.east_north_to_long_lat"
] |
[((585, 670), 'karabo.simulation.coordinate_helper.east_north_to_long_lat', 'east_north_to_long_lat', (['position.x', 'position.y', 'parent_longitude', 'parent_latitude'], {}), '(position.x, position.y, parent_longitude,\n parent_latitude)\n', (607, 670), False, 'from karabo.simulation.coordinate_helper import east_north_to_long_lat\n')]
|
import numpy as np
import matplotlib.pyplot as plt
import timeit
import random
import math
def insertionSort(a):
for i in range(1,len(a)):
value = a[i]
pos = i
while (pos > 0 and value < a[pos-1]):
a[pos] = a[pos-1]
pos = pos-1
a[pos] = value
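# Counting sort is O(n + k) for n values in [0, max]; it works here because
# the benchmark data is bounded (np.random.randint(101) gives values <= 100).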
def countingSort(a, max):
    m = max + 1
    count = [0 for i in range(m)]
    for i in a:              # histogram of values
        count[i] += 1
    x = 0
    for i in range(m):       # write each value back count[i] times
        for j in range(count[i]):
            a[x] = i
            x += 1
def quickSort(a, low, high):
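    # Hoare-style partition around a randomly chosen pivot, then recurse on
    # the two halves; random pivots avoid the worst case on sorted input.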
if (low >= high):
return
i, j = low, high
pivot = a[random.randint(low, high)]
while i <= j:
while a[i] < pivot: i += 1
while a[j] > pivot: j -= 1
if i <= j:
a[i], a[j] = a[j], a[i]
i, j = i + 1, j - 1
quickSort(a, low, j)
quickSort(a, i, high)
def mergeSort(a):
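    # Top-down merge sort; note that this returns a new sorted list rather
    # than sorting in place.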
if (len(a)< 2):
return a
pivot = len(a)//2
left = mergeSort(a[:pivot])
right = mergeSort(a[pivot:])
return merge(left,right)
def merge(left,right):
if not left or not right:
return left or right
aux = []
i = 0
j = 0
while (len(aux) < len(left) + len(right)):
if (left[i] < right[j]):
aux.append(left[i])
i += 1
else:
aux.append(right[j])
j += 1
if (i == len(left) or j == len(right)):
aux.extend(left[i:] or right[j:])
break
return aux
def radixSort(a):
    # LSD radix sort: counting-sort the array on each decimal digit,
    # least significant digit first.
    length = len(a)
    exp = 1
    while max(a) // exp > 0:
        out = [0] * length
        count = [0] * 10
        for v in a:                          # histogram of the current digit
            count[(v // exp) % 10] += 1
        for i in range(1, 10):               # prefix sums give final positions
            count[i] += count[i - 1]
        for i in range(length - 1, -1, -1):  # stable, right-to-left placement
            digit = (a[i] // exp) % 10
            out[count[digit] - 1] = a[i]
            count[digit] -= 1
        for i in range(length):
            a[i] = out[i]
        exp *= 10
############## Begin Here ###############
a = np.random.randint(101, size = 128)
a = a.tolist()
radixSort(a)
print(a == sorted(a))  # sanity check: should print True
############ insertion sort ###############
b = np.array([])
c = np.array([])
d = np.array([])
e = np.array([])
counts = 0
print('128')
while (counts < 100):
a = np.random.randint(101, size = 128)
t1 = timeit.default_timer()
insertionSort(a)
t2 = timeit.default_timer()
t_diff = t2-t1
b = np.append(b, t_diff)
counts = counts + 1
b_insertion_sort_mean = np.mean(b)
print('1024')
while (counts < 200):
a = np.random.randint(101, size = 1024)
t1 = timeit.default_timer()
insertionSort(a)
t2 = timeit.default_timer()
t_diff = t2-t1
c = np.append(c, t_diff)
counts = counts + 1
c_insertion_sort_mean = np.mean(c)
print('4096')
while (counts < 300):
a = np.random.randint(101, size = 4096)
t1 = timeit.default_timer()
insertionSort(a)
t2 = timeit.default_timer()
t_diff = t2-t1
d = np.append(d, t_diff)
counts = counts + 1
d_insertion_sort_mean = np.mean(d)
print('16384')
while (counts < 400):
a = np.random.randint(101, size = 16384)
t1 = timeit.default_timer()
insertionSort(a)
t2 = timeit.default_timer()
t_diff = t2-t1
e = np.append(e, t_diff)
counts = counts + 1
e_insertion_sort_mean = np.mean(e)
################ counting sort ##################
b = np.array([])
c = np.array([])
d = np.array([])
e = np.array([])
counts = 0
print('128')
while (counts < 100):
a = np.random.randint(101, size = 128)
t1 = timeit.default_timer()
countingSort(a,100)
t2 = timeit.default_timer()
t_diff = t2-t1
b = np.append(b, t_diff)
counts = counts + 1
b_countingsort_mean = np.mean(b)
print('1024')
while (counts < 200):
a = np.random.randint(101, size = 1024)
t1 = timeit.default_timer()
countingSort(a,100)
t2 = timeit.default_timer()
t_diff = t2-t1
c = np.append(c, t_diff)
counts = counts + 1
c_countingsort_mean = np.mean(c)
print('4096')
while (counts < 300):
a = np.random.randint(101, size = 4096)
t1 = timeit.default_timer()
countingSort(a,100)
t2 = timeit.default_timer()
t_diff = t2-t1
d = np.append(d, t_diff)
counts = counts + 1
d_countingsort_mean = np.mean(d)
print('16384')
while (counts < 400):
a = np.random.randint(101, size = 16384)
t1 = timeit.default_timer()
countingSort(a,100)
t2 = timeit.default_timer()
t_diff = t2-t1
e = np.append(e, t_diff)
counts = counts + 1
e_countingsort_mean = np.mean(e)
################ quick sort ##################
b = np.array([])
c = np.array([])
d = np.array([])
e = np.array([])
counts = 0
print('128')
while (counts < 100):
a = np.random.randint(101, size = 128)
t1 = timeit.default_timer()
    quickSort(a, 0, len(a) - 1)
t2 = timeit.default_timer()
t_diff = t2-t1
b = np.append(b, t_diff)
counts = counts + 1
b_quicksort_mean = np.mean(b)
print('1024')
while (counts < 200):
a = np.random.randint(101, size = 1024)
t1 = timeit.default_timer()
    quickSort(a, 0, len(a) - 1)
t2 = timeit.default_timer()
t_diff = t2-t1
c = np.append(c, t_diff)
counts = counts + 1
c_quicksort_mean = np.mean(c)
print('4096')
while (counts < 300):
a = np.random.randint(101, size = 4096)
t1 = timeit.default_timer()
    quickSort(a, 0, len(a) - 1)
t2 = timeit.default_timer()
t_diff = t2-t1
d = np.append(d, t_diff)
counts = counts + 1
d_quicksort_mean = np.mean(d)
print('16384')
while (counts < 400):
a = np.random.randint(101, size = 16384)
t1 = timeit.default_timer()
    quickSort(a, 0, len(a) - 1)
t2 = timeit.default_timer()
t_diff = t2-t1
e = np.append(e, t_diff)
counts = counts + 1
e_quicksort_mean = np.mean(e)
############### merge sort ###################
b = np.array([])
c = np.array([])
d = np.array([])
e = np.array([])
counts = 0
print('128')
while (counts < 100):
a = np.random.randint(101, size = 128)
t1 = timeit.default_timer()
    a = mergeSort(a)
t2 = timeit.default_timer()
t_diff = t2-t1
b = np.append(b, t_diff)
counts = counts + 1
b_mergesort_mean = np.mean(b)
print('1024')
while (counts < 200):
a = np.random.randint(101, size = 1024)
t1 = timeit.default_timer()
    a = mergeSort(a)
t2 = timeit.default_timer()
t_diff = t2-t1
c = np.append(c, t_diff)
counts = counts + 1
c_mergesort_mean = np.mean(c)
print('4096')
while (counts < 300):
a = np.random.randint(101, size = 4096)
t1 = timeit.default_timer()
    a = mergeSort(a)
t2 = timeit.default_timer()
t_diff = t2-t1
d = np.append(d, t_diff)
counts = counts + 1
d_mergesort_mean = np.mean(d)
print('16384')
while (counts < 400):
a = np.random.randint(101, size = 16384)
t1 = timeit.default_timer()
    a = mergeSort(a)
t2 = timeit.default_timer()
t_diff = t2-t1
e = np.append(e, t_diff)
counts = counts + 1
e_mergesort_mean = np.mean(e)
############### radix sort ##################
b = np.array([])
c = np.array([])
d = np.array([])
e = np.array([])
counts = 0
print('128')
while (counts < 100):
a = np.random.randint(101, size = 128)
t1 = timeit.default_timer()
    radixSort(a)
t2 = timeit.default_timer()
t_diff = t2-t1
b = np.append(b, t_diff)
counts = counts + 1
b_radixsort_mean = np.mean(b)
print('1024')
while (counts < 200):
a = np.random.randint(101, size = 1024)
t1 = timeit.default_timer()
    radixSort(a)
t2 = timeit.default_timer()
t_diff = t2-t1
c = np.append(c, t_diff)
counts = counts + 1
c_radixsort_mean = np.mean(c)
print('4096')
while (counts < 300):
a = np.random.randint(101, size = 4096)
t1 = timeit.default_timer()
    radixSort(a)
t2 = timeit.default_timer()
t_diff = t2-t1
d = np.append(d, t_diff)
counts = counts + 1
d_radixsort_mean = np.mean(d)
print('16384')
while (counts < 400):
a = np.random.randint(101, size = 16384)
t1 = timeit.default_timer()
    radixSort(a)
t2 = timeit.default_timer()
t_diff = t2-t1
e = np.append(e, t_diff)
counts = counts + 1
e_radixsort_mean = np.mean(e)
############## plotting ##############
plt.plot([128, 1024, 4096, 16384],
[b_insertion_sort_mean, c_insertion_sort_mean, d_insertion_sort_mean,e_insertion_sort_mean],
c = 'blue',
label = 'Insertion Sort',
linestyle = '--',
linewidth = 2)
plt.plot([128, 1024, 4096, 16384],
[b_countingsort_mean, c_countingsort_mean, d_countingsort_mean,e_countingsort_mean],
c = 'red',
label = 'Counting Sort',
linestyle = '--',
linewidth = 2)
plt.plot([128, 1024, 4096, 16384],
[b_quicksort_mean, c_quicksort_mean, d_quicksort_mean,e_quicksort_mean],
c = 'green',
label = 'Quick Sort',
linestyle = '--',
linewidth = 2)
plt.plot([128, 1024, 4096, 16384],
[b_mergesort_mean, c_mergesort_mean, d_mergesort_mean,e_mergesort_mean],
c = 'yellow',
label = 'Merge Sort',
linestyle = '--',
linewidth = 2)
plt.plot([128, 1024, 4096, 16384],
[b_radixsort_mean, c_radixsort_mean, d_radixsort_mean,e_radixsort_mean],
c = 'black',
label = 'Radix Sort',
linestyle = '--',
linewidth = 2)
plt.axis([0, 18000, 0, .0005])
plt.legend()  # without this call the per-curve labels above are never shown
plt.show()
|
[
"matplotlib.pyplot.show",
"random.randint",
"matplotlib.pyplot.plot",
"timeit.default_timer",
"matplotlib.pyplot.axis",
"numpy.append",
"numpy.mean",
"numpy.random.randint",
"numpy.array"
] |
[((1790, 1822), 'numpy.random.randint', 'np.random.randint', (['(101)'], {'size': '(128)'}), '(101, size=128)\n', (1807, 1822), True, 'import numpy as np\n'), ((1927, 1939), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (1935, 1939), True, 'import numpy as np\n'), ((1945, 1957), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (1953, 1957), True, 'import numpy as np\n'), ((1963, 1975), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (1971, 1975), True, 'import numpy as np\n'), ((1981, 1993), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (1989, 1993), True, 'import numpy as np\n'), ((2260, 2270), 'numpy.mean', 'np.mean', (['b'], {}), '(b)\n', (2267, 2270), True, 'import numpy as np\n'), ((2523, 2533), 'numpy.mean', 'np.mean', (['c'], {}), '(c)\n', (2530, 2533), True, 'import numpy as np\n'), ((2786, 2796), 'numpy.mean', 'np.mean', (['d'], {}), '(d)\n', (2793, 2796), True, 'import numpy as np\n'), ((3051, 3061), 'numpy.mean', 'np.mean', (['e'], {}), '(e)\n', (3058, 3061), True, 'import numpy as np\n'), ((3122, 3134), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (3130, 3134), True, 'import numpy as np\n'), ((3140, 3152), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (3148, 3152), True, 'import numpy as np\n'), ((3158, 3170), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (3166, 3170), True, 'import numpy as np\n'), ((3176, 3188), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (3184, 3188), True, 'import numpy as np\n'), ((3456, 3466), 'numpy.mean', 'np.mean', (['b'], {}), '(b)\n', (3463, 3466), True, 'import numpy as np\n'), ((3720, 3730), 'numpy.mean', 'np.mean', (['c'], {}), '(c)\n', (3727, 3730), True, 'import numpy as np\n'), ((3984, 3994), 'numpy.mean', 'np.mean', (['d'], {}), '(d)\n', (3991, 3994), True, 'import numpy as np\n'), ((4250, 4260), 'numpy.mean', 'np.mean', (['e'], {}), '(e)\n', (4257, 4260), True, 'import numpy as np\n'), ((4320, 4332), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (4328, 4332), True, 'import numpy as np\n'), ((4338, 4350), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (4346, 4350), True, 'import numpy as np\n'), ((4356, 4368), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (4364, 4368), True, 'import numpy as np\n'), ((4374, 4386), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (4382, 4386), True, 'import numpy as np\n'), ((4644, 4654), 'numpy.mean', 'np.mean', (['b'], {}), '(b)\n', (4651, 4654), True, 'import numpy as np\n'), ((4898, 4908), 'numpy.mean', 'np.mean', (['c'], {}), '(c)\n', (4905, 4908), True, 'import numpy as np\n'), ((5152, 5162), 'numpy.mean', 'np.mean', (['d'], {}), '(d)\n', (5159, 5162), True, 'import numpy as np\n'), ((5408, 5418), 'numpy.mean', 'np.mean', (['e'], {}), '(e)\n', (5415, 5418), True, 'import numpy as np\n'), ((5474, 5486), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (5482, 5486), True, 'import numpy as np\n'), ((5492, 5504), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (5500, 5504), True, 'import numpy as np\n'), ((5510, 5522), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (5518, 5522), True, 'import numpy as np\n'), ((5528, 5540), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (5536, 5540), True, 'import numpy as np\n'), ((5798, 5808), 'numpy.mean', 'np.mean', (['b'], {}), '(b)\n', (5805, 5808), True, 'import numpy as np\n'), ((6052, 6062), 'numpy.mean', 'np.mean', (['c'], {}), '(c)\n', (6059, 6062), True, 'import numpy as np\n'), ((6306, 6316), 'numpy.mean', 'np.mean', (['d'], {}), '(d)\n', (6313, 6316), True, 'import numpy as np\n'), ((6562, 6572), 
'numpy.mean', 'np.mean', (['e'], {}), '(e)\n', (6569, 6572), True, 'import numpy as np\n'), ((6629, 6641), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (6637, 6641), True, 'import numpy as np\n'), ((6647, 6659), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (6655, 6659), True, 'import numpy as np\n'), ((6665, 6677), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (6673, 6677), True, 'import numpy as np\n'), ((6683, 6695), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (6691, 6695), True, 'import numpy as np\n'), ((6960, 6970), 'numpy.mean', 'np.mean', (['b'], {}), '(b)\n', (6967, 6970), True, 'import numpy as np\n'), ((7221, 7231), 'numpy.mean', 'np.mean', (['c'], {}), '(c)\n', (7228, 7231), True, 'import numpy as np\n'), ((7482, 7492), 'numpy.mean', 'np.mean', (['d'], {}), '(d)\n', (7489, 7492), True, 'import numpy as np\n'), ((7745, 7755), 'numpy.mean', 'np.mean', (['e'], {}), '(e)\n', (7752, 7755), True, 'import numpy as np\n'), ((7802, 8002), 'matplotlib.pyplot.plot', 'plt.plot', (['[128, 1024, 4096, 16384]', '[b_insertion_sort_mean, c_insertion_sort_mean, d_insertion_sort_mean,\n e_insertion_sort_mean]'], {'c': '"""blue"""', 'label': '"""Insertion Sort"""', 'linestyle': '"""--"""', 'linewidth': '(2)'}), "([128, 1024, 4096, 16384], [b_insertion_sort_mean,\n c_insertion_sort_mean, d_insertion_sort_mean, e_insertion_sort_mean], c\n ='blue', label='Insertion Sort', linestyle='--', linewidth=2)\n", (7810, 8002), True, 'import matplotlib.pyplot as plt\n'), ((8012, 8201), 'matplotlib.pyplot.plot', 'plt.plot', (['[128, 1024, 4096, 16384]', '[b_countingsort_mean, c_countingsort_mean, d_countingsort_mean,\n e_countingsort_mean]'], {'c': '"""red"""', 'label': '"""Counting Sort"""', 'linestyle': '"""--"""', 'linewidth': '(2)'}), "([128, 1024, 4096, 16384], [b_countingsort_mean,\n c_countingsort_mean, d_countingsort_mean, e_countingsort_mean], c='red',\n label='Counting Sort', linestyle='--', linewidth=2)\n", (8020, 8201), True, 'import matplotlib.pyplot as plt\n'), ((8212, 8388), 'matplotlib.pyplot.plot', 'plt.plot', (['[128, 1024, 4096, 16384]', '[b_quicksort_mean, c_quicksort_mean, d_quicksort_mean, e_quicksort_mean]'], {'c': '"""green"""', 'label': '"""Quick Sort"""', 'linestyle': '"""--"""', 'linewidth': '(2)'}), "([128, 1024, 4096, 16384], [b_quicksort_mean, c_quicksort_mean,\n d_quicksort_mean, e_quicksort_mean], c='green', label='Quick Sort',\n linestyle='--', linewidth=2)\n", (8220, 8388), True, 'import matplotlib.pyplot as plt\n'), ((8399, 8576), 'matplotlib.pyplot.plot', 'plt.plot', (['[128, 1024, 4096, 16384]', '[b_mergesort_mean, c_mergesort_mean, d_mergesort_mean, e_mergesort_mean]'], {'c': '"""yellow"""', 'label': '"""Merge Sort"""', 'linestyle': '"""--"""', 'linewidth': '(2)'}), "([128, 1024, 4096, 16384], [b_mergesort_mean, c_mergesort_mean,\n d_mergesort_mean, e_mergesort_mean], c='yellow', label='Merge Sort',\n linestyle='--', linewidth=2)\n", (8407, 8576), True, 'import matplotlib.pyplot as plt\n'), ((8587, 8763), 'matplotlib.pyplot.plot', 'plt.plot', (['[128, 1024, 4096, 16384]', '[b_radixsort_mean, c_radixsort_mean, d_radixsort_mean, e_radixsort_mean]'], {'c': '"""black"""', 'label': '"""Radix Sort"""', 'linestyle': '"""--"""', 'linewidth': '(2)'}), "([128, 1024, 4096, 16384], [b_radixsort_mean, c_radixsort_mean,\n d_radixsort_mean, e_radixsort_mean], c='black', label='Radix Sort',\n linestyle='--', linewidth=2)\n", (8595, 8763), True, 'import matplotlib.pyplot as plt\n'), ((8774, 8805), 'matplotlib.pyplot.axis', 'plt.axis', (['[0, 18000, 0, 0.0005]'], {}), 
'([0, 18000, 0, 0.0005])\n', (8782, 8805), True, 'import matplotlib.pyplot as plt\n'), ((8803, 8813), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (8811, 8813), True, 'import matplotlib.pyplot as plt\n'), ((2055, 2087), 'numpy.random.randint', 'np.random.randint', (['(101)'], {'size': '(128)'}), '(101, size=128)\n', (2072, 2087), True, 'import numpy as np\n'), ((2097, 2119), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (2117, 2119), False, 'import timeit\n'), ((2146, 2168), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (2166, 2168), False, 'import timeit\n'), ((2192, 2212), 'numpy.append', 'np.append', (['b', 't_diff'], {}), '(b, t_diff)\n', (2201, 2212), True, 'import numpy as np\n'), ((2317, 2350), 'numpy.random.randint', 'np.random.randint', (['(101)'], {'size': '(1024)'}), '(101, size=1024)\n', (2334, 2350), True, 'import numpy as np\n'), ((2360, 2382), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (2380, 2382), False, 'import timeit\n'), ((2409, 2431), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (2429, 2431), False, 'import timeit\n'), ((2455, 2475), 'numpy.append', 'np.append', (['c', 't_diff'], {}), '(c, t_diff)\n', (2464, 2475), True, 'import numpy as np\n'), ((2580, 2613), 'numpy.random.randint', 'np.random.randint', (['(101)'], {'size': '(4096)'}), '(101, size=4096)\n', (2597, 2613), True, 'import numpy as np\n'), ((2623, 2645), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (2643, 2645), False, 'import timeit\n'), ((2672, 2694), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (2692, 2694), False, 'import timeit\n'), ((2718, 2738), 'numpy.append', 'np.append', (['d', 't_diff'], {}), '(d, t_diff)\n', (2727, 2738), True, 'import numpy as np\n'), ((2844, 2878), 'numpy.random.randint', 'np.random.randint', (['(101)'], {'size': '(16384)'}), '(101, size=16384)\n', (2861, 2878), True, 'import numpy as np\n'), ((2888, 2910), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (2908, 2910), False, 'import timeit\n'), ((2937, 2959), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (2957, 2959), False, 'import timeit\n'), ((2983, 3003), 'numpy.append', 'np.append', (['e', 't_diff'], {}), '(e, t_diff)\n', (2992, 3003), True, 'import numpy as np\n'), ((3250, 3282), 'numpy.random.randint', 'np.random.randint', (['(101)'], {'size': '(128)'}), '(101, size=128)\n', (3267, 3282), True, 'import numpy as np\n'), ((3292, 3314), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (3312, 3314), False, 'import timeit\n'), ((3344, 3366), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (3364, 3366), False, 'import timeit\n'), ((3390, 3410), 'numpy.append', 'np.append', (['b', 't_diff'], {}), '(b, t_diff)\n', (3399, 3410), True, 'import numpy as np\n'), ((3513, 3546), 'numpy.random.randint', 'np.random.randint', (['(101)'], {'size': '(1024)'}), '(101, size=1024)\n', (3530, 3546), True, 'import numpy as np\n'), ((3556, 3578), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (3576, 3578), False, 'import timeit\n'), ((3608, 3630), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (3628, 3630), False, 'import timeit\n'), ((3654, 3674), 'numpy.append', 'np.append', (['c', 't_diff'], {}), '(c, t_diff)\n', (3663, 3674), True, 'import numpy as np\n'), ((3777, 3810), 'numpy.random.randint', 'np.random.randint', (['(101)'], {'size': '(4096)'}), '(101, size=4096)\n', (3794, 3810), True, 'import 
numpy as np\n'), ((3820, 3842), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (3840, 3842), False, 'import timeit\n'), ((3872, 3894), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (3892, 3894), False, 'import timeit\n'), ((3918, 3938), 'numpy.append', 'np.append', (['d', 't_diff'], {}), '(d, t_diff)\n', (3927, 3938), True, 'import numpy as np\n'), ((4042, 4076), 'numpy.random.randint', 'np.random.randint', (['(101)'], {'size': '(16384)'}), '(101, size=16384)\n', (4059, 4076), True, 'import numpy as np\n'), ((4086, 4108), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (4106, 4108), False, 'import timeit\n'), ((4138, 4160), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (4158, 4160), False, 'import timeit\n'), ((4184, 4204), 'numpy.append', 'np.append', (['e', 't_diff'], {}), '(e, t_diff)\n', (4193, 4204), True, 'import numpy as np\n'), ((4448, 4480), 'numpy.random.randint', 'np.random.randint', (['(101)'], {'size': '(128)'}), '(101, size=128)\n', (4465, 4480), True, 'import numpy as np\n'), ((4490, 4512), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (4510, 4512), False, 'import timeit\n'), ((4535, 4557), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (4555, 4557), False, 'import timeit\n'), ((4581, 4601), 'numpy.append', 'np.append', (['b', 't_diff'], {}), '(b, t_diff)\n', (4590, 4601), True, 'import numpy as np\n'), ((4701, 4734), 'numpy.random.randint', 'np.random.randint', (['(101)'], {'size': '(1024)'}), '(101, size=1024)\n', (4718, 4734), True, 'import numpy as np\n'), ((4744, 4766), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (4764, 4766), False, 'import timeit\n'), ((4789, 4811), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (4809, 4811), False, 'import timeit\n'), ((4835, 4855), 'numpy.append', 'np.append', (['c', 't_diff'], {}), '(c, t_diff)\n', (4844, 4855), True, 'import numpy as np\n'), ((4955, 4988), 'numpy.random.randint', 'np.random.randint', (['(101)'], {'size': '(4096)'}), '(101, size=4096)\n', (4972, 4988), True, 'import numpy as np\n'), ((4998, 5020), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (5018, 5020), False, 'import timeit\n'), ((5043, 5065), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (5063, 5065), False, 'import timeit\n'), ((5089, 5109), 'numpy.append', 'np.append', (['d', 't_diff'], {}), '(d, t_diff)\n', (5098, 5109), True, 'import numpy as np\n'), ((5210, 5244), 'numpy.random.randint', 'np.random.randint', (['(101)'], {'size': '(16384)'}), '(101, size=16384)\n', (5227, 5244), True, 'import numpy as np\n'), ((5254, 5276), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (5274, 5276), False, 'import timeit\n'), ((5299, 5321), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (5319, 5321), False, 'import timeit\n'), ((5345, 5365), 'numpy.append', 'np.append', (['e', 't_diff'], {}), '(e, t_diff)\n', (5354, 5365), True, 'import numpy as np\n'), ((5602, 5634), 'numpy.random.randint', 'np.random.randint', (['(101)'], {'size': '(128)'}), '(101, size=128)\n', (5619, 5634), True, 'import numpy as np\n'), ((5644, 5666), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (5664, 5666), False, 'import timeit\n'), ((5689, 5711), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (5709, 5711), False, 'import timeit\n'), ((5735, 5755), 'numpy.append', 'np.append', (['b', 't_diff'], {}), '(b, t_diff)\n', (5744, 
5755), True, 'import numpy as np\n'), ((5855, 5888), 'numpy.random.randint', 'np.random.randint', (['(101)'], {'size': '(1024)'}), '(101, size=1024)\n', (5872, 5888), True, 'import numpy as np\n'), ((5898, 5920), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (5918, 5920), False, 'import timeit\n'), ((5943, 5965), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (5963, 5965), False, 'import timeit\n'), ((5989, 6009), 'numpy.append', 'np.append', (['c', 't_diff'], {}), '(c, t_diff)\n', (5998, 6009), True, 'import numpy as np\n'), ((6109, 6142), 'numpy.random.randint', 'np.random.randint', (['(101)'], {'size': '(4096)'}), '(101, size=4096)\n', (6126, 6142), True, 'import numpy as np\n'), ((6152, 6174), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (6172, 6174), False, 'import timeit\n'), ((6197, 6219), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (6217, 6219), False, 'import timeit\n'), ((6243, 6263), 'numpy.append', 'np.append', (['d', 't_diff'], {}), '(d, t_diff)\n', (6252, 6263), True, 'import numpy as np\n'), ((6364, 6398), 'numpy.random.randint', 'np.random.randint', (['(101)'], {'size': '(16384)'}), '(101, size=16384)\n', (6381, 6398), True, 'import numpy as np\n'), ((6408, 6430), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (6428, 6430), False, 'import timeit\n'), ((6453, 6475), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (6473, 6475), False, 'import timeit\n'), ((6499, 6519), 'numpy.append', 'np.append', (['e', 't_diff'], {}), '(e, t_diff)\n', (6508, 6519), True, 'import numpy as np\n'), ((6757, 6789), 'numpy.random.randint', 'np.random.randint', (['(101)'], {'size': '(128)'}), '(101, size=128)\n', (6774, 6789), True, 'import numpy as np\n'), ((6799, 6821), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (6819, 6821), False, 'import timeit\n'), ((6851, 6873), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (6871, 6873), False, 'import timeit\n'), ((6897, 6917), 'numpy.append', 'np.append', (['b', 't_diff'], {}), '(b, t_diff)\n', (6906, 6917), True, 'import numpy as np\n'), ((7017, 7050), 'numpy.random.randint', 'np.random.randint', (['(101)'], {'size': '(1024)'}), '(101, size=1024)\n', (7034, 7050), True, 'import numpy as np\n'), ((7060, 7082), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (7080, 7082), False, 'import timeit\n'), ((7112, 7134), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (7132, 7134), False, 'import timeit\n'), ((7158, 7178), 'numpy.append', 'np.append', (['c', 't_diff'], {}), '(c, t_diff)\n', (7167, 7178), True, 'import numpy as np\n'), ((7278, 7311), 'numpy.random.randint', 'np.random.randint', (['(101)'], {'size': '(4096)'}), '(101, size=4096)\n', (7295, 7311), True, 'import numpy as np\n'), ((7321, 7343), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (7341, 7343), False, 'import timeit\n'), ((7373, 7395), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (7393, 7395), False, 'import timeit\n'), ((7419, 7439), 'numpy.append', 'np.append', (['d', 't_diff'], {}), '(d, t_diff)\n', (7428, 7439), True, 'import numpy as np\n'), ((7540, 7574), 'numpy.random.randint', 'np.random.randint', (['(101)'], {'size': '(16384)'}), '(101, size=16384)\n', (7557, 7574), True, 'import numpy as np\n'), ((7584, 7606), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (7604, 7606), False, 'import timeit\n'), ((7636, 7658), 
'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (7656, 7658), False, 'import timeit\n'), ((7682, 7702), 'numpy.append', 'np.append', (['e', 't_diff'], {}), '(e, t_diff)\n', (7691, 7702), True, 'import numpy as np\n'), ((568, 593), 'random.randint', 'random.randint', (['low', 'high'], {}), '(low, high)\n', (582, 593), False, 'import random\n')]
|
from .Const import Const
import requests
from bs4 import BeautifulSoup
from selenium import webdriver
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.support import expected_conditions as ec
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.common.exceptions import TimeoutException
class Shop():
CRAWLER_DRIVER_PATH = Const.CRAWLER_DRIVER_PATH
reqUserAgent = Const.UserAgent
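    # Class-level defaults: __init__ creates the real driver and
    # getProducts() resets self.products on every call.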
products = []
seleniumWebDriver = None
def __init__(self):
options = webdriver.ChromeOptions()
options.add_argument('--headless')
options.add_argument('--no-proxy-server')
options.add_argument("--window-position=-700,0")
options.add_argument("--window-size=576,1024")
options.add_argument('--blink-settings=imagesEnabled=false')
# options.add_argument('user-data-dir=' + chrome_profile_path)
self.seleniumWebDriver = webdriver.Chrome(service=Service(Const.CRAWLER_DRIVER_PATH), options=options)
    def getDomain(self) -> str:
        raise NotImplementedError('Override this function in a derived class!')
    def handlePyResponse(self, resp) -> bool:
        raise NotImplementedError('Override this function in a derived class!')
    def handleSeleniumResponse(self, webdriver) -> bool:
        raise NotImplementedError('Override this function in a derived class!')
def getProducts(self, url):
print("▶ Working on: 🌎 %s" % url)
self.products = []
if resp := self.execPyRequest(url): # first, try with py requests lib
if self.handlePyResponse(resp):
return self.products # return products set by the derived classes
if resp := self.execSeleniumRequest(url): # try with Selenium
if self.handleSeleniumResponse(self.seleniumWebDriver):
return self.products # return products set by the derived classes
return self.products
def execPyRequest(self, url) -> str:
print("▶ Trying with PyRequestsLib...", end=" ")
try:
response = requests.get(url, headers = Const.UserAgent)
print("HTTP %s" % (response.status_code))
if response.status_code != 200:
return None
with open('debug-last-resp.log', 'a') as debug_file: ### DEBUG
debug_file.write(response.text) ### DEBUG
return response.text
except Exception as e:
print("☠ Got a problem: %s" % e)
return None
def execSeleniumRequest(self, url) -> str:
print("▶ Trying with Selenium..." , end=" ")
try:
self.seleniumWebDriver.get(url)
with open('debug-last-resp.log', 'a') as debug_file: ### DEBUG
debug_file.write(self.seleniumWebDriver.page_source) ### DEBUG
            return self.seleniumWebDriver.page_source  # return value not currently used
except Exception as e:
print("☠ Got a problem: %s" % e)
return None
def close(self):
self.seleniumWebDriver.close()
self.seleniumWebDriver.quit()
def seleniumGetSourceByWaitForClass(self, expected_elem, delay=5):
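        # Wait up to `delay` seconds for an element with class `expected_elem`
        # to appear, then return the page source (None on timeout).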
soup = BeautifulSoup(self.seleniumWebDriver.page_source, features="html.parser")
        if not soup.find_all(attrs={'class': expected_elem}):
            try:  # the page may still be loading, so wait a little
element_present = ec.presence_of_element_located((By.CLASS_NAME, expected_elem))
print("Waiting for element \"%s\"..." % expected_elem)
WebDriverWait(self.seleniumWebDriver, delay).until(element_present)
except TimeoutException:
print("Expected element \"%s\" not found!" % expected_elem)
return None
return self.seleniumWebDriver.page_source
    def getChipName(self, name):
        # "Ti "/"TI "/"XT " keep a trailing space, presumably to avoid matching
        # inside longer tokens.
        chip = "N/A"
        if "3060" in name:
            chip = Const.CHIP_NAME_3060Ti if ("Ti " in name or "TI " in name) else Const.CHIP_NAME_3060
        elif "3070" in name:
            chip = Const.CHIP_NAME_3070Ti if ("Ti " in name or "TI " in name) else Const.CHIP_NAME_3070
        elif "3080" in name:
            chip = Const.CHIP_NAME_3080Ti if ("Ti " in name or "TI " in name) else Const.CHIP_NAME_3080
        elif "3090" in name:
            chip = Const.CHIP_NAME_3090Ti if ("Ti " in name or "TI " in name) else Const.CHIP_NAME_3090
        elif "6700" in name:
            chip = Const.CHIP_NAME_6700XT if "XT " in name else Const.CHIP_NAME_6700
        elif "6800" in name:
            chip = Const.CHIP_NAME_6800XT if "XT " in name else Const.CHIP_NAME_6800
        elif "6900" in name:
            chip = Const.CHIP_NAME_6900XT if "XT " in name else Const.CHIP_NAME_6900
        return chip
|
[
"selenium.webdriver.chrome.service.Service",
"selenium.webdriver.support.expected_conditions.presence_of_element_located",
"selenium.webdriver.ChromeOptions",
"requests.get",
"bs4.BeautifulSoup",
"selenium.webdriver.support.ui.WebDriverWait"
] |
[((571, 596), 'selenium.webdriver.ChromeOptions', 'webdriver.ChromeOptions', ([], {}), '()\n', (594, 596), False, 'from selenium import webdriver\n'), ((3251, 3324), 'bs4.BeautifulSoup', 'BeautifulSoup', (['self.seleniumWebDriver.page_source'], {'features': '"""html.parser"""'}), "(self.seleniumWebDriver.page_source, features='html.parser')\n", (3264, 3324), False, 'from bs4 import BeautifulSoup\n'), ((2140, 2182), 'requests.get', 'requests.get', (['url'], {'headers': 'Const.UserAgent'}), '(url, headers=Const.UserAgent)\n', (2152, 2182), False, 'import requests\n'), ((1000, 1034), 'selenium.webdriver.chrome.service.Service', 'Service', (['Const.CRAWLER_DRIVER_PATH'], {}), '(Const.CRAWLER_DRIVER_PATH)\n', (1007, 1034), False, 'from selenium.webdriver.chrome.service import Service\n'), ((3512, 3574), 'selenium.webdriver.support.expected_conditions.presence_of_element_located', 'ec.presence_of_element_located', (['(By.CLASS_NAME, expected_elem)'], {}), '((By.CLASS_NAME, expected_elem))\n', (3542, 3574), True, 'from selenium.webdriver.support import expected_conditions as ec\n'), ((3662, 3706), 'selenium.webdriver.support.ui.WebDriverWait', 'WebDriverWait', (['self.seleniumWebDriver', 'delay'], {}), '(self.seleniumWebDriver, delay)\n', (3675, 3706), False, 'from selenium.webdriver.support.ui import WebDriverWait\n')]
|
# -*- coding: utf-8 -*-
import unittest
from openprocurement.auctions.tessel.tests.base import BaseTesselAuctionWebTest
from openprocurement.auctions.core.tests.base import snitch
from openprocurement.auctions.core.tests.document import (
AuctionDocumentResourceTestMixin,
AuctionDocumentWithDSResourceTestMixin
)
from openprocurement.auctions.core.tests.blanks.document_blanks import (
# TesselAuctionDocumentWithDSResourceTest
create_auction_document_vdr,
put_auction_document_vdr,
)
from openprocurement.auctions.tessel.tests.blanks.document_blanks import (
patch_auction_document
)
class TesselAuctionDocumentResourceTest(BaseTesselAuctionWebTest, AuctionDocumentResourceTestMixin):
docservice = False
test_patch_auction_document = snitch(patch_auction_document)
class TesselAuctionDocumentWithDSResourceTest(TesselAuctionDocumentResourceTest, AuctionDocumentWithDSResourceTestMixin):
docservice = True
test_patch_auction_document = snitch(patch_auction_document)
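    # Tests inherited from the DS mixin are disabled by setting them to None.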
test_create_auction_document_pas = None
test_put_auction_document_pas = None
def suite():
tests = unittest.TestSuite()
tests.addTest(unittest.makeSuite(TesselAuctionDocumentResourceTest))
tests.addTest(unittest.makeSuite(TesselAuctionDocumentWithDSResourceTest))
return tests
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
[
"unittest.main",
"openprocurement.auctions.core.tests.base.snitch",
"unittest.makeSuite",
"unittest.TestSuite"
] |
[((771, 801), 'openprocurement.auctions.core.tests.base.snitch', 'snitch', (['patch_auction_document'], {}), '(patch_auction_document)\n', (777, 801), False, 'from openprocurement.auctions.core.tests.base import snitch\n'), ((983, 1013), 'openprocurement.auctions.core.tests.base.snitch', 'snitch', (['patch_auction_document'], {}), '(patch_auction_document)\n', (989, 1013), False, 'from openprocurement.auctions.core.tests.base import snitch\n'), ((1127, 1147), 'unittest.TestSuite', 'unittest.TestSuite', ([], {}), '()\n', (1145, 1147), False, 'import unittest\n'), ((1350, 1384), 'unittest.main', 'unittest.main', ([], {'defaultTest': '"""suite"""'}), "(defaultTest='suite')\n", (1363, 1384), False, 'import unittest\n'), ((1166, 1219), 'unittest.makeSuite', 'unittest.makeSuite', (['TesselAuctionDocumentResourceTest'], {}), '(TesselAuctionDocumentResourceTest)\n', (1184, 1219), False, 'import unittest\n'), ((1239, 1298), 'unittest.makeSuite', 'unittest.makeSuite', (['TesselAuctionDocumentWithDSResourceTest'], {}), '(TesselAuctionDocumentWithDSResourceTest)\n', (1257, 1298), False, 'import unittest\n')]
|
# This file is part of astro_metadata_translator.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the LICENSE file at the top-level directory of this distribution
# for details of code ownership.
#
# Use of this source code is governed by a 3-clause BSD-style
# license that can be found in the LICENSE file.
"""Metadata translation code for CFHT MegaPrime FITS headers"""
__all__ = ("MegaPrimeTranslator", )
import re
import posixpath
from astropy.io import fits
from astropy.coordinates import EarthLocation, Angle
import astropy.units as u
from ..translator import cache_translation, CORRECTIONS_RESOURCE_ROOT
from .fits import FitsTranslator
from .helpers import tracking_from_degree_headers, altaz_from_degree_headers
class MegaPrimeTranslator(FitsTranslator):
"""Metadata translator for CFHT MegaPrime standard headers.
"""
name = "MegaPrime"
"""Name of this translation class"""
supported_instrument = "MegaPrime"
"""Supports the MegaPrime instrument."""
default_resource_root = posixpath.join(CORRECTIONS_RESOURCE_ROOT, "CFHT")
"""Default resource path root to use to locate header correction files."""
# CFHT Megacam has no rotator, and the instrument angle on sky is set to
# +Y=N, +X=W which we define as a 0 degree rotation.
_const_map = {"boresight_rotation_angle": Angle(0*u.deg),
"boresight_rotation_coord": "sky",
"detector_group": None}
_trivial_map = {"physical_filter": "FILTER",
"dark_time": ("DARKTIME", dict(unit=u.s)),
"exposure_time": ("EXPTIME", dict(unit=u.s)),
"observation_id": "OBSID",
"object": "OBJECT",
"science_program": "RUNID",
"exposure_id": "EXPNUM",
"visit_id": "EXPNUM",
"detector_serial": "CCDNAME",
"relative_humidity": ["RELHUMID", "HUMIDITY"],
"temperature": (["TEMPERAT", "AIRTEMP"], dict(unit=u.deg_C)),
"boresight_airmass": ["AIRMASS", "BORE-AIRMASS"]}
@cache_translation
def to_datetime_begin(self):
# Docstring will be inherited. Property defined in properties.py
# We know it is UTC
value = self._from_fits_date_string(self._header["DATE-OBS"],
time_str=self._header["UTC-OBS"], scale="utc")
self._used_these_cards("DATE-OBS", "UTC-OBS")
return value
@cache_translation
def to_datetime_end(self):
# Docstring will be inherited. Property defined in properties.py
# Older files are missing UTCEND
if self.is_key_ok("UTCEND"):
# We know it is UTC
value = self._from_fits_date_string(self._header["DATE-OBS"],
time_str=self._header["UTCEND"], scale="utc")
self._used_these_cards("DATE-OBS", "UTCEND")
else:
# Take a guess by adding on the exposure time
value = self.to_datetime_begin() + self.to_exposure_time()
return value
@cache_translation
def to_location(self):
"""Calculate the observatory location.
Returns
-------
location : `astropy.coordinates.EarthLocation`
An object representing the location of the telescope.
"""
# Height is not in some MegaPrime files. Use the value from
# EarthLocation.of_site("CFHT")
# Some data uses OBS-LONG, OBS-LAT, other data uses LONGITUD and
# LATITUDE
for long_key, lat_key in (("LONGITUD", "LATITUDE"), ("OBS-LONG", "OBS-LAT")):
if self.are_keys_ok([long_key, lat_key]):
value = EarthLocation.from_geodetic(self._header[long_key], self._header[lat_key], 4215.0)
self._used_these_cards(long_key, lat_key)
break
else:
value = EarthLocation.of_site("CFHT")
return value
@cache_translation
def to_detector_name(self):
# Docstring will be inherited. Property defined in properties.py
if self.is_key_ok("EXTNAME"):
name = self._header["EXTNAME"]
# Only valid name has form "ccdNN"
if re.match(r"ccd\d+$", name):
self._used_these_cards("EXTNAME")
return name
# Dummy value, intended for PHU (need something to get filename)
return "ccd99"
@cache_translation
def to_detector_num(self):
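        # Valid names have the form "ccdNN"; strip the prefix (e.g. "ccd03" -> 3).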
name = self.to_detector_name()
return int(name[3:])
@cache_translation
def to_observation_type(self):
"""Calculate the observation type.
Returns
-------
typ : `str`
Observation type. Normalized to standard set.
"""
obstype = self._header["OBSTYPE"].strip().lower()
self._used_these_cards("OBSTYPE")
if obstype == "object":
return "science"
return obstype
@cache_translation
def to_tracking_radec(self):
"""Calculate the tracking RA/Dec for this observation.
Currently will be `None` for geocentric apparent coordinates.
Additionally, can be `None` for non-science observations.
The method supports multiple versions of header defining tracking
coordinates.
Returns
-------
coords : `astropy.coordinates.SkyCoord`
The tracking coordinates.
"""
radecsys = ("RADECSYS", "OBJRADEC", "RADESYS")
radecpairs = (("RA_DEG", "DEC_DEG"), ("BORE-RA", "BORE-DEC"))
return tracking_from_degree_headers(self, radecsys, radecpairs)
@cache_translation
def to_altaz_begin(self):
# Docstring will be inherited. Property defined in properties.py
return altaz_from_degree_headers(self, (("TELALT", "TELAZ"), ("BORE-ALT", "BORE-AZ")),
self.to_datetime_begin())
@cache_translation
def to_detector_exposure_id(self):
# Docstring will be inherited. Property defined in properties.py
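        # The factor 36 leaves one slot per MegaPrime detector, giving a
        # unique (exposure, detector) identifier.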
return self.to_exposure_id() * 36 + self.to_detector_num()
@cache_translation
def to_pressure(self):
# Docstring will be inherited. Property defined in properties.py
# Can be either AIRPRESS in Pa or PRESSURE in mbar
for key, unit in (("PRESSURE", u.hPa), ("AIRPRESS", u.Pa)):
if self.is_key_ok(key):
return self.quantity_from_card(key, unit)
else:
raise KeyError(f"{self._log_prefix}: Could not find pressure keywords in header")
@cache_translation
def to_observation_counter(self):
"""Return the lifetime exposure number.
Returns
-------
sequence : `int`
The observation counter.
"""
return self.to_exposure_id()
@classmethod
def determine_translatable_headers(cls, filename, primary=None):
"""Given a file return all the headers usable for metadata translation.
MegaPrime files are multi-extension FITS with a primary header and
each detector stored in a subsequent extension. MegaPrime uses
``INHERIT=F`` therefore the primary header will always be ignored
if given.
Parameters
----------
filename : `str`
Path to a file in a format understood by this translator.
primary : `dict`-like, optional
The primary header obtained by the caller. This is sometimes
already known, for example if a system is trying to bootstrap
without already knowing what data is in the file. Will be
ignored.
Yields
------
headers : iterator of `dict`-like
Each detector header in turn. The supplied header will never be
included.
Notes
-----
This translator class is specifically tailored to raw MegaPrime data
and is not designed to work with general FITS files. The normal
paradigm is for the caller to have read the first header and then
called `determine_translator()` on the result to work out which
translator class to then call to obtain the real headers to be used for
translation.
"""
# Since we want to scan many HDUs we use astropy directly to keep
# the file open rather than continually opening and closing it
# as we go to each HDU.
with fits.open(filename) as fits_file:
for hdu in fits_file:
# Astropy <=4.2 strips the EXTNAME header but some CFHT data
# have two EXTNAME headers and the CCD number is in the
# second one.
if hdu.name == "PRIMARY":
continue
if hdu.name.startswith("ccd"):
# It may only be some data files that are broken so
# handle the expected form.
yield hdu.header
continue
# Some test data at least has the EXTNAME as
# COMPRESSED_IMAGE but the EXTVER as the detector number.
if hdu.name == "COMPRESSED_IMAGE":
header = hdu.header
# Astropy strips EXTNAME so put it back for the translator
header["EXTNAME"] = f"ccd{hdu.ver:02d}"
yield header
|
[
"astropy.coordinates.EarthLocation.from_geodetic",
"re.match",
"posixpath.join",
"astropy.io.fits.open",
"astropy.coordinates.Angle",
"astropy.coordinates.EarthLocation.of_site"
] |
[((1121, 1170), 'posixpath.join', 'posixpath.join', (['CORRECTIONS_RESOURCE_ROOT', '"""CFHT"""'], {}), "(CORRECTIONS_RESOURCE_ROOT, 'CFHT')\n", (1135, 1170), False, 'import posixpath\n'), ((1431, 1447), 'astropy.coordinates.Angle', 'Angle', (['(0 * u.deg)'], {}), '(0 * u.deg)\n', (1436, 1447), False, 'from astropy.coordinates import EarthLocation, Angle\n'), ((4058, 4087), 'astropy.coordinates.EarthLocation.of_site', 'EarthLocation.of_site', (['"""CFHT"""'], {}), "('CFHT')\n", (4079, 4087), False, 'from astropy.coordinates import EarthLocation, Angle\n'), ((4381, 4407), 're.match', 're.match', (['"""ccd\\\\d+$"""', 'name'], {}), "('ccd\\\\d+$', name)\n", (4389, 4407), False, 'import re\n'), ((8613, 8632), 'astropy.io.fits.open', 'fits.open', (['filename'], {}), '(filename)\n', (8622, 8632), False, 'from astropy.io import fits\n'), ((3861, 3948), 'astropy.coordinates.EarthLocation.from_geodetic', 'EarthLocation.from_geodetic', (['self._header[long_key]', 'self._header[lat_key]', '(4215.0)'], {}), '(self._header[long_key], self._header[lat_key], \n 4215.0)\n', (3888, 3948), False, 'from astropy.coordinates import EarthLocation, Angle\n')]
|
import logging
from rich.logging import RichHandler
from rich.traceback import install
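# Render uncaught exceptions with rich, showing at most one stack frame.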
install(max_frames=1)
FORMAT = '%(message)s'
logging.basicConfig(
level='INFO',
format=FORMAT,
datefmt='[%X]',
handlers=[RichHandler(rich_tracebacks=True)]
)
log = logging.getLogger('rich')
|
[
"rich.traceback.install",
"rich.logging.RichHandler",
"logging.getLogger"
] |
[((89, 110), 'rich.traceback.install', 'install', ([], {'max_frames': '(1)'}), '(max_frames=1)\n', (96, 110), False, 'from rich.traceback import install\n'), ((271, 296), 'logging.getLogger', 'logging.getLogger', (['"""rich"""'], {}), "('rich')\n", (288, 296), False, 'import logging\n'), ((227, 260), 'rich.logging.RichHandler', 'RichHandler', ([], {'rich_tracebacks': '(True)'}), '(rich_tracebacks=True)\n', (238, 260), False, 'from rich.logging import RichHandler\n')]
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.conf.urls import url
from . import views
urlpatterns = [
url(
regex=r'^$',
view=views.program_list,
name='program_list'
),
url(
regex=r'(?P<program_slug>[-\w]+)/$',
view=views.program_detail,
name='program_detail'
),
url(
regex=r'(?P<program_slug>[-\w]+)/entry/(?P<programentryid>\d+)/$',
view=views.program_entry,
name='program_entry'
),
url(
regex=r'(?P<program_slug>[-\w]+)/(?P<exercise_slug>[-\w]+)$',
view=views.exercise_detail,
name='exercise_detail'
),
url(
regex=r'(?P<program_slug>[-\w]+)/entry/(?P<programentryid>\d+)/(?P<exercise_slug>[-\w]+)/(?P<exerciseentryid>\d+)/$',
view=views.exercise_entry,
name='exercise_entry'
),
]
|
[
"django.conf.urls.url"
] |
[((157, 218), 'django.conf.urls.url', 'url', ([], {'regex': '"""^$"""', 'view': 'views.program_list', 'name': '"""program_list"""'}), "(regex='^$', view=views.program_list, name='program_list')\n", (160, 218), False, 'from django.conf.urls import url\n'), ((255, 350), 'django.conf.urls.url', 'url', ([], {'regex': '"""(?P<program_slug>[-\\\\w]+)/$"""', 'view': 'views.program_detail', 'name': '"""program_detail"""'}), "(regex='(?P<program_slug>[-\\\\w]+)/$', view=views.program_detail, name=\n 'program_detail')\n", (258, 350), False, 'from django.conf.urls import url\n'), ((381, 504), 'django.conf.urls.url', 'url', ([], {'regex': '"""(?P<program_slug>[-\\\\w]+)/entry/(?P<programentryid>\\\\d+)/$"""', 'view': 'views.program_entry', 'name': '"""program_entry"""'}), "(regex='(?P<program_slug>[-\\\\w]+)/entry/(?P<programentryid>\\\\d+)/$',\n view=views.program_entry, name='program_entry')\n", (384, 504), False, 'from django.conf.urls import url\n'), ((535, 658), 'django.conf.urls.url', 'url', ([], {'regex': '"""(?P<program_slug>[-\\\\w]+)/(?P<exercise_slug>[-\\\\w]+)$"""', 'view': 'views.exercise_detail', 'name': '"""exercise_detail"""'}), "(regex='(?P<program_slug>[-\\\\w]+)/(?P<exercise_slug>[-\\\\w]+)$', view=\n views.exercise_detail, name='exercise_detail')\n", (538, 658), False, 'from django.conf.urls import url\n'), ((688, 872), 'django.conf.urls.url', 'url', ([], {'regex': '"""(?P<program_slug>[-\\\\w]+)/entry/(?P<programentryid>\\\\d+)/(?P<exercise_slug>[-\\\\w]+)/(?P<exerciseentryid>\\\\d+)/$"""', 'view': 'views.exercise_entry', 'name': '"""exercise_entry"""'}), "(regex=\n '(?P<program_slug>[-\\\\w]+)/entry/(?P<programentryid>\\\\d+)/(?P<exercise_slug>[-\\\\w]+)/(?P<exerciseentryid>\\\\d+)/$'\n , view=views.exercise_entry, name='exercise_entry')\n", (691, 872), False, 'from django.conf.urls import url\n')]
|
"""Custom TestCase and helpers for connectmessages tests."""
# -*- coding: utf-8 -*-
from django.core.urlresolvers import reverse
from django.contrib.auth import get_user_model
from django.contrib.messages.storage.fallback import FallbackStorage
from django.test import RequestFactory
from django.utils.timezone import now
from model_mommy import mommy
from open_connect.connect_core.utils.basetests import ConnectTestCase
from open_connect.groups.models import Group
from open_connect.connectmessages.models import Message, Thread, UserThread
USER_MODEL = get_user_model()
MESSAGE_TEXT = (
'This has been a test. This has been a test.'
' This has been a test. This has been a test.'
' This has been a test. This has been a test.'
' This has been a test. This has been a test.'
)
THREAD_SUBJECT = 'Test message'
class ConnectMessageTestCase(ConnectTestCase):
"""Helper TestCase for connectmessages app."""
# pylint: disable=invalid-name
@classmethod
def setUpClass(cls):
"""Setup the TestCase class"""
super(ConnectMessageTestCase, cls).setUpClass()
cls.group1 = mommy.make(
Group, tos_accepted_at=now())
cls.group2 = mommy.make(Group)
cls.superuser.add_to_group(cls.group1.pk)
cls.superuser.add_to_group(cls.group2.pk)
cls.normal_user.add_to_group(cls.group1.pk)
cls.staff_user.add_to_group(cls.group1.pk)
cls.thread1 = mommy.make(
Thread, group=cls.group1, subject=THREAD_SUBJECT)
cls.message1 = mommy.make(
Message, thread=cls.thread1, sender=cls.superuser,
text=MESSAGE_TEXT, status='approved')
cls.message2 = mommy.make(
Message, thread=cls.thread1, sender=cls.normal_user,
text=MESSAGE_TEXT, status='approved')
cls.thread2 = mommy.make(
Thread, group=cls.group2, subject=THREAD_SUBJECT)
cls.message3 = mommy.make(
Message, thread=cls.thread2, sender=cls.superuser,
text=MESSAGE_TEXT, status='approved')
cls.directthread1 = mommy.make(
Thread, thread_type='direct', subject=THREAD_SUBJECT)
cls.directmessage1 = mommy.make(
Message,
thread=cls.directthread1,
sender=cls.user1,
text=MESSAGE_TEXT,
status='approved'
)
mommy.make(UserThread, user=cls.normal_user, thread=cls.directthread1)
mommy.make(UserThread, user=cls.staff_user, thread=cls.directthread1)
cls.request_factory = RequestFactory()
cls.request = cls.request_factory.get('/')
setattr(cls.request, 'session', 'session')
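        # RequestFactory requests have no middleware, so fake the session and
        # message storage that views expect.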
messages = FallbackStorage(cls.request)
setattr(cls.request, '_messages', messages)
cls.request.user = cls.superuser
cls._group = None
# pylint: disable=invalid-name
def setUp(self):
"""Setup the test"""
self.client.post(
reverse('account_login'),
{'login': '<EMAIL>', 'password': '<PASSWORD>'})
def message(self, **kwargs):
"""Create a new non-persistent Message."""
return mommy.prepare(
Message,
thread=kwargs.get('thread', self.thread1),
sender=kwargs.get('user', self.superuser),
text=kwargs.get('message', MESSAGE_TEXT),
status=kwargs.get('status', 'approved')
)
@property
def group(self):
"""Cache and return the test group."""
if not self._group:
self._group = mommy.make(
Group, group__name='Test group', published=True)
return self._group
# pylint: disable=invalid-name
def assertSuccess(self, response):
"""Helper method for asserting a response object was successful."""
self.assertEqual(response.status_code, 200)
|
[
"model_mommy.mommy.make",
"django.core.urlresolvers.reverse",
"django.test.RequestFactory",
"django.utils.timezone.now",
"django.contrib.auth.get_user_model",
"django.contrib.messages.storage.fallback.FallbackStorage"
] |
[((561, 577), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (575, 577), False, 'from django.contrib.auth import get_user_model\n'), ((1201, 1218), 'model_mommy.mommy.make', 'mommy.make', (['Group'], {}), '(Group)\n', (1211, 1218), False, 'from model_mommy import mommy\n'), ((1446, 1506), 'model_mommy.mommy.make', 'mommy.make', (['Thread'], {'group': 'cls.group1', 'subject': 'THREAD_SUBJECT'}), '(Thread, group=cls.group1, subject=THREAD_SUBJECT)\n', (1456, 1506), False, 'from model_mommy import mommy\n'), ((1543, 1647), 'model_mommy.mommy.make', 'mommy.make', (['Message'], {'thread': 'cls.thread1', 'sender': 'cls.superuser', 'text': 'MESSAGE_TEXT', 'status': '"""approved"""'}), "(Message, thread=cls.thread1, sender=cls.superuser, text=\n MESSAGE_TEXT, status='approved')\n", (1553, 1647), False, 'from model_mommy import mommy\n'), ((1691, 1797), 'model_mommy.mommy.make', 'mommy.make', (['Message'], {'thread': 'cls.thread1', 'sender': 'cls.normal_user', 'text': 'MESSAGE_TEXT', 'status': '"""approved"""'}), "(Message, thread=cls.thread1, sender=cls.normal_user, text=\n MESSAGE_TEXT, status='approved')\n", (1701, 1797), False, 'from model_mommy import mommy\n'), ((1841, 1901), 'model_mommy.mommy.make', 'mommy.make', (['Thread'], {'group': 'cls.group2', 'subject': 'THREAD_SUBJECT'}), '(Thread, group=cls.group2, subject=THREAD_SUBJECT)\n', (1851, 1901), False, 'from model_mommy import mommy\n'), ((1938, 2042), 'model_mommy.mommy.make', 'mommy.make', (['Message'], {'thread': 'cls.thread2', 'sender': 'cls.superuser', 'text': 'MESSAGE_TEXT', 'status': '"""approved"""'}), "(Message, thread=cls.thread2, sender=cls.superuser, text=\n MESSAGE_TEXT, status='approved')\n", (1948, 2042), False, 'from model_mommy import mommy\n'), ((2092, 2156), 'model_mommy.mommy.make', 'mommy.make', (['Thread'], {'thread_type': '"""direct"""', 'subject': 'THREAD_SUBJECT'}), "(Thread, thread_type='direct', subject=THREAD_SUBJECT)\n", (2102, 2156), False, 'from model_mommy import mommy\n'), ((2199, 2305), 'model_mommy.mommy.make', 'mommy.make', (['Message'], {'thread': 'cls.directthread1', 'sender': 'cls.user1', 'text': 'MESSAGE_TEXT', 'status': '"""approved"""'}), "(Message, thread=cls.directthread1, sender=cls.user1, text=\n MESSAGE_TEXT, status='approved')\n", (2209, 2305), False, 'from model_mommy import mommy\n'), ((2380, 2450), 'model_mommy.mommy.make', 'mommy.make', (['UserThread'], {'user': 'cls.normal_user', 'thread': 'cls.directthread1'}), '(UserThread, user=cls.normal_user, thread=cls.directthread1)\n', (2390, 2450), False, 'from model_mommy import mommy\n'), ((2459, 2528), 'model_mommy.mommy.make', 'mommy.make', (['UserThread'], {'user': 'cls.staff_user', 'thread': 'cls.directthread1'}), '(UserThread, user=cls.staff_user, thread=cls.directthread1)\n', (2469, 2528), False, 'from model_mommy import mommy\n'), ((2560, 2576), 'django.test.RequestFactory', 'RequestFactory', ([], {}), '()\n', (2574, 2576), False, 'from django.test import RequestFactory\n'), ((2698, 2726), 'django.contrib.messages.storage.fallback.FallbackStorage', 'FallbackStorage', (['cls.request'], {}), '(cls.request)\n', (2713, 2726), False, 'from django.contrib.messages.storage.fallback import FallbackStorage\n'), ((2970, 2994), 'django.core.urlresolvers.reverse', 'reverse', (['"""account_login"""'], {}), "('account_login')\n", (2977, 2994), False, 'from django.core.urlresolvers import reverse\n'), ((3555, 3614), 'model_mommy.mommy.make', 'mommy.make', (['Group'], {'group__name': '"""Test group"""', 'published': 
'(True)'}), "(Group, group__name='Test group', published=True)\n", (3565, 3614), False, 'from model_mommy import mommy\n'), ((1173, 1178), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (1176, 1178), False, 'from django.utils.timezone import now\n')]
|
from django.urls import include, path
from videos.views import manage_videos, manage_videos_search
app_name = 'videos'
urlpatterns = [
path('videos', manage_videos, name='manage_videos'),
path('videos/search', manage_videos_search, name='manage_videos_search')
]
|
[
"django.urls.path"
] |
[((142, 193), 'django.urls.path', 'path', (['"""videos"""', 'manage_videos'], {'name': '"""manage_videos"""'}), "('videos', manage_videos, name='manage_videos')\n", (146, 193), False, 'from django.urls import include, path\n'), ((199, 271), 'django.urls.path', 'path', (['"""videos/search"""', 'manage_videos_search'], {'name': '"""manage_videos_search"""'}), "('videos/search', manage_videos_search, name='manage_videos_search')\n", (203, 271), False, 'from django.urls import include, path\n')]
|
# coding=utf-8
from __future__ import absolute_import
import logging
def init_logging(debug):
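    """Configure logging: everything at DEBUG when ``debug`` is set,
    otherwise the 'eodatasets' logger at INFO."""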
logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s')
if debug:
logging.getLogger().setLevel(logging.DEBUG)
else:
logging.getLogger('eodatasets').setLevel(logging.INFO)
|
[
"logging.getLogger",
"logging.basicConfig"
] |
[((101, 168), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(asctime)s %(levelname)s %(message)s"""'}), "(format='%(asctime)s %(levelname)s %(message)s')\n", (120, 168), False, 'import logging\n'), ((191, 210), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (208, 210), False, 'import logging\n'), ((253, 284), 'logging.getLogger', 'logging.getLogger', (['"""eodatasets"""'], {}), "('eodatasets')\n", (270, 284), False, 'import logging\n')]
|
from typing import Iterable, Sized, Collection, Callable, Tuple
from typing import Union, Optional, overload
from labml.internal.monitor import monitor_singleton as _internal
def clear():
_internal().clear()
def func(name, *,
is_silent: bool = False,
is_timed: bool = True,
is_partial: bool = False,
is_new_line: bool = True,
is_children_silent: bool = False,
total_steps: float = 1.0):
def decorator_func(f: Callable):
def wrapper(*args, **kwargs):
with section(name,
is_silent=is_silent,
is_timed=is_timed,
is_partial=is_partial,
is_new_line=is_new_line,
is_children_silent=is_children_silent,
total_steps=total_steps):
return f(*args, **kwargs)
return wrapper
return decorator_func
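# Illustrative use of `func` (the name below is made up):
#
#     @func('train step', is_timed=True)
#     def step():
#         ...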
def iterate(name, iterable: Union[Iterable, Sized, int],
total_steps: Optional[int] = None, *,
is_silent: bool = False,
is_children_silent: bool = False,
is_timed: bool = True,
context=None):
return _internal().iterate(name, iterable, total_steps,
is_silent=is_silent,
is_children_silent=is_children_silent,
is_timed=is_timed,
section=context)
def enum(name, iterable: Sized, *,
is_silent: bool = False,
is_children_silent: bool = False,
is_timed: bool = True,
context=None):
return _internal().enum(name, iterable,
is_silent=is_silent,
is_children_silent=is_children_silent,
is_timed=is_timed,
section=context)
def section(name, *,
is_silent: bool = False,
is_timed: bool = True,
is_partial: bool = False,
is_new_line: bool = True,
is_children_silent: bool = False,
total_steps: float = 1.0):
return _internal().section(name, is_silent=is_silent,
is_timed=is_timed,
is_partial=is_partial,
total_steps=total_steps,
is_new_line=is_new_line,
is_children_silent=is_children_silent)
def progress(steps: float):
_internal().progress(steps)
def fail():
_internal().set_successful(False)
@overload
def loop(iterator_: int, *,
is_track: bool = True,
is_print_iteration_time: bool = True):
...
@overload
def loop(iterator_: range, *,
is_track: bool = True,
is_print_iteration_time: bool = True):
...
@overload
def loop(iterator_: Collection, *,
is_track: bool = True,
is_print_iteration_time: bool = True):
...
def loop(iterator_: Union[Collection, range, int], *,
is_track: bool = True,
is_print_iteration_time: bool = True):
"""
This has multiple overloads
.. function:: loop(iterator_: range, *, is_track=True, is_print_iteration_time=True)
:noindex:
.. function:: loop(iterator_: int, *, is_track=True, is_print_iteration_time=True)
:noindex:
"""
if type(iterator_) == int:
return _internal().loop(range(iterator_),
is_track=is_track,
is_print_iteration_time=is_print_iteration_time)
else:
return _internal().loop(iterator_,
is_track=is_track,
is_print_iteration_time=is_print_iteration_time)
def mix(total_iterations, *iterators: Tuple[str, Sized],
is_monit: bool = True):
"""
Mix a set of iterators
"""
return _internal().mix(total_iterations, list(iterators), is_monit=is_monit)
def finish_loop():
_internal().finish_loop()
|
[
"labml.internal.monitor.monitor_singleton"
] |
[((195, 206), 'labml.internal.monitor.monitor_singleton', '_internal', ([], {}), '()\n', (204, 206), True, 'from labml.internal.monitor import monitor_singleton as _internal\n'), ((1217, 1228), 'labml.internal.monitor.monitor_singleton', '_internal', ([], {}), '()\n', (1226, 1228), True, 'from labml.internal.monitor import monitor_singleton as _internal\n'), ((1667, 1678), 'labml.internal.monitor.monitor_singleton', '_internal', ([], {}), '()\n', (1676, 1678), True, 'from labml.internal.monitor import monitor_singleton as _internal\n'), ((2175, 2186), 'labml.internal.monitor.monitor_singleton', '_internal', ([], {}), '()\n', (2184, 2186), True, 'from labml.internal.monitor import monitor_singleton as _internal\n'), ((2542, 2553), 'labml.internal.monitor.monitor_singleton', '_internal', ([], {}), '()\n', (2551, 2553), True, 'from labml.internal.monitor import monitor_singleton as _internal\n'), ((2588, 2599), 'labml.internal.monitor.monitor_singleton', '_internal', ([], {}), '()\n', (2597, 2599), True, 'from labml.internal.monitor import monitor_singleton as _internal\n'), ((3981, 3992), 'labml.internal.monitor.monitor_singleton', '_internal', ([], {}), '()\n', (3990, 3992), True, 'from labml.internal.monitor import monitor_singleton as _internal\n'), ((4076, 4087), 'labml.internal.monitor.monitor_singleton', '_internal', ([], {}), '()\n', (4085, 4087), True, 'from labml.internal.monitor import monitor_singleton as _internal\n'), ((3484, 3495), 'labml.internal.monitor.monitor_singleton', '_internal', ([], {}), '()\n', (3493, 3495), True, 'from labml.internal.monitor import monitor_singleton as _internal\n'), ((3676, 3687), 'labml.internal.monitor.monitor_singleton', '_internal', ([], {}), '()\n', (3685, 3687), True, 'from labml.internal.monitor import monitor_singleton as _internal\n')]
|
from django.shortcuts import render, get_object_or_404
from bleet.models import Bleet
from users.models import Follow, Profile
from django.contrib.auth.models import User
from django.views.generic import (
ListView,
DetailView,
CreateView,
UpdateView,
DeleteView,
)
from django.contrib.auth.mixins import LoginRequiredMixin, UserPassesTestMixin
from django.db.models import Count
from rest_framework.viewsets import ModelViewSet
from .serializers import BleetSerializer
def is_author(post, request):
"""
    Return a boolean indicating whether the post was authored by the
    logged-in user who made the request.
"""
return post.author == request.user
#: Number of bleets to show per page.
PAGINATION_COUNT = 10
class BleetListView(LoginRequiredMixin, ListView):
"""
Displays a list of bleets
"""
model = Bleet
template_name = "bleet/home.html"
context_object_name = "bleets"
ordering = ["-date_posted"]
paginate_by = PAGINATION_COUNT
def get_queryset(self):
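        # The home feed: the user's own bleets plus those of everyone they follow.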
user = self.request.user
qs = Follow.objects.filter(user=user)
follows = [user]
for obj in qs:
follows.append(obj.follow_user)
return Bleet.objects.filter(author__in=follows).order_by("-date_posted")
class UserBleetListView(LoginRequiredMixin, ListView):
model = Bleet
template_name = "bleet/user_posts.html"
context_object_name = "bleets"
paginate_by = PAGINATION_COUNT
def visible_user(self):
return get_object_or_404(User, username=self.kwargs.get("username"))
def get_context_data(self, **kwargs):
visible_user = self.visible_user()
logged_user = self.request.user
if logged_user.username == "" or logged_user is None:
can_follow = False
else:
can_follow = (
Follow.objects.filter(
user=logged_user, follow_user=visible_user
).count()
== 0
)
data = super().get_context_data(**kwargs)
data["user_profile"] = visible_user
data["can_follow"] = can_follow
return data
def get_queryset(self):
user = self.visible_user()
return Bleet.objects.filter(author=user).order_by("-date_posted")
def post(self, request, *args, **kwargs):
if request.user.id is not None:
follows = Follow.objects.filter(
user=request.user, follow_user=self.visible_user()
)
if "follow" in request.POST:
new_relation = Follow(
user=request.user, follow_user=self.visible_user()
)
if follows.count() == 0:
new_relation.save()
elif "unfollow" in request.POST:
if follows.count() > 0:
follows.delete()
return self.get(self, request, *args, **kwargs)
class BleetDeleteView(LoginRequiredMixin, UserPassesTestMixin, DeleteView):
model = Bleet
template_name = "bleet/bleet_delete.html"
context_object_name = "bleet"
success_url = "/"
def test_func(self):
return is_author(self.get_object(), self.request)
class BleetCreateView(LoginRequiredMixin, CreateView):
model = Bleet
fields = ["content"]
template_name = "bleet/bleet_new.html"
success_url = "/"
def form_valid(self, form):
form.instance.author = self.request.user
return super().form_valid(form)
def get_context_data(self, **kwargs):
data = super().get_context_data(**kwargs)
data["tag_line"] = "Add a new bleet"
return data
class FollowsListView(ListView):
model = Follow
template_name = "bleet/follow.html"
context_object_name = "follows"
def visible_user(self):
return get_object_or_404(User, username=self.kwargs.get("username"))
def get_queryset(self):
user = self.visible_user()
return Follow.objects.filter(user=user).order_by("-date")
def get_context_data(self, *, object_list=None, **kwargs):
data = super().get_context_data(**kwargs)
data["follow"] = "follows"
return data
class FollowersListView(ListView):
model = Follow
template_name = "bleet/follow.html"
context_object_name = "follows"
def visible_user(self):
return get_object_or_404(User, username=self.kwargs.get("username"))
def get_queryset(self):
user = self.visible_user()
return Follow.objects.filter(follow_user=user).order_by("-date")
def get_context_data(self, *, object_list=None, **kwargs):
data = super().get_context_data(**kwargs)
data["follow"] = "followers"
return data
class BleetViewSet(ModelViewSet):
queryset = Bleet.objects.all()
serializer_class = BleetSerializer
|
[
"bleet.models.Bleet.objects.all",
"users.models.Follow.objects.filter",
"bleet.models.Bleet.objects.filter"
] |
[((4809, 4828), 'bleet.models.Bleet.objects.all', 'Bleet.objects.all', ([], {}), '()\n', (4826, 4828), False, 'from bleet.models import Bleet\n'), ((1089, 1121), 'users.models.Follow.objects.filter', 'Follow.objects.filter', ([], {'user': 'user'}), '(user=user)\n', (1110, 1121), False, 'from users.models import Follow, Profile\n'), ((1229, 1269), 'bleet.models.Bleet.objects.filter', 'Bleet.objects.filter', ([], {'author__in': 'follows'}), '(author__in=follows)\n', (1249, 1269), False, 'from bleet.models import Bleet\n'), ((2248, 2281), 'bleet.models.Bleet.objects.filter', 'Bleet.objects.filter', ([], {'author': 'user'}), '(author=user)\n', (2268, 2281), False, 'from bleet.models import Bleet\n'), ((3991, 4023), 'users.models.Follow.objects.filter', 'Follow.objects.filter', ([], {'user': 'user'}), '(user=user)\n', (4012, 4023), False, 'from users.models import Follow, Profile\n'), ((4528, 4567), 'users.models.Follow.objects.filter', 'Follow.objects.filter', ([], {'follow_user': 'user'}), '(follow_user=user)\n', (4549, 4567), False, 'from users.models import Follow, Profile\n'), ((1867, 1932), 'users.models.Follow.objects.filter', 'Follow.objects.filter', ([], {'user': 'logged_user', 'follow_user': 'visible_user'}), '(user=logged_user, follow_user=visible_user)\n', (1888, 1932), False, 'from users.models import Follow, Profile\n')]
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
import tensorflow as tf
import numpy as np
import PIL.Image as Image
import time
import cv2
from pyfirmata import Arduino
board = Arduino('/dev/ttyACM0')
little = board.get_pin('d:3:s')
ring = board.get_pin('d:5:s')
middle = board.get_pin('d:6:s')
thumb = board.get_pin('d:9:s')
index = board.get_pin('d:10:s')
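# Servo positions used below: writing ~170 curls a finger, ~10 extends it.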
def rock():
thumb.write(170)
index.write(170)
middle.write(170)
ring.write(170)
little.write(170)
def paper():
thumb.write(10)
index.write(10)
middle.write(10)
ring.write(10)
little.write(10)
def scissor():
thumb.write(170)
index.write(10)
middle.write(10)
ring.write(170)
little.write(170)
def rps(model_dir, classes):
clicked = False
def onMouse(event, x, y, flags, param):
        nonlocal clicked  # bind to the flag in the enclosing rps() scope so clicks stop the loop
if event == cv2.EVENT_LBUTTONUP:
clicked = True
cameraCapture = cv2.VideoCapture(2)
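    # Property ids 3 and 4 are CAP_PROP_FRAME_WIDTH / CAP_PROP_FRAME_HEIGHT.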
cameraCapture.set(3, 100)
cameraCapture.set(4, 100)
cv2.namedWindow('MyWindow')
cv2.setMouseCallback('MyWindow', onMouse)
    print('Showing camera feed. Click the window or press any key to stop.')
success, frame = cameraCapture.read()
print(success)
flag = 0
saver = tf.train.import_meta_graph(model_dir+".meta")
with tf.Session() as sess:
saver.restore(sess, model_dir)
x = tf.get_default_graph().get_tensor_by_name("images:0")
keep_prob = tf.get_default_graph().get_tensor_by_name("keep_prob:0")
y = tf.get_default_graph().get_tensor_by_name("fc2/output:0")
while success and cv2.waitKey(1)==-1 and not clicked:
time1 = time.time()
cv2.imshow('MyWindow', frame)
success, frame = cameraCapture.read()
img = Image.fromarray(frame)
            # Convert the frame to grayscale and shrink it to the 28x28 model input
img = np.array(img.convert('L').resize((28, 28)),dtype=np.float32)
img = img.reshape((1,28*28))
img = img/255.0
prediction = sess.run(y, feed_dict={x:img,keep_prob: 1.0})
            pred = np.argmax(prediction)  # predicted class (kept distinct from the `index` servo pin)
            probability = prediction[0][pred]
            # `flag` remembers the last gesture so the servos only move on a change.
            if pred == 0 and flag != 0 and probability > 0.8:
                print('you paper, me scissor')
                scissor()
                flag = 0
            elif pred == 1 and flag != 1 and probability > 0.8:
                print('you rock, me paper')
                paper()
                flag = 1
            elif pred == 2 and flag != 2 and probability > 0.8:
                print('you scissor, me rock')
                rock()
                flag = 2
            elif pred == 3 and flag != 3 and probability > 0.8:
                # rotate(p, -30)
                print('hey, show either rock, paper or scissor')
                flag = 3
cv2.destroyWindow('MyWindow')
cameraCapture.release()
if __name__=="__main__":
classes = ['paper', 'rock', 'scissors', 'others']
model_dir="model/model.ckpt"
time.sleep(2)
rps(model_dir, classes)
|
[
"tensorflow.train.import_meta_graph",
"numpy.argmax",
"cv2.waitKey",
"tensorflow.Session",
"time.sleep",
"cv2.VideoCapture",
"time.time",
"pyfirmata.Arduino",
"cv2.setMouseCallback",
"cv2.destroyWindow",
"PIL.Image.fromarray",
"tensorflow.get_default_graph",
"cv2.imshow",
"cv2.namedWindow"
] |
[((334, 357), 'pyfirmata.Arduino', 'Arduino', (['"""/dev/ttyACM0"""'], {}), "('/dev/ttyACM0')\n", (341, 357), False, 'from pyfirmata import Arduino\n'), ((1122, 1141), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(2)'], {}), '(2)\n', (1138, 1141), False, 'import cv2\n'), ((1206, 1233), 'cv2.namedWindow', 'cv2.namedWindow', (['"""MyWindow"""'], {}), "('MyWindow')\n", (1221, 1233), False, 'import cv2\n'), ((1244, 1285), 'cv2.setMouseCallback', 'cv2.setMouseCallback', (['"""MyWindow"""', 'onMouse'], {}), "('MyWindow', onMouse)\n", (1264, 1285), False, 'import cv2\n'), ((1507, 1554), 'tensorflow.train.import_meta_graph', 'tf.train.import_meta_graph', (["(model_dir + '.meta')"], {}), "(model_dir + '.meta')\n", (1533, 1554), True, 'import tensorflow as tf\n'), ((3346, 3359), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (3356, 3359), False, 'import time\n'), ((1566, 1578), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (1576, 1578), True, 'import tensorflow as tf\n'), ((3162, 3191), 'cv2.destroyWindow', 'cv2.destroyWindow', (['"""MyWindow"""'], {}), "('MyWindow')\n", (3179, 3191), False, 'import cv2\n'), ((1966, 1977), 'time.time', 'time.time', ([], {}), '()\n', (1975, 1977), False, 'import time\n'), ((1994, 2023), 'cv2.imshow', 'cv2.imshow', (['"""MyWindow"""', 'frame'], {}), "('MyWindow', frame)\n", (2004, 2023), False, 'import cv2\n'), ((2102, 2124), 'PIL.Image.fromarray', 'Image.fromarray', (['frame'], {}), '(frame)\n', (2117, 2124), True, 'import PIL.Image as Image\n'), ((2417, 2438), 'numpy.argmax', 'np.argmax', (['prediction'], {}), '(prediction)\n', (2426, 2438), True, 'import numpy as np\n'), ((1647, 1669), 'tensorflow.get_default_graph', 'tf.get_default_graph', ([], {}), '()\n', (1667, 1669), True, 'import tensorflow as tf\n'), ((1725, 1747), 'tensorflow.get_default_graph', 'tf.get_default_graph', ([], {}), '()\n', (1745, 1747), True, 'import tensorflow as tf\n'), ((1798, 1820), 'tensorflow.get_default_graph', 'tf.get_default_graph', ([], {}), '()\n', (1818, 1820), True, 'import tensorflow as tf\n'), ((1906, 1920), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (1917, 1920), False, 'import cv2\n')]
|
from unittest import TestCase
from class_odd_and_prime_number import Number
class TestNumber(TestCase):
def test_number_init(self):
valid_number = Number(5)
self.assertEqual(valid_number.value, 5)
|
[
"class_odd_and_prime_number.Number"
] |
[((162, 171), 'class_odd_and_prime_number.Number', 'Number', (['(5)'], {}), '(5)\n', (168, 171), False, 'from class_odd_and_prime_number import Number\n')]
|
import setuptools
with open("README.md", "r", encoding="utf-8") as fh:
long_description = fh.read()
setuptools.setup(
name="shazamio",
version="0.0.5",
author="dotX12",
description="Is a FREE asynchronous library from reverse engineered Shazam API written in Python 3.6+ with asyncio and aiohttp. Includes all the methods that Shazam has, including searching for a song by file.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/dotX12/ShazamIO",
install_requires=['aiohttp', 'pydub', 'numpy', 'aiofiles', 'dataclass-factory',],
packages=setuptools.find_packages(),
python_requires='>=3.6',
)
|
[
"setuptools.find_packages"
] |
[((638, 664), 'setuptools.find_packages', 'setuptools.find_packages', ([], {}), '()\n', (662, 664), False, 'import setuptools\n')]
|
#!/usr/bin/env python
# coding: utf-8
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import errno
import filecmp
import os
import plistlib
import shutil
import stat
import subprocess
import sys
import time
def _stat_or_none(path, root):
"""Calls os.stat or os.lstat to obtain information about a path.
This program traverses parallel directory trees, which may have subtle
differences such as directory entries that are present in fewer than all
trees. It also operates on symbolic links directly, instead of on their
targets.
Args:
path: The path to call os.stat or os.lstat on.
root: True if called on the root of a tree to be merged, False
otherwise. See the discussion below.
Returns:
The return value of os.stat or os.lstat, or possibly None if the path
does not exist.
When root is True, indicating that path is at the root of one of these
trees, this permissiveness is disabled, as all roots are required to be
present. If one is absent, an exception will be raised. When root is True,
os.stat will be used, as this is the one case when it is desirable to
operate on a symbolic link’s target.
When root is False, os.lstat will be used to operate on symbolic links
directly, and a missing path will cause None to be returned.
"""
if root:
return os.stat(path)
try:
return os.lstat(path)
except OSError as e:
if e.errno == errno.ENOENT:
return None
raise
def _file_type_for_stat(st):
"""Returns a string indicating the type of directory entry in st.
Args:
st: The return value of os.stat or os.lstat.
Returns:
'symbolic link', 'file', or 'directory'.
"""
if stat.S_ISLNK(st.st_mode):
return 'symbolic_link'
if stat.S_ISREG(st.st_mode):
return 'file'
if stat.S_ISDIR(st.st_mode):
return 'directory'
    raise Exception('unknown file type for mode 0o%o' % st.st_mode)
def _sole_list_element(l, exception_message):
"""Assures that every element in a list is identical.
Args:
l: The list to consider.
exception_message: A message used to convey failure if every element in
l is not identical.
Returns:
The value of each identical element in the list.
"""
s = set(l)
if len(s) != 1:
raise Exception(exception_message)
return l[0]
def _read_plist(path):
"""Reads a macOS property list, API compatibility adapter."""
with open(path, 'rb') as file:
try:
# New API, available since Python 3.4.
return plistlib.load(file)
except AttributeError:
# Old API, available (but deprecated) until Python 3.9.
return plistlib.readPlist(file)
def _write_plist(value, path):
"""Writes a macOS property list, API compatibility adapter."""
with open(path, 'wb') as file:
try:
# New API, available since Python 3.4.
plistlib.dump(value, file)
except AttributeError:
# Old API, available (but deprecated) until Python 3.9.
plistlib.writePlist(value, file)
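# A minimal round-trip sketch of the two adapters above (the file name is
# hypothetical and assumes a writable working directory):
#
#   value = {'CFBundleIdentifier': 'org.example.app'}
#   _write_plist(value, 'Example-Info.plist')
#   assert _read_plist('Example-Info.plist') == value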
class CantMergeException(Exception):
"""Raised when differences exist between input files such that they cannot
be merged successfully.
"""
pass
def _merge_info_plists(input_paths, output_path):
"""Merges multiple macOS Info.plist files.
    Args:
        input_paths: A list of paths containing Info.plist files to be merged.
        output_path: The path of the merged Info.plist to create.
Raises:
CantMergeException if all input_paths could not successfully be merged
into output_path.
A small number of differences are tolerated in the input Info.plists. If a
key identifying the build environment (OS or toolchain) is different in any
of the inputs, it will be removed from the output. There are valid reasons
to produce builds for different architectures using different toolchains or
SDKs, and there is no way to rationalize these differences into a single
value.
If present, the Chrome KSChannelID family of keys are rationalized by using
“universal” to identify the architecture (compared to, for example,
“arm64”.)
"""
input_plists = [_read_plist(x) for x in input_paths]
output_plist = input_plists[0]
for index in range(1, len(input_plists)):
input_plist = input_plists[index]
for key in set(input_plist.keys()) | set(output_plist.keys()):
if input_plist.get(key, None) == output_plist.get(key, None):
continue
if key in ('BuildMachineOSBuild', 'DTCompiler', 'DTPlatformBuild',
'DTPlatformName', 'DTPlatformVersion', 'DTSDKBuild',
'DTSDKName', 'DTXcode', 'DTXcodeBuild'):
if key in input_plist:
del input_plist[key]
if key in output_plist:
del output_plist[key]
elif key == 'KSChannelID' or key.startswith('KSChannelID-'):
# These keys are Chrome-specific, where it’s only present in the
# outer browser .app’s Info.plist.
#
# Ensure that the values match the expected format as a
# prerequisite to what follows.
key_tail = key[len('KSChannelID'):]
input_value = input_plist.get(key, '')
output_value = output_plist.get(key, '')
assert input_value.endswith(key_tail)
assert output_value.endswith(key_tail)
# Find the longest common trailing sequence of hyphen-separated
# elements, and use that as the trailing sequence of the new
# value.
input_parts = reversed(input_value.split('-'))
output_parts = output_value.split('-')
output_parts.reverse()
new_parts = []
for input_part, output_part in zip(input_parts, output_parts):
if input_part == output_part:
new_parts.append(output_part)
else:
break
# Prepend “universal” to the entire value if it’s not already
# there.
if len(new_parts) == 0 or new_parts[-1] != 'universal':
new_parts.append('universal')
output_plist[key] = '-'.join(reversed(new_parts))
assert output_plist[key] != ''
else:
raise CantMergeException(input_paths[index], output_path)
_write_plist(output_plist, output_path)
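# Worked example of the KSChannelID rationalization above (values are
# illustrative): for the key 'KSChannelID-full', merging the values
# 'arm64-full' and 'x86_64-full' keeps the common trailing element 'full',
# prepends 'universal', and yields 'universal-full'.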
def _universalize(input_paths, output_path, root):
"""Merges multiple trees into a “universal” tree.
This function provides the recursive internal implementation for
universalize.
Args:
input_paths: The input directory trees to be merged.
output_path: The merged tree to produce.
root: True if operating at the root of the input and output trees.
"""
input_stats = [_stat_or_none(x, root) for x in input_paths]
for index in range(len(input_paths) - 1, -1, -1):
if input_stats[index] is None:
del input_paths[index]
del input_stats[index]
input_types = [_file_type_for_stat(x) for x in input_stats]
type = _sole_list_element(
input_types,
'varying types %r for input paths %r' % (input_types, input_paths))
if type == 'file':
identical = True
for index in range(1, len(input_paths)):
if not filecmp.cmp(input_paths[0], input_paths[index]):
identical = False
if (os.path.basename(output_path) == 'Info.plist' or
os.path.basename(output_path).endswith('-Info.plist')):
_merge_info_plists(input_paths, output_path)
else:
command = ['lipo', '-create']
command.extend(input_paths)
command.extend(['-output', output_path])
subprocess.check_call(command)
if identical:
shutil.copyfile(input_paths[0], output_path)
elif type == 'directory':
os.mkdir(output_path)
entries = set()
for input in input_paths:
entries.update(os.listdir(input))
for entry in entries:
input_entry_paths = [os.path.join(x, entry) for x in input_paths]
output_entry_path = os.path.join(output_path, entry)
_universalize(input_entry_paths, output_entry_path, False)
elif type == 'symbolic_link':
targets = [os.readlink(x) for x in input_paths]
target = _sole_list_element(
targets, 'varying symbolic link targets %r for input paths %r' %
(targets, input_paths))
os.symlink(target, output_path)
input_permissions = [stat.S_IMODE(x.st_mode) for x in input_stats]
permission = _sole_list_element(
input_permissions, 'varying permissions %r for input paths %r' %
(['0o%o' % x for x in input_permissions], input_paths))
os.lchmod(output_path, permission)
if type != 'file' or identical:
input_mtimes = [x.st_mtime for x in input_stats]
if len(set(input_mtimes)) == 1:
times = (time.time(), input_mtimes[0])
try:
# follow_symlinks is only available since Python 3.3.
os.utime(output_path, times, follow_symlinks=False)
except TypeError:
# If it’s a symbolic link and this version of Python isn’t able
# to set its timestamp, just leave it alone.
if type != 'symbolic_link':
os.utime(output_path, times)
elif type == 'directory':
# Always touch directories, in case a directory is a bundle, as a
# cue to LaunchServices to invalidate anything it may have cached
# about the bundle as it was being built.
os.utime(output_path, None)
def universalize(input_paths, output_path):
"""Merges multiple trees into a “universal” tree.
Args:
input_paths: The input directory trees to be merged.
output_path: The merged tree to produce.
input_paths are expected to be parallel directory trees. Each directory
entry at a given subpath in the input_paths, if present, must be identical
to all others when present, with these exceptions:
- Mach-O files that are not identical are merged using lipo.
- Info.plist files that are not identical are merged by _merge_info_plists.
"""
rmtree_on_error = not os.path.exists(output_path)
try:
return _universalize(input_paths, output_path, True)
except:
if rmtree_on_error and os.path.exists(output_path):
shutil.rmtree(output_path)
raise
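# A minimal usage sketch of universalize() (paths are hypothetical):
#
#   universalize(['out/arm64/App.app', 'out/x86_64/App.app'],
#                'out/universal/App.app')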
def main(args):
parser = argparse.ArgumentParser(
description='Merge multiple single-architecture directory trees into a '
'single universal tree.')
parser.add_argument(
'inputs',
nargs='+',
metavar='input',
help='An input directory tree to be merged. At least two inputs must '
'be provided.')
parser.add_argument('output', help='The merged directory tree to produce.')
parsed = parser.parse_args(args)
if len(parsed.inputs) < 2:
raise Exception('too few inputs')
universalize(parsed.inputs, parsed.output)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
[
"os.mkdir",
"argparse.ArgumentParser",
"os.lchmod",
"plistlib.dump",
"shutil.rmtree",
"filecmp.cmp",
"os.path.join",
"os.utime",
"subprocess.check_call",
"os.path.exists",
"shutil.copyfile",
"stat.S_ISDIR",
"stat.S_ISLNK",
"stat.S_ISREG",
"os.stat",
"os.path.basename",
"os.listdir",
"plistlib.load",
"os.readlink",
"plistlib.readPlist",
"time.time",
"stat.S_IMODE",
"os.symlink",
"plistlib.writePlist",
"os.lstat"
] |
[((1904, 1928), 'stat.S_ISLNK', 'stat.S_ISLNK', (['st.st_mode'], {}), '(st.st_mode)\n', (1916, 1928), False, 'import stat\n'), ((1968, 1992), 'stat.S_ISREG', 'stat.S_ISREG', (['st.st_mode'], {}), '(st.st_mode)\n', (1980, 1992), False, 'import stat\n'), ((2023, 2047), 'stat.S_ISDIR', 'stat.S_ISDIR', (['st.st_mode'], {}), '(st.st_mode)\n', (2035, 2047), False, 'import stat\n'), ((9368, 9402), 'os.lchmod', 'os.lchmod', (['output_path', 'permission'], {}), '(output_path, permission)\n', (9377, 9402), False, 'import os\n'), ((11158, 11287), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Merge multiple single-architecture directory trees into a single universal tree."""'}), "(description=\n 'Merge multiple single-architecture directory trees into a single universal tree.'\n )\n", (11181, 11287), False, 'import argparse\n'), ((1508, 1521), 'os.stat', 'os.stat', (['path'], {}), '(path)\n', (1515, 1521), False, 'import os\n'), ((1547, 1561), 'os.lstat', 'os.lstat', (['path'], {}), '(path)\n', (1555, 1561), False, 'import os\n'), ((9143, 9166), 'stat.S_IMODE', 'stat.S_IMODE', (['x.st_mode'], {}), '(x.st_mode)\n', (9155, 9166), False, 'import stat\n'), ((10904, 10931), 'os.path.exists', 'os.path.exists', (['output_path'], {}), '(output_path)\n', (10918, 10931), False, 'import os\n'), ((2784, 2803), 'plistlib.load', 'plistlib.load', (['file'], {}), '(file)\n', (2797, 2803), False, 'import plistlib\n'), ((3158, 3184), 'plistlib.dump', 'plistlib.dump', (['value', 'file'], {}), '(value, file)\n', (3171, 3184), False, 'import plistlib\n'), ((8382, 8426), 'shutil.copyfile', 'shutil.copyfile', (['input_paths[0]', 'output_path'], {}), '(input_paths[0], output_path)\n', (8397, 8426), False, 'import shutil\n'), ((8465, 8486), 'os.mkdir', 'os.mkdir', (['output_path'], {}), '(output_path)\n', (8473, 8486), False, 'import os\n'), ((2922, 2946), 'plistlib.readPlist', 'plistlib.readPlist', (['file'], {}), '(file)\n', (2940, 2946), False, 'import plistlib\n'), ((3296, 3328), 'plistlib.writePlist', 'plistlib.writePlist', (['value', 'file'], {}), '(value, file)\n', (3315, 3328), False, 'import plistlib\n'), ((7818, 7865), 'filecmp.cmp', 'filecmp.cmp', (['input_paths[0]', 'input_paths[index]'], {}), '(input_paths[0], input_paths[index])\n', (7829, 7865), False, 'import filecmp\n'), ((8733, 8765), 'os.path.join', 'os.path.join', (['output_path', 'entry'], {}), '(output_path, entry)\n', (8745, 8765), False, 'import os\n'), ((9085, 9116), 'os.symlink', 'os.symlink', (['target', 'output_path'], {}), '(target, output_path)\n', (9095, 9116), False, 'import os\n'), ((9558, 9569), 'time.time', 'time.time', ([], {}), '()\n', (9567, 9569), False, 'import time\n'), ((9691, 9742), 'os.utime', 'os.utime', (['output_path', 'times'], {'follow_symlinks': '(False)'}), '(output_path, times, follow_symlinks=False)\n', (9699, 9742), False, 'import os\n'), ((10263, 10290), 'os.utime', 'os.utime', (['output_path', 'None'], {}), '(output_path, None)\n', (10271, 10290), False, 'import os\n'), ((11045, 11072), 'os.path.exists', 'os.path.exists', (['output_path'], {}), '(output_path)\n', (11059, 11072), False, 'import os\n'), ((11086, 11112), 'shutil.rmtree', 'shutil.rmtree', (['output_path'], {}), '(output_path)\n', (11099, 11112), False, 'import shutil\n'), ((8316, 8346), 'subprocess.check_call', 'subprocess.check_call', (['command'], {}), '(command)\n', (8337, 8346), False, 'import subprocess\n'), ((8573, 8590), 'os.listdir', 'os.listdir', (['input'], {}), '(input)\n', (8583, 8590), False, 'import os\n'), ((8656, 8678), 'os.path.join', 'os.path.join', (['x', 'entry'], {}), '(x, entry)\n', (8668, 8678), False, 'import os\n'), ((8890, 8904), 'os.readlink', 'os.readlink', (['x'], {}), '(x)\n', (8901, 8904), False, 'import os\n'), ((7921, 7950), 'os.path.basename', 'os.path.basename', (['output_path'], {}), '(output_path)\n', (7937, 7950), False, 'import os\n'), ((9978, 10006), 'os.utime', 'os.utime', (['output_path', 'times'], {}), '(output_path, times)\n', (9986, 10006), False, 'import os\n'), ((7994, 8023), 'os.path.basename', 'os.path.basename', (['output_path'], {}), '(output_path)\n', (8010, 8023), False, 'import os\n')]
|
from .metric import Metric, metric_path
import pandas as pd
import math
class JobMakespanMetric(Metric):
def __init__(self, plot, scenarios):
super().__init__(plot, scenarios)
self.name = "job_makespan"
self.x_axis_label = "Job makespan (seconds)"
def get_data(self, scenario):
job_df = pd.read_parquet(metric_path("job-lifecycle", scenario))
task_df = pd.read_parquet(metric_path("task-lifecycle", scenario))
for job_id in job_df.job_id.unique():
tasks = task_df[task_df.job_id == job_id]
            # job makespan: time elapsed from the first task submission of the job
            # until its last task finishes (timestamps are in milliseconds, hence // 1000)
first_task_submission_time = tasks.submission_time.min()
last_task_finish_time = tasks.finish_time.max()
makespan = (last_task_finish_time - first_task_submission_time) // 1000
if math.isnan(makespan):
continue
yield makespan
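# A worked sketch of the makespan rule above (timestamps assumed to be in
# milliseconds, matching the // 1000 conversion to seconds):
#
#   tasks: submission_time = [1_000, 4_000], finish_time = [9_000, 12_000]
#   makespan = (12_000 - 1_000) // 1000 = 11 seconds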
|
[
"math.isnan"
] |
[((905, 925), 'math.isnan', 'math.isnan', (['makespan'], {}), '(makespan)\n', (915, 925), False, 'import math\n')]
|
from sg.StanfordGap import StanfordGap
import numpy as np
import matplotlib.pyplot as plt
from sklearn.cluster import KMeans
from sklearn import datasets
class StanfordGapDemo(object):
def run(self):
"""
Run the Stanford Gap Statistic Analysis on the iris data set presented in
http://scikit-learn.org/stable/auto_examples/cluster/plot_cluster_iris.html#sphx-glr-auto-examples-cluster-plot-cluster-iris-py
:return:
"""
np.random.seed(42)
iris = datasets.load_iris()
X = iris.data
gaps = np.zeros((20, 1))
s = np.zeros((20, 1))
for k in range(0, 20):
est = KMeans(n_clusters=(k + 1))
est.fit(X)
sg = StanfordGap(B=10)
sg.fit(X, est.labels_, est.cluster_centers_)
gaps[k] = sg.gap
s[k] = sg.s
# Plot Gap(k)
# Choose the smallest k such that Gap(k)>=Gap(k+1) - s_(k+1)
plt.plot(gaps[0:18])
plt.plot(gaps[1:19] - s[1:19])
plt.legend(['Gap(k)', 'Gap(k+1) - s_k+1'])
plt.xticks(np.arange(20), np.arange(1, 20))
plt.xlabel('K')
plt.show()
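# A sketch of applying the selection rule from run() programmatically,
# assuming gaps and s with shape (20, 1) as computed above:
#
#   ok = (gaps[:-1] >= gaps[1:] - s[1:]).ravel()
#   best_k = int(np.argmax(ok)) + 1  # smallest k with Gap(k) >= Gap(k+1) - s_(k+1)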
|
[
"sklearn.datasets.load_iris",
"numpy.random.seed",
"matplotlib.pyplot.show",
"matplotlib.pyplot.plot",
"sklearn.cluster.KMeans",
"matplotlib.pyplot.legend",
"numpy.zeros",
"numpy.arange",
"matplotlib.pyplot.xlabel",
"sg.StanfordGap.StanfordGap"
] |
[((473, 491), 'numpy.random.seed', 'np.random.seed', (['(42)'], {}), '(42)\n', (487, 491), True, 'import numpy as np\n'), ((507, 527), 'sklearn.datasets.load_iris', 'datasets.load_iris', ([], {}), '()\n', (525, 527), False, 'from sklearn import datasets\n'), ((566, 583), 'numpy.zeros', 'np.zeros', (['(20, 1)'], {}), '((20, 1))\n', (574, 583), True, 'import numpy as np\n'), ((596, 613), 'numpy.zeros', 'np.zeros', (['(20, 1)'], {}), '((20, 1))\n', (604, 613), True, 'import numpy as np\n'), ((958, 978), 'matplotlib.pyplot.plot', 'plt.plot', (['gaps[0:18]'], {}), '(gaps[0:18])\n', (966, 978), True, 'import matplotlib.pyplot as plt\n'), ((987, 1017), 'matplotlib.pyplot.plot', 'plt.plot', (['(gaps[1:19] - s[1:19])'], {}), '(gaps[1:19] - s[1:19])\n', (995, 1017), True, 'import matplotlib.pyplot as plt\n'), ((1026, 1068), 'matplotlib.pyplot.legend', 'plt.legend', (["['Gap(k)', 'Gap(k+1) - s_k+1']"], {}), "(['Gap(k)', 'Gap(k+1) - s_k+1'])\n", (1036, 1068), True, 'import matplotlib.pyplot as plt\n'), ((1129, 1144), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""K"""'], {}), "('K')\n", (1139, 1144), True, 'import matplotlib.pyplot as plt\n'), ((1153, 1163), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1161, 1163), True, 'import matplotlib.pyplot as plt\n'), ((663, 687), 'sklearn.cluster.KMeans', 'KMeans', ([], {'n_clusters': '(k + 1)'}), '(n_clusters=k + 1)\n', (669, 687), False, 'from sklearn.cluster import KMeans\n'), ((730, 747), 'sg.StanfordGap.StanfordGap', 'StanfordGap', ([], {'B': '(10)'}), '(B=10)\n', (741, 747), False, 'from sg.StanfordGap import StanfordGap\n'), ((1088, 1101), 'numpy.arange', 'np.arange', (['(20)'], {}), '(20)\n', (1097, 1101), True, 'import numpy as np\n'), ((1103, 1119), 'numpy.arange', 'np.arange', (['(1)', '(20)'], {}), '(1, 20)\n', (1112, 1119), True, 'import numpy as np\n')]
|
import logging
import numpy as np
import glob
from beis_indicators import project_dir
from beis_indicators.geo import NutsCoder, LepCoder
from beis_indicators.indicators import points_to_indicator, save_indicator
from beis_indicators.travel.travel_work_processing import get_travel_work_data
import pandas as pd
logger = logging.getLogger(__name__)
coders = {
'nuts2': NutsCoder(level=2),
'nuts3': NutsCoder(level=3),
'lep': LepCoder()
}
get_travel_work_data()
destination_df = pd.read_csv(f'{project_dir}/data/interim/travel_to_work_all_years.csv')
for geo, coder in coders.items():
time_mean = points_to_indicator(destination_df, value_col='Mean', coder=coder,
aggfunc=np.mean, value_rename= 'Mean',
projection='EPSG:4326', x_col='long', y_col='lat')
if geo == 'lep':
time_mean = time_mean.rename(columns = {'Mean': 'travel_time_to_work'}).sort_values(['lep_id', 'year']).reset_index(drop=True)
else:
time_mean = time_mean.rename(columns = {'Mean': 'travel_time_to_work'}).sort_values(['nuts_id', 'year']).reset_index(drop=True)
# print(time_mean.head())
save_indicator(time_mean, 'travel', geo)
|
[
"pandas.read_csv",
"beis_indicators.indicators.save_indicator",
"beis_indicators.travel.travel_work_processing.get_travel_work_data",
"beis_indicators.indicators.points_to_indicator",
"beis_indicators.geo.LepCoder",
"logging.getLogger",
"beis_indicators.geo.NutsCoder"
] |
[((337, 364), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (354, 364), False, 'import logging\n'), ((484, 506), 'beis_indicators.travel.travel_work_processing.get_travel_work_data', 'get_travel_work_data', ([], {}), '()\n', (504, 506), False, 'from beis_indicators.travel.travel_work_processing import get_travel_work_data\n'), ((525, 596), 'pandas.read_csv', 'pd.read_csv', (['f"""{project_dir}/data/interim/travel_to_work_all_years.csv"""'], {}), "(f'{project_dir}/data/interim/travel_to_work_all_years.csv')\n", (536, 596), True, 'import pandas as pd\n'), ((397, 415), 'beis_indicators.geo.NutsCoder', 'NutsCoder', ([], {'level': '(2)'}), '(level=2)\n', (406, 415), False, 'from beis_indicators.geo import NutsCoder, LepCoder\n'), ((431, 449), 'beis_indicators.geo.NutsCoder', 'NutsCoder', ([], {'level': '(3)'}), '(level=3)\n', (440, 449), False, 'from beis_indicators.geo import NutsCoder, LepCoder\n'), ((463, 473), 'beis_indicators.geo.LepCoder', 'LepCoder', ([], {}), '()\n', (471, 473), False, 'from beis_indicators.geo import NutsCoder, LepCoder\n'), ((649, 813), 'beis_indicators.indicators.points_to_indicator', 'points_to_indicator', (['destination_df'], {'value_col': '"""Mean"""', 'coder': 'coder', 'aggfunc': 'np.mean', 'value_rename': '"""Mean"""', 'projection': '"""EPSG:4326"""', 'x_col': '"""long"""', 'y_col': '"""lat"""'}), "(destination_df, value_col='Mean', coder=coder, aggfunc=\n np.mean, value_rename='Mean', projection='EPSG:4326', x_col='long',\n y_col='lat')\n", (668, 813), False, 'from beis_indicators.indicators import points_to_indicator, save_indicator\n'), ((1194, 1234), 'beis_indicators.indicators.save_indicator', 'save_indicator', (['time_mean', '"""travel"""', 'geo'], {}), "(time_mean, 'travel', geo)\n", (1208, 1234), False, 'from beis_indicators.indicators import points_to_indicator, save_indicator\n')]
|
from collections import defaultdict
from os.path import abspath
from os.path import expanduser
from os.path import isdir
from os.path import isfile
from os.path import join
import sys
from types import ModuleType
from typing import Any
from typing import Callable
from typing import DefaultDict
from typing import Dict
from typing import Iterator
from typing import List
from typing import Optional
from typing import Set
from typing import Union
from typing import cast
from ddtrace.internal.compat import PY2
from ddtrace.internal.logger import get_logger
from ddtrace.internal.utils import get_argument_value
log = get_logger(__name__)
ModuleHookType = Callable[[ModuleType], None]
_run_code = None
_post_run_module_hooks = [] # type: List[ModuleHookType]
def _wrapped_run_code(*args, **kwargs):
# type: (*Any, **Any) -> Dict[str, Any]
global _run_code, _post_run_module_hooks
# DEV: If we are calling this wrapper then _run_code must have been set to
# the original runpy._run_code.
assert _run_code is not None
mod_name = get_argument_value(args, kwargs, 3, "mod_name")
try:
return _run_code(*args, **kwargs)
finally:
module = sys.modules[mod_name]
for hook in _post_run_module_hooks:
hook(module)
def _patch_run_code():
# type: () -> None
global _run_code
if _run_code is None:
import runpy
_run_code = runpy._run_code # type: ignore[attr-defined]
runpy._run_code = _wrapped_run_code # type: ignore[attr-defined]
def register_post_run_module_hook(hook):
# type: (ModuleHookType) -> None
"""Register a post run module hook.
    The hook gets called after the module is loaded. For this to work, the
hook needs to be registered during the interpreter initialization, e.g. as
part of a sitecustomize.py script.
"""
global _run_code, _post_run_module_hooks
_patch_run_code()
_post_run_module_hooks.append(hook)
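# A minimal sitecustomize.py-style sketch of the registration described
# above (the hook body is hypothetical):
#
#   def _on_module_run(module):
#       # type: (ModuleType) -> None
#       log.debug("runpy executed module %s", module.__name__)
#
#   register_post_run_module_hook(_on_module_run)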
def unregister_post_run_module_hook(hook):
# type: (ModuleHookType) -> None
"""Unregister a post run module hook.
If the hook was not registered, a ``ValueError`` exception is raised.
"""
global _post_run_module_hooks
_post_run_module_hooks.remove(hook)
def origin(module):
# type: (ModuleType) -> str
"""Get the origin source file of the module."""
try:
orig = abspath(module.__file__) # type: ignore[type-var]
except (AttributeError, TypeError):
# Module is probably only partially initialised, so we look at its
# spec instead
try:
orig = abspath(module.__spec__.origin) # type: ignore
except (AttributeError, ValueError, TypeError):
orig = None
if orig is not None and isfile(orig):
if orig.endswith(".pyc"):
orig = orig[:-1]
return orig
return "<unknown origin>"
def _resolve(path):
# type: (str) -> Optional[str]
"""Resolve a (relative) path with respect to sys.path."""
for base in sys.path:
if isdir(base):
resolved_path = abspath(join(base, expanduser(path)))
if isfile(resolved_path):
return resolved_path
return None
# Borrowed from the wrapt module
# https://github.com/GrahamDumpleton/wrapt/blob/df0e62c2740143cceb6cafea4c306dae1c559ef8/src/wrapt/importer.py
if PY2:
find_spec = ModuleSpec = None
Loader = object
else:
from importlib.abc import Loader
from importlib.machinery import ModuleSpec
from importlib.util import find_spec
# DEV: This is used by Python 2 only
class _ImportHookLoader(object):
def __init__(self, callback):
# type: (Callable[[ModuleType], None]) -> None
self.callback = callback
def load_module(self, fullname):
# type: (str) -> ModuleType
module = sys.modules[fullname]
self.callback(module)
return module
class _ImportHookChainedLoader(Loader):
def __init__(self, loader, callback):
# type: (Loader, Callable[[ModuleType], None]) -> None
self.loader = loader
self.callback = callback
# DEV: load_module is deprecated so we define it at runtime if also
# defined by the default loader. We also check and define for the
# methods that are supposed to replace the load_module functionality.
if hasattr(loader, "load_module"):
self.load_module = self._load_module # type: ignore[assignment]
if hasattr(loader, "create_module"):
self.create_module = self._create_module # type: ignore[assignment]
if hasattr(loader, "exec_module"):
self.exec_module = self._exec_module # type: ignore[assignment]
def _load_module(self, fullname):
# type: (str) -> ModuleType
module = self.loader.load_module(fullname)
self.callback(module)
return module
def _create_module(self, spec):
return self.loader.create_module(spec)
def _exec_module(self, module):
self.loader.exec_module(module)
self.callback(sys.modules[module.__name__])
def get_code(self, mod_name):
return self.loader.get_code(mod_name)
class ModuleWatchdog(dict):
"""Module watchdog.
Replace the standard ``sys.modules`` dictionary to detect when modules are
loaded/unloaded. This is also responsible for triggering any registered
import hooks.
Subclasses might customize the default behavior by overriding the
``after_import`` method, which is triggered on every module import, once
the subclass is installed.
"""
_instance = None # type: Optional[ModuleWatchdog]
def __init__(self):
# type: () -> None
self._hook_map = defaultdict(list) # type: DefaultDict[str, List[ModuleHookType]]
self._origin_map = {origin(module): module for module in sys.modules.values()}
self._modules = sys.modules # type: Union[dict, ModuleWatchdog]
self._finding = set() # type: Set[str]
def __getitem__(self, item):
# type: (str) -> ModuleType
return self._modules.__getitem__(item)
def __setitem__(self, name, module):
# type: (str, ModuleType) -> None
self._modules.__setitem__(name, module)
def _add_to_meta_path(self):
# type: () -> None
sys.meta_path.insert(0, self) # type: ignore[arg-type]
@classmethod
def _find_in_meta_path(cls):
# type: () -> Optional[int]
for i, meta_path in enumerate(sys.meta_path):
if type(meta_path) is cls:
return i
return None
@classmethod
def _remove_from_meta_path(cls):
# type: () -> None
i = cls._find_in_meta_path()
if i is not None:
sys.meta_path.pop(i)
def after_import(self, module):
# type: (ModuleType) -> None
path = origin(module)
self._origin_map[path] = module
# Collect all hooks by module origin and name
hooks = []
if path in self._hook_map:
hooks.extend(self._hook_map[path])
if module.__name__ in self._hook_map:
hooks.extend(self._hook_map[module.__name__])
if hooks:
log.debug("Calling %d registered hooks on import of module '%s'", len(hooks), module.__name__)
for hook in hooks:
hook(module)
@classmethod
def get_by_origin(cls, origin):
# type: (str) -> Optional[ModuleType]
"""Lookup a module by its origin."""
cls._check_installed()
path = _resolve(origin)
if path is not None:
return cls._instance._origin_map.get(path) # type: ignore[union-attr]
return None
def __delitem__(self, name):
# type: (str) -> None
try:
path = origin(sys.modules[name])
# Drop the module reference to reclaim memory
del self._origin_map[path]
except KeyError:
pass
self._modules.__delitem__(name)
def __getattribute__(self, name):
# type: (str) -> Any
try:
return super(ModuleWatchdog, self).__getattribute__("_modules").__getattribute__(name)
except AttributeError:
return super(ModuleWatchdog, self).__getattribute__(name)
def __contains__(self, name):
# type: (object) -> bool
return self._modules.__contains__(name)
def __len__(self):
# type: () -> int
return self._modules.__len__()
def __iter__(self):
# type: () -> Iterator
return self._modules.__iter__()
def find_module(self, fullname, path=None):
# type: (str, Optional[str]) -> Union[ModuleWatchdog, _ImportHookChainedLoader, None]
if fullname in self._finding:
return None
self._finding.add(fullname)
try:
if PY2:
__import__(fullname)
return _ImportHookLoader(self.after_import)
loader = getattr(find_spec(fullname), "loader", None)
if loader and not isinstance(loader, _ImportHookChainedLoader):
return _ImportHookChainedLoader(loader, self.after_import)
finally:
self._finding.remove(fullname)
return None
def find_spec(self, fullname, path=None, target=None):
# type: (str, Optional[str], Optional[ModuleType]) -> Optional[ModuleSpec]
if fullname in self._finding:
return None
self._finding.add(fullname)
try:
spec = find_spec(fullname)
if spec is None:
return None
loader = getattr(spec, "loader", None)
if loader and not isinstance(loader, _ImportHookChainedLoader):
spec.loader = _ImportHookChainedLoader(loader, self.after_import)
return spec
finally:
self._finding.remove(fullname)
@classmethod
def register_origin_hook(cls, origin, hook):
# type: (str, ModuleHookType) -> None
"""Register a hook to be called when the module with the given origin is
imported.
The hook will be called with the module object as argument.
"""
cls._check_installed()
# DEV: Under the hypothesis that this is only ever called by the probe
# poller thread, there are no further actions to take. Should this ever
# change, then thread-safety might become a concern.
path = _resolve(origin)
if path is None:
raise ValueError("Cannot resolve module origin %s" % origin)
log.debug("Registering hook '%r' on path '%s'", hook, path)
instance = cast(ModuleWatchdog, cls._instance)
instance._hook_map[path].append(hook)
try:
module = instance._origin_map[path]
except KeyError:
# The module is not loaded yet. Nothing more we can do.
return
# The module was already imported so we invoke the hook straight-away
log.debug("Calling hook '%r' on already imported module '%s'", hook, module.__name__)
hook(module)
@classmethod
def unregister_origin_hook(cls, origin, hook):
# type: (str, Any) -> None
"""Unregister the hook registered with the given module origin and
argument.
"""
cls._check_installed()
path = _resolve(origin)
if path is None:
            raise ValueError("Module origin %s cannot be resolved" % origin)
instance = cast(ModuleWatchdog, cls._instance)
if path not in instance._hook_map:
raise ValueError("No hooks registered for origin %s" % origin)
try:
if path in instance._hook_map:
hooks = instance._hook_map[path]
hooks.remove(hook)
if not hooks:
del instance._hook_map[path]
except ValueError:
raise ValueError("Hook %r not registered for origin %s" % (hook, origin))
@classmethod
def register_module_hook(cls, module, hook):
# type: (str, ModuleHookType) -> None
"""Register a hook to be called when the module with the given name is
imported.
The hook will be called with the module object as argument.
"""
cls._check_installed()
log.debug("Registering hook '%r' on module '%s'", hook, module)
instance = cast(ModuleWatchdog, cls._instance)
instance._hook_map[module].append(hook)
try:
module_object = instance[module]
except KeyError:
# The module is not loaded yet. Nothing more we can do.
return
# The module was already imported so we invoke the hook straight-away
log.debug("Calling hook '%r' on already imported module '%s'", hook, module)
hook(module_object)
@classmethod
def unregister_module_hook(cls, module, hook):
# type: (str, ModuleHookType) -> None
"""Unregister the hook registered with the given module name and
argument.
"""
cls._check_installed()
instance = cast(ModuleWatchdog, cls._instance)
if module not in instance._hook_map:
raise ValueError("No hooks registered for module %s" % module)
try:
if module in instance._hook_map:
hooks = instance._hook_map[module]
hooks.remove(hook)
if not hooks:
del instance._hook_map[module]
except ValueError:
raise ValueError("Hook %r not registered for module %r" % (hook, module))
@classmethod
def _check_installed(cls):
# type: () -> None
if not cls.is_installed():
raise RuntimeError("%s is not installed" % cls.__name__)
@classmethod
def install(cls):
# type: () -> None
"""Install the module watchdog."""
if cls.is_installed():
raise RuntimeError("%s is already installed" % cls.__name__)
cls._instance = sys.modules = cls()
sys.modules._add_to_meta_path()
log.debug("%s installed", cls)
@classmethod
def is_installed(cls):
"""Check whether this module watchdog class is installed."""
return cls._instance is not None and type(cls._instance) is cls
@classmethod
def uninstall(cls):
# type: () -> None
"""Uninstall the module watchdog.
This will uninstall only the most recently installed instance of this
class.
"""
cls._check_installed()
parent, current = None, sys.modules
while isinstance(current, ModuleWatchdog):
if type(current) is cls:
cls._remove_from_meta_path()
if parent is not None:
setattr(parent, "_modules", getattr(current, "_modules"))
else:
sys.modules = getattr(current, "_modules")
cls._instance = None
log.debug("ModuleWatchdog uninstalled")
return
parent = current
current = current._modules
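# A minimal sketch of the subclassing pattern described in the class
# docstring (the after_import body is illustrative):
#
#   class LoggingWatchdog(ModuleWatchdog):
#       def after_import(self, module):
#           # type: (ModuleType) -> None
#           super(LoggingWatchdog, self).after_import(module)
#           log.debug("imported %s", module.__name__)
#
#   LoggingWatchdog.install()
#   ...
#   LoggingWatchdog.uninstall()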
|
[
"ddtrace.internal.utils.get_argument_value",
"os.path.abspath",
"ddtrace.internal.logger.get_logger",
"sys.meta_path.insert",
"os.path.isdir",
"typing.cast",
"importlib.util.find_spec",
"collections.defaultdict",
"sys.modules._add_to_meta_path",
"os.path.isfile",
"sys.modules.values",
"sys.meta_path.pop",
"os.path.expanduser"
] |
[((621, 641), 'ddtrace.internal.logger.get_logger', 'get_logger', (['__name__'], {}), '(__name__)\n', (631, 641), False, 'from ddtrace.internal.logger import get_logger\n'), ((1062, 1109), 'ddtrace.internal.utils.get_argument_value', 'get_argument_value', (['args', 'kwargs', '(3)', '"""mod_name"""'], {}), "(args, kwargs, 3, 'mod_name')\n", (1080, 1109), False, 'from ddtrace.internal.utils import get_argument_value\n'), ((2385, 2409), 'os.path.abspath', 'abspath', (['module.__file__'], {}), '(module.__file__)\n', (2392, 2409), False, 'from os.path import abspath\n'), ((2763, 2775), 'os.path.isfile', 'isfile', (['orig'], {}), '(orig)\n', (2769, 2775), False, 'from os.path import isfile\n'), ((3047, 3058), 'os.path.isdir', 'isdir', (['base'], {}), '(base)\n', (3052, 3058), False, 'from os.path import isdir\n'), ((5742, 5759), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (5753, 5759), False, 'from collections import defaultdict\n'), ((6334, 6363), 'sys.meta_path.insert', 'sys.meta_path.insert', (['(0)', 'self'], {}), '(0, self)\n', (6354, 6363), False, 'import sys\n'), ((10693, 10728), 'typing.cast', 'cast', (['ModuleWatchdog', 'cls._instance'], {}), '(ModuleWatchdog, cls._instance)\n', (10697, 10728), False, 'from typing import cast\n'), ((11536, 11571), 'typing.cast', 'cast', (['ModuleWatchdog', 'cls._instance'], {}), '(ModuleWatchdog, cls._instance)\n', (11540, 11571), False, 'from typing import cast\n'), ((12437, 12472), 'typing.cast', 'cast', (['ModuleWatchdog', 'cls._instance'], {}), '(ModuleWatchdog, cls._instance)\n', (12441, 12472), False, 'from typing import cast\n'), ((13152, 13187), 'typing.cast', 'cast', (['ModuleWatchdog', 'cls._instance'], {}), '(ModuleWatchdog, cls._instance)\n', (13156, 13187), False, 'from typing import cast\n'), ((14094, 14125), 'sys.modules._add_to_meta_path', 'sys.modules._add_to_meta_path', ([], {}), '()\n', (14123, 14125), False, 'import sys\n'), ((3141, 3162), 'os.path.isfile', 'isfile', (['resolved_path'], {}), '(resolved_path)\n', (3147, 3162), False, 'from os.path import isfile\n'), ((6772, 6792), 'sys.meta_path.pop', 'sys.meta_path.pop', (['i'], {}), '(i)\n', (6789, 6792), False, 'import sys\n'), ((9556, 9575), 'importlib.util.find_spec', 'find_spec', (['fullname'], {}), '(fullname)\n', (9565, 9575), False, 'from importlib.util import find_spec\n'), ((2606, 2637), 'os.path.abspath', 'abspath', (['module.__spec__.origin'], {}), '(module.__spec__.origin)\n', (2613, 2637), False, 'from os.path import abspath\n'), ((5873, 5893), 'sys.modules.values', 'sys.modules.values', ([], {}), '()\n', (5891, 5893), False, 'import sys\n'), ((9011, 9030), 'importlib.util.find_spec', 'find_spec', (['fullname'], {}), '(fullname)\n', (9020, 9030), False, 'from importlib.util import find_spec\n'), ((3107, 3123), 'os.path.expanduser', 'expanduser', (['path'], {}), '(path)\n', (3117, 3123), False, 'from os.path import expanduser\n')]
|
#!/usr/bin/env python3
from typing import Optional, Tuple
import torch
from .. import settings
from ..distributions import Delta, MultivariateNormal
from ..lazy import DiagLazyTensor, MatmulLazyTensor, SumLazyTensor, lazify
from ..module import Module
from ..utils import linear_cg
from ..utils.broadcasting import _mul_broadcast_shape
from ..utils.memoize import cached
from ._variational_strategy import _VariationalStrategy
from .natural_variational_distribution import NaturalVariationalDistribution
class _NgdInterpTerms(torch.autograd.Function):
"""
This function takes in
- the kernel interpolation term K_ZZ^{-1/2} k_ZX
- the natural parameters of the variational distribution
and returns
- the predictive distribution mean/covariance
- the inducing KL divergence KL( q(u) || p(u))
    However, the gradients will be with respect to the **canonical parameters**
of the variational distribution, rather than the **natural parameters**.
This corresponds to performing natural gradient descent on the variational distribution.
"""
@staticmethod
def forward(
ctx, interp_term: torch.Tensor, natural_vec: torch.Tensor, natural_mat: torch.Tensor
) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]:
# Compute precision
prec = natural_mat.mul(-2.0)
diag = prec.diagonal(dim1=-1, dim2=-2).unsqueeze(-1)
# Make sure that interp_term and natural_vec are the same batch shape
batch_shape = _mul_broadcast_shape(interp_term.shape[:-2], natural_vec.shape[:-1])
expanded_interp_term = interp_term.expand(*batch_shape, *interp_term.shape[-2:])
expanded_natural_vec = natural_vec.expand(*batch_shape, natural_vec.size(-1))
# Compute necessary solves with the precision. We need
# m = expec_vec = S * natural_vec
# S K^{-1/2} k
solves = linear_cg(
prec.matmul,
torch.cat([expanded_natural_vec.unsqueeze(-1), expanded_interp_term], dim=-1),
n_tridiag=0,
max_iter=settings.max_cg_iterations.value(),
tolerance=min(settings.eval_cg_tolerance.value(), settings.cg_tolerance.value()),
max_tridiag_iter=settings.max_lanczos_quadrature_iterations.value(),
preconditioner=lambda x: x / diag,
)
expec_vec = solves[..., 0]
s_times_interp_term = solves[..., 1:]
# Compute the interpolated mean
# k^T K^{-1/2} m
interp_mean = (s_times_interp_term.transpose(-1, -2) @ natural_vec.unsqueeze(-1)).squeeze(-1)
# Compute the interpolated variance
# k^T K^{-1/2} S K^{-1/2} k = k^T K^{-1/2} (expec_mat - expec_vec expec_vec^T) K^{-1/2} k
interp_var = (s_times_interp_term * interp_term).sum(dim=-2)
        # Let's not bother actually computing the KL-div in the forward pass
# 1/2 ( -log | S | + tr(S) + m^T m - len(m) )
# = 1/2 ( -log | expec_mat - expec_vec expec_vec^T | + tr(expec_mat) - len(m) )
kl_div = torch.zeros_like(interp_mean[..., 0])
# We're done!
ctx.save_for_backward(interp_term, s_times_interp_term, interp_mean, natural_vec, expec_vec, prec)
return interp_mean, interp_var, kl_div
@staticmethod
def backward(
ctx, interp_mean_grad: torch.Tensor, interp_var_grad: torch.Tensor, kl_div_grad: torch.Tensor
) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]:
# Get the saved terms
interp_term, s_times_interp_term, interp_mean, natural_vec, expec_vec, prec = ctx.saved_tensors
        # Expand data-dependent gradients
interp_mean_grad = interp_mean_grad.unsqueeze(-2)
interp_var_grad = interp_var_grad.unsqueeze(-2)
# Compute gradient of interp term (K^{-1/2} k)
# interp_mean component: m
# interp_var component: S K^{-1/2} k
# kl component: 0
interp_term_grad = (interp_var_grad * s_times_interp_term).mul(2.0) + (
interp_mean_grad * expec_vec.unsqueeze(-1)
)
# Compute gradient of expected vector (m)
# interp_mean component: K^{-1/2} k
# interp_var component: (k^T K^{-1/2} m) K^{-1/2} k
# kl component: S^{-1} m
expec_vec_grad = sum(
[
(interp_var_grad * interp_mean.unsqueeze(-2) * interp_term).sum(dim=-1).mul(-2),
(interp_mean_grad * interp_term).sum(dim=-1),
(kl_div_grad.unsqueeze(-1) * natural_vec),
]
)
# Compute gradient of expected matrix (mm^T + S)
# interp_mean component: 0
# interp_var component: K^{-1/2} k k^T K^{-1/2}
# kl component: 1/2 ( I - S^{-1} )
eye = torch.eye(expec_vec.size(-1), device=expec_vec.device, dtype=expec_vec.dtype)
expec_mat_grad = torch.add(
(interp_var_grad * interp_term) @ interp_term.transpose(-1, -2),
(kl_div_grad.unsqueeze(-1).unsqueeze(-1) * (eye - prec).mul(0.5)),
)
# We're done!
return interp_term_grad, expec_vec_grad, expec_mat_grad, None # Extra "None" for the kwarg
class CiqVariationalStrategy(_VariationalStrategy):
r"""
Similar to :class:`~gpytorch.variational.VariationalStrategy`,
except the whitening operation is performed using Contour Integral Quadrature
rather than Cholesky (see `Pleiss et al. (2020)`_ for more info).
See the `CIQ-SVGP tutorial`_ for an example.
Contour Integral Quadrature uses iterative matrix-vector multiplication to approximate
the :math:`\mathbf K_{\mathbf Z \mathbf Z}^{-1/2}` matrix used for the whitening operation.
This can be more efficient than the standard variational strategy for large numbers
of inducing points (e.g. :math:`M > 1000`) or when the inducing points have structure
(e.g. they lie on an evenly-spaced grid).
.. note::
It is recommended that this object is used in conjunction with
:obj:`~gpytorch.variational.NaturalVariationalDistribution` and
`natural gradient descent`_.
:param ~gpytorch.models.ApproximateGP model: Model this strategy is applied to.
Typically passed in when the VariationalStrategy is created in the
__init__ method of the user defined model.
:param torch.Tensor inducing_points: Tensor containing a set of inducing
points to use for variational inference.
:param ~gpytorch.variational.VariationalDistribution variational_distribution: A
VariationalDistribution object that represents the form of the variational distribution :math:`q(\mathbf u)`
:param learn_inducing_locations: (Default True): Whether or not
the inducing point locations :math:`\mathbf Z` should be learned (i.e. are they
parameters of the model).
:type learn_inducing_locations: `bool`, optional
.. _Pleiss et al. (2020):
https://arxiv.org/pdf/2006.11267.pdf
.. _CIQ-SVGP tutorial:
examples/04_Variational_and_Approximate_GPs/SVGP_CIQ.html
.. _natural gradient descent:
examples/04_Variational_and_Approximate_GPs/Natural_Gradient_Descent.html
"""
def _ngd(self):
return isinstance(self._variational_distribution, NaturalVariationalDistribution)
@property
@cached(name="prior_distribution_memo")
def prior_distribution(self):
zeros = torch.zeros(
self._variational_distribution.shape(),
dtype=self._variational_distribution.dtype,
device=self._variational_distribution.device,
)
ones = torch.ones_like(zeros)
res = MultivariateNormal(zeros, DiagLazyTensor(ones))
return res
@property
@cached(name="variational_distribution_memo")
def variational_distribution(self):
if self._ngd():
raise RuntimeError(
"Variational distribution for NGD-CIQ should be computed during forward calls. "
"This is probably a bug in GPyTorch."
)
return super().variational_distribution
def forward(
self,
x: torch.Tensor,
inducing_points: torch.Tensor,
inducing_values: torch.Tensor,
variational_inducing_covar: Optional[MultivariateNormal] = None,
**kwargs,
) -> MultivariateNormal:
# Compute full prior distribution
full_inputs = torch.cat([inducing_points, x], dim=-2)
full_output = self.model.forward(full_inputs)
full_covar = full_output.lazy_covariance_matrix
# Covariance terms
num_induc = inducing_points.size(-2)
test_mean = full_output.mean[..., num_induc:]
induc_induc_covar = full_covar[..., :num_induc, :num_induc].evaluate_kernel().add_jitter(1e-2)
induc_data_covar = full_covar[..., :num_induc, num_induc:].evaluate()
data_data_covar = full_covar[..., num_induc:, num_induc:].add_jitter(1e-4)
# Compute interpolation terms
# K_XZ K_ZZ^{-1} \mu_z
# K_XZ K_ZZ^{-1/2} \mu_Z
with settings.max_preconditioner_size(0): # Turn off preconditioning for CIQ
interp_term = lazify(induc_induc_covar).sqrt_inv_matmul(induc_data_covar)
# Compute interpolated mean and variance terms
# We have separate computation rules for NGD versus standard GD
if self._ngd():
interp_mean, interp_var, kl_div = _NgdInterpTerms().apply(
interp_term, self._variational_distribution.natural_vec, self._variational_distribution.natural_mat,
)
# Compute the covariance of q(f)
predictive_var = data_data_covar.diag() - interp_term.pow(2).sum(dim=-2) + interp_var
predictive_var = torch.clamp_min(predictive_var, settings.min_variance.value(predictive_var.dtype))
predictive_covar = DiagLazyTensor(predictive_var)
# Also compute and cache the KL divergence
if not hasattr(self, "_memoize_cache"):
self._memoize_cache = dict()
self._memoize_cache["kl"] = kl_div
else:
# Compute interpolated mean term
interp_mean = torch.matmul(
interp_term.transpose(-1, -2), (inducing_values - self.prior_distribution.mean).unsqueeze(-1)
).squeeze(-1)
# Compute the covariance of q(f)
middle_term = self.prior_distribution.lazy_covariance_matrix.mul(-1)
if variational_inducing_covar is not None:
middle_term = SumLazyTensor(variational_inducing_covar, middle_term)
predictive_covar = SumLazyTensor(
data_data_covar.add_jitter(1e-4),
MatmulLazyTensor(interp_term.transpose(-1, -2), middle_term @ interp_term),
)
# Compute the mean of q(f)
# k_XZ K_ZZ^{-1/2} (m - K_ZZ^{-1/2} \mu_Z) + \mu_X
predictive_mean = interp_mean + test_mean
# Return the distribution
return MultivariateNormal(predictive_mean, predictive_covar)
def kl_divergence(self):
"""
Compute the KL divergence between the variational inducing distribution :math:`q(\mathbf u)`
and the prior inducing distribution :math:`p(\mathbf u)`.
:rtype: torch.Tensor
"""
if self._ngd():
if hasattr(self, "_memoize_cache") and "kl" in self._memoize_cache:
return self._memoize_cache["kl"]
else:
raise RuntimeError(
"KL divergence for NGD-CIQ should be computed during forward calls."
"This is probably a bug in GPyTorch."
)
else:
return super().kl_divergence()
def __call__(self, x: torch.Tensor, prior: bool = False, **kwargs) -> MultivariateNormal:
# This is mostly the same as _VariationalStrategy.__call__()
# but with special rules for natural gradient descent (to prevent O(M^3) computation)
# If we're in prior mode, then we're done!
if prior:
return self.model.forward(x)
# Delete previously cached items from the training distribution
if self.training:
self._clear_cache()
# (Maybe) initialize variational distribution
if not self.variational_params_initialized.item():
if self._ngd():
noise = torch.randn_like(self.prior_distribution.mean).mul_(1e-3)
eye = torch.eye(noise.size(-1), dtype=noise.dtype, device=noise.device).mul(-0.5)
self._variational_distribution.natural_vec.data.copy_(noise)
self._variational_distribution.natural_mat.data.copy_(eye)
self.variational_params_initialized.fill_(1)
else:
prior_dist = self.prior_distribution
self._variational_distribution.initialize_variational_distribution(prior_dist)
self.variational_params_initialized.fill_(1)
# Ensure inducing_points and x are the same size
inducing_points = self.inducing_points
if inducing_points.shape[:-2] != x.shape[:-2]:
x, inducing_points = self._expand_inputs(x, inducing_points)
# Get q(f)
if self._ngd():
return Module.__call__(
self, x, inducing_points, inducing_values=None, variational_inducing_covar=None, **kwargs,
)
else:
# Get p(u)/q(u)
variational_dist_u = self.variational_distribution
if isinstance(variational_dist_u, MultivariateNormal):
return Module.__call__(
self,
x,
inducing_points,
inducing_values=variational_dist_u.mean,
variational_inducing_covar=variational_dist_u.lazy_covariance_matrix,
**kwargs,
)
elif isinstance(variational_dist_u, Delta):
return Module.__call__(
self,
x,
inducing_points,
inducing_values=variational_dist_u.mean,
variational_inducing_covar=None,
ngd=False,
**kwargs,
)
else:
raise RuntimeError(
f"Invalid variational distribuition ({type(variational_dist_u)}). "
"Expected a multivariate normal or a delta distribution."
)
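# A minimal model sketch pairing this strategy with a natural variational
# distribution, as the class docstring recommends (the ApproximateGP
# subclass name and sizes are illustrative):
#
#   class CiqGP(gpytorch.models.ApproximateGP):
#       def __init__(self, inducing_points):
#           variational_distribution = NaturalVariationalDistribution(
#               inducing_points.size(-2))
#           variational_strategy = CiqVariationalStrategy(
#               self, inducing_points, variational_distribution,
#               learn_inducing_locations=True)
#           super().__init__(variational_strategy)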
|
[
"torch.ones_like",
"torch.randn_like",
"torch.cat",
"torch.zeros_like"
] |
[((3044, 3081), 'torch.zeros_like', 'torch.zeros_like', (['interp_mean[..., 0]'], {}), '(interp_mean[..., 0])\n', (3060, 3081), False, 'import torch\n'), ((7576, 7598), 'torch.ones_like', 'torch.ones_like', (['zeros'], {}), '(zeros)\n', (7591, 7598), False, 'import torch\n'), ((8373, 8412), 'torch.cat', 'torch.cat', (['[inducing_points, x]'], {'dim': '(-2)'}), '([inducing_points, x], dim=-2)\n', (8382, 8412), False, 'import torch\n'), ((12361, 12407), 'torch.randn_like', 'torch.randn_like', (['self.prior_distribution.mean'], {}), '(self.prior_distribution.mean)\n', (12377, 12407), False, 'import torch\n')]
|
# Generated by Django 3.0.6 on 2020-06-26 09:42
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('sessions', '0001_initial'),
('core', '0004_auto_20200603_1414'),
]
operations = [
migrations.CreateModel(
name='SiteContacted',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('timestamp', models.DateTimeField(auto_now_add=True, verbose_name='timestamp')),
('message', models.CharField(max_length=500)),
('name', models.CharField(blank=True, max_length=100, null=True)),
('email', models.EmailField(blank=True, max_length=100, null=True)),
('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='sessions.Session')),
],
),
]
|
[
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.models.EmailField",
"django.db.models.AutoField",
"django.db.models.DateTimeField"
] |
[((405, 498), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (421, 498), False, 'from django.db import migrations, models\n'), ((527, 592), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'verbose_name': '"""timestamp"""'}), "(auto_now_add=True, verbose_name='timestamp')\n", (547, 592), False, 'from django.db import migrations, models\n'), ((623, 655), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(500)'}), '(max_length=500)\n', (639, 655), False, 'from django.db import migrations, models\n'), ((683, 738), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(100)', 'null': '(True)'}), '(blank=True, max_length=100, null=True)\n', (699, 738), False, 'from django.db import migrations, models\n'), ((767, 823), 'django.db.models.EmailField', 'models.EmailField', ([], {'blank': '(True)', 'max_length': '(100)', 'null': '(True)'}), '(blank=True, max_length=100, null=True)\n', (784, 823), False, 'from django.db import migrations, models\n'), ((851, 965), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'to': '"""sessions.Session"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.SET_NULL, to='sessions.Session')\n", (868, 965), False, 'from django.db import migrations, models\n')]
|
from . import Loader
import pandas as pd
class SOFROISLoader(Loader):
dataset = 'SOFR'
fileglob = 'SOFR_OIS_*.csv'
columns = ['Trade Date', 'Exchange Code', 'Currency','Commodity Code',
'Short Description','Long Description', 'Curve Date', 'Offset',
'Discount Factor', 'Forward rate', 'Rate']
dtypes = {'category': ('Exchange Code', 'Currency', 'Commodity Code',
'Short Description', 'Long Description','Curve Date','Forward rate'),
'int64': ('Offset',),
'float': ('Discount Factor','Rate'),
'date:%Y%m%d': ('Trade Date',)}
def _load(self, file):
# Assumption: the header from the value column provides
# the name of the measure for that CSV file.
df = pd.read_csv(file, low_memory=False)
return df
sofroisLoader = SOFROISLoader()
|
[
"pandas.read_csv"
] |
[((813, 848), 'pandas.read_csv', 'pd.read_csv', (['file'], {'low_memory': '(False)'}), '(file, low_memory=False)\n', (824, 848), True, 'import pandas as pd\n')]
|
#!/usr/bin/python
import numpy as np
import pylab as plt
import seaborn as sns
sns.set_context("poster")
#with open("traj.dat") as f:
# data = f.read()
#
# data = data.split('\n')
#
# x = [row.split(' ')[0] for row in data]
# y = [row.split(' ')[1] for row in data]
#
# fig = plt.figure()
#
# ax1 = fig.add_subplot(111)
#
# ax1.set_title("Plot title...")
# ax1.set_xlabel('your x label..')
# ax1.set_ylabel('your y label...')
#
# ax1.plot(x,y, c='r', label='the data')
#
# leg = ax1.legend()
#fig = plt.figure()
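# left panel: plot every trajectory column of q.dat against time (column 0)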
plt.subplot(121)
#plt.ylim(-8,8)
data = np.genfromtxt(fname='q.dat')
#data = np.loadtxt('traj.dat')
for x in range(1,data.shape[1]):
plt.plot(data[:,0],data[:,x])
#plt.figure(1)
#plt.plot(x,y1,'-')
#plt.plot(x,y2,'g-')
plt.xlabel('time')
#plt.ylabel('position')
#plt.title('traj')
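# right panel: plot every column of c.dat against time, y ticks on the right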
ax2 = plt.subplot(122)
data = np.genfromtxt(fname='c.dat')
#data = np.loadtxt('traj.dat')
for x in range(1,data.shape[1]):
plt.plot(data[:,0],data[:,x])
plt.xlabel('time')
ax2.yaxis.tick_right()
ax2.yaxis.set_ticks_position('both')
plt.ylim(-0.2,5)
#plt.subplot(2,2,3)
#data = np.genfromtxt(fname='norm')
#plt.plot(data[:,0],data[:,1],'r-',linewidth=2)
#plt.ylabel('Norm')
#plt.ylim(0,2)
plt.legend()
plt.savefig('traj.pdf')
plt.show()
|
[
"pylab.show",
"numpy.genfromtxt",
"pylab.plot",
"pylab.subplot",
"pylab.savefig",
"pylab.ylim",
"pylab.xlabel",
"pylab.legend",
"seaborn.set_context"
] |
[((81, 106), 'seaborn.set_context', 'sns.set_context', (['"""poster"""'], {}), "('poster')\n", (96, 106), True, 'import seaborn as sns\n'), ((551, 567), 'pylab.subplot', 'plt.subplot', (['(121)'], {}), '(121)\n', (562, 567), True, 'import pylab as plt\n'), ((591, 619), 'numpy.genfromtxt', 'np.genfromtxt', ([], {'fname': '"""q.dat"""'}), "(fname='q.dat')\n", (604, 619), True, 'import numpy as np\n'), ((777, 795), 'pylab.xlabel', 'plt.xlabel', (['"""time"""'], {}), "('time')\n", (787, 795), True, 'import pylab as plt\n'), ((846, 862), 'pylab.subplot', 'plt.subplot', (['(122)'], {}), '(122)\n', (857, 862), True, 'import pylab as plt\n'), ((870, 898), 'numpy.genfromtxt', 'np.genfromtxt', ([], {'fname': '"""c.dat"""'}), "(fname='c.dat')\n", (883, 898), True, 'import numpy as np\n'), ((998, 1016), 'pylab.xlabel', 'plt.xlabel', (['"""time"""'], {}), "('time')\n", (1008, 1016), True, 'import pylab as plt\n'), ((1078, 1095), 'pylab.ylim', 'plt.ylim', (['(-0.2)', '(5)'], {}), '(-0.2, 5)\n', (1086, 1095), True, 'import pylab as plt\n'), ((1236, 1248), 'pylab.legend', 'plt.legend', ([], {}), '()\n', (1246, 1248), True, 'import pylab as plt\n'), ((1249, 1272), 'pylab.savefig', 'plt.savefig', (['"""traj.pdf"""'], {}), "('traj.pdf')\n", (1260, 1272), True, 'import pylab as plt\n'), ((1274, 1284), 'pylab.show', 'plt.show', ([], {}), '()\n', (1282, 1284), True, 'import pylab as plt\n'), ((689, 721), 'pylab.plot', 'plt.plot', (['data[:, 0]', 'data[:, x]'], {}), '(data[:, 0], data[:, x])\n', (697, 721), True, 'import pylab as plt\n'), ((968, 1000), 'pylab.plot', 'plt.plot', (['data[:, 0]', 'data[:, x]'], {}), '(data[:, 0], data[:, x])\n', (976, 1000), True, 'import pylab as plt\n')]
|
"""
This example acts as a keyboard to peer devices.
"""
# import board
import sys
import time
import adafruit_ble
from adafruit_ble.advertising import Advertisement
from adafruit_ble.advertising.standard import ProvideServicesAdvertisement
from adafruit_ble.services.standard.hid import HIDService
from adafruit_ble.services.standard.device_info import DeviceInfoService
from adafruit_hid.keyboard import Keyboard
from adafruit_hid.keyboard_layout_us import KeyboardLayoutUS
# Use default HID descriptor
hid = HIDService()
device_info = DeviceInfoService(
software_revision=adafruit_ble.__version__, manufacturer="Adafruit Industries"
)
advertisement = ProvideServicesAdvertisement(hid)
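# appearance 961 is the standard Bluetooth GAP appearance value for an HID keyboard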
advertisement.appearance = 961
scan_response = Advertisement()
ble = adafruit_ble.BLERadio()
if ble.connected:
for c in ble.connections:
c.disconnect()
print("advertising")
ble.start_advertising(advertisement, scan_response)
k = Keyboard(hid.devices)
kl = KeyboardLayoutUS(k)
while True:
while not ble.connected:
pass
print("Start typing:")
while ble.connected:
c = sys.stdin.read(1)
sys.stdout.write(c)
kl.write(c)
# print("sleeping")
time.sleep(0.1)
ble.start_advertising(advertisement)
|
[
"sys.stdout.write",
"sys.stdin.read",
"adafruit_hid.keyboard.Keyboard",
"adafruit_ble.services.standard.hid.HIDService",
"time.sleep",
"adafruit_hid.keyboard_layout_us.KeyboardLayoutUS",
"adafruit_ble.BLERadio",
"adafruit_ble.advertising.Advertisement",
"adafruit_ble.services.standard.device_info.DeviceInfoService",
"adafruit_ble.advertising.standard.ProvideServicesAdvertisement"
] |
[((514, 526), 'adafruit_ble.services.standard.hid.HIDService', 'HIDService', ([], {}), '()\n', (524, 526), False, 'from adafruit_ble.services.standard.hid import HIDService\n'), ((541, 643), 'adafruit_ble.services.standard.device_info.DeviceInfoService', 'DeviceInfoService', ([], {'software_revision': 'adafruit_ble.__version__', 'manufacturer': '"""Adafruit Industries"""'}), "(software_revision=adafruit_ble.__version__, manufacturer=\n 'Adafruit Industries')\n", (558, 643), False, 'from adafruit_ble.services.standard.device_info import DeviceInfoService\n'), ((661, 694), 'adafruit_ble.advertising.standard.ProvideServicesAdvertisement', 'ProvideServicesAdvertisement', (['hid'], {}), '(hid)\n', (689, 694), False, 'from adafruit_ble.advertising.standard import ProvideServicesAdvertisement\n'), ((742, 757), 'adafruit_ble.advertising.Advertisement', 'Advertisement', ([], {}), '()\n', (755, 757), False, 'from adafruit_ble.advertising import Advertisement\n'), ((765, 788), 'adafruit_ble.BLERadio', 'adafruit_ble.BLERadio', ([], {}), '()\n', (786, 788), False, 'import adafruit_ble\n'), ((939, 960), 'adafruit_hid.keyboard.Keyboard', 'Keyboard', (['hid.devices'], {}), '(hid.devices)\n', (947, 960), False, 'from adafruit_hid.keyboard import Keyboard\n'), ((966, 985), 'adafruit_hid.keyboard_layout_us.KeyboardLayoutUS', 'KeyboardLayoutUS', (['k'], {}), '(k)\n', (982, 985), False, 'from adafruit_hid.keyboard_layout_us import KeyboardLayoutUS\n'), ((1104, 1121), 'sys.stdin.read', 'sys.stdin.read', (['(1)'], {}), '(1)\n', (1118, 1121), False, 'import sys\n'), ((1130, 1149), 'sys.stdout.write', 'sys.stdout.write', (['c'], {}), '(c)\n', (1146, 1149), False, 'import sys\n'), ((1206, 1221), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (1216, 1221), False, 'import time\n')]
|
"""
Defining standard tensorflow optimizers as modules.
"""
import tensorflow as tf
from deeplearning import module
from deeplearning import tf_util as U
class SGD(module.Optimizer):
ninputs = 1
def __init__(self, name, loss, lr=1e-4, momentum=0.0, clip_norm=None):
super().__init__(name, loss)
self.lr = lr
self.momentum = momentum
self.clip_norm = clip_norm
def _build(self, loss):
# ops for updating the learning rate
self._lr = tf.Variable(self.lr, name='lr', trainable=False)
self._lr_placeholder = tf.placeholder(tf.float32, shape=(), name='lr_ph')
self._update_lr = self._lr.assign(self._lr_placeholder)
params = self.trainable_variables()
self._flatgrad = U.flatgrad(loss, params, self.clip_norm)
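        # raw per-variable gradients; their global norm is clipped when clip_norm is set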
grads = tf.gradients(loss, params)
if self.clip_norm is not None:
grads, grad_norm = tf.clip_by_global_norm(grads, self.clip_norm)
opt = tf.train.MomentumOptimizer(self.lr, momentum=self.momentum)
return grads, opt.apply_gradients(list(zip(grads, params)))
def _add_run_args(self, outs, feed_dict, **flags):
super()._add_run_args(outs, feed_dict, **flags)
if 'flatgrad' in flags and flags['flatgrad']:
outs['flatgrad'] = self._flatgrad
def update_lr(self, new_lr):
self.lr = new_lr
sess = tf.get_default_session()
sess.run(self._update_lr, feed_dict={self._lr_placeholder:self.lr})
# convenience method
def flatgrad(self, inputs, state=[]):
return self.run(inputs, state, out=False, state_out=False, flatgrad=True)['flatgrad']
class Adam(module.Optimizer):
ninputs = 1
def __init__(self, name, loss, lr=1e-4, beta1=0.9, beta2=0.999, clip_norm=None):
super().__init__(name, loss)
self.lr = lr
self.beta1 = beta1
self.beta2 = beta2
self.clip_norm = clip_norm
def _build(self, loss):
# ops for updating the learning rate
self._lr = tf.Variable(self.lr, name='lr', trainable=False)
self._lr_placeholder = tf.placeholder(tf.float32, shape=(), name='lr_ph')
self._update_lr = self._lr.assign(self._lr_placeholder)
params = self.trainable_variables()
self._flatgrad = U.flatgrad(loss, params, self.clip_norm)
grads = tf.gradients(loss, params)
if self.clip_norm is not None:
grads, grad_norm = tf.clip_by_global_norm(grads, self.clip_norm)
opt = tf.train.AdamOptimizer(self.lr, beta1=self.beta1, beta2=self.beta2)
return grads, opt.apply_gradients(list(zip(grads, params)))
def _add_run_args(self, outs, feed_dict, **flags):
super()._add_run_args(outs, feed_dict, **flags)
if 'flatgrad' in flags and flags['flatgrad']:
outs['flatgrad'] = self._flatgrad
def update_lr(self, new_lr):
self.lr = new_lr
sess = tf.get_default_session()
sess.run(self._update_lr, feed_dict={self._lr_placeholder:self.lr})
# convenience method
def flatgrad(self, inputs, state=[]):
return self.run(inputs, state, out=False, state_out=False, flatgrad=True)['flatgrad']
|
[
"deeplearning.tf_util.flatgrad",
"tensorflow.placeholder",
"tensorflow.Variable",
"tensorflow.train.MomentumOptimizer",
"tensorflow.gradients",
"tensorflow.train.AdamOptimizer",
"tensorflow.clip_by_global_norm",
"tensorflow.get_default_session"
] |
[((495, 543), 'tensorflow.Variable', 'tf.Variable', (['self.lr'], {'name': '"""lr"""', 'trainable': '(False)'}), "(self.lr, name='lr', trainable=False)\n", (506, 543), True, 'import tensorflow as tf\n'), ((575, 625), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {'shape': '()', 'name': '"""lr_ph"""'}), "(tf.float32, shape=(), name='lr_ph')\n", (589, 625), True, 'import tensorflow as tf\n'), ((760, 800), 'deeplearning.tf_util.flatgrad', 'U.flatgrad', (['loss', 'params', 'self.clip_norm'], {}), '(loss, params, self.clip_norm)\n', (770, 800), True, 'from deeplearning import tf_util as U\n'), ((817, 843), 'tensorflow.gradients', 'tf.gradients', (['loss', 'params'], {}), '(loss, params)\n', (829, 843), True, 'import tensorflow as tf\n'), ((974, 1033), 'tensorflow.train.MomentumOptimizer', 'tf.train.MomentumOptimizer', (['self.lr'], {'momentum': 'self.momentum'}), '(self.lr, momentum=self.momentum)\n', (1000, 1033), True, 'import tensorflow as tf\n'), ((1388, 1412), 'tensorflow.get_default_session', 'tf.get_default_session', ([], {}), '()\n', (1410, 1412), True, 'import tensorflow as tf\n'), ((2024, 2072), 'tensorflow.Variable', 'tf.Variable', (['self.lr'], {'name': '"""lr"""', 'trainable': '(False)'}), "(self.lr, name='lr', trainable=False)\n", (2035, 2072), True, 'import tensorflow as tf\n'), ((2104, 2154), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {'shape': '()', 'name': '"""lr_ph"""'}), "(tf.float32, shape=(), name='lr_ph')\n", (2118, 2154), True, 'import tensorflow as tf\n'), ((2289, 2329), 'deeplearning.tf_util.flatgrad', 'U.flatgrad', (['loss', 'params', 'self.clip_norm'], {}), '(loss, params, self.clip_norm)\n', (2299, 2329), True, 'from deeplearning import tf_util as U\n'), ((2346, 2372), 'tensorflow.gradients', 'tf.gradients', (['loss', 'params'], {}), '(loss, params)\n', (2358, 2372), True, 'import tensorflow as tf\n'), ((2503, 2570), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', (['self.lr'], {'beta1': 'self.beta1', 'beta2': 'self.beta2'}), '(self.lr, beta1=self.beta1, beta2=self.beta2)\n', (2525, 2570), True, 'import tensorflow as tf\n'), ((2925, 2949), 'tensorflow.get_default_session', 'tf.get_default_session', ([], {}), '()\n', (2947, 2949), True, 'import tensorflow as tf\n'), ((914, 959), 'tensorflow.clip_by_global_norm', 'tf.clip_by_global_norm', (['grads', 'self.clip_norm'], {}), '(grads, self.clip_norm)\n', (936, 959), True, 'import tensorflow as tf\n'), ((2443, 2488), 'tensorflow.clip_by_global_norm', 'tf.clip_by_global_norm', (['grads', 'self.clip_norm'], {}), '(grads, self.clip_norm)\n', (2465, 2488), True, 'import tensorflow as tf\n')]
|
'''font.py: Class to manage individual fonts.'''
import os
import codecs
from typing import Dict, Any, Optional
from fontTools.ttLib import TTFont
from fonty.lib.variants import FontAttribute
from fonty.lib.font_name_ids import FONT_NAMEID_FAMILY, FONT_NAMEID_FAMILY_PREFFERED, \
FONT_NAMEID_VARIANT, FONT_NAMEID_VARIANT_PREFFERED
from .font_format import FontFormat
class Font(object):
'''Class to manage individual fonts.'''
# Class Properties ------------------------------------------------------- #
path_to_font: str
family: str
variant: FontAttribute
name_table: Optional[Dict[Any, Any]] = None
# Constructor ------------------------------------------------------------ #
def __init__(
self,
path_to_font: str,
family: str = None,
variant: FontAttribute = None
) -> None:
self.path_to_font = path_to_font
# Get family name
self.family = family if family else self.get_family_name()
# Get variant
self.variant = variant if variant else self.get_variant()
# Class Methods ----------------------------------------------------------- #
def install(self):
'''Installs this font to the system.'''
from fonty.lib.install import install_fonts
# Install the font on to the system
installed_font = install_fonts(self)
return installed_font
def generate_filename(self, ext: str = None) -> str:
'''Generate a suitable filename from this font's name tables.'''
family_name = self.get_family_name()
variant = self.get_variant()
if ext is None:
_, ext = os.path.splitext(self.path_to_font)
            ext = ext if ext != '' else '.otf' # Fallback to .otf
return '{family}-{variant}{ext}'.format(
family=family_name,
variant=variant.print(long=True),
ext=ext
)
def parse(self) -> 'Font':
'''Parse the font's metadata from the font's name table.'''
if not self.path_to_font or not os.path.isfile(self.path_to_font):
raise Exception
font = TTFont(file=self.path_to_font)
if self.name_table is None:
self.name_table = {}
# Parse font file and retrieve family name and variant
for record in font['name'].names:
# Decode bytes
if b'\x00' in record.string:
data = record.string.decode('utf-16-be')
elif b'\xa9' in record.string:
data = codecs.decode(record.string, errors='ignore')
else:
data = codecs.decode(record.string, errors='ignore')
self.name_table[str(record.nameID)] = data
return self
def get_name_data_from_id(self, name_id: str) -> str:
'''Gets data from the font's name table via the name id.'''
if self.name_table is None:
self.parse()
return self.name_table.get(name_id, None)
def get_family_name(self) -> str:
'''Get family name from the font's name tables.'''
if self.name_table is None:
self.parse()
family_name = self.get_name_data_from_id(FONT_NAMEID_FAMILY)
family_name_preferred = self.get_name_data_from_id(FONT_NAMEID_FAMILY_PREFFERED)
return family_name_preferred if family_name_preferred else family_name
def get_variant(self) -> FontAttribute:
'''Get the font attributes from the font's name tables.'''
if self.name_table is None:
self.parse()
variant = self.get_name_data_from_id(FONT_NAMEID_VARIANT)
variant_preferred = self.get_name_data_from_id(FONT_NAMEID_VARIANT_PREFFERED)
variant = variant_preferred if variant_preferred else variant
return FontAttribute.parse(variant)
def convert(self, path: str, font_format: 'FontFormat' = None) -> str:
'''Converts this font to either woff or woff2 formats.'''
_, ext = os.path.splitext(os.path.basename(self.path_to_font))
font = TTFont(file=self.path_to_font)
# Get font flavor
if font_format:
if font_format == FontFormat.WOFF:
font.flavor = 'woff'
ext = '.woff'
elif font_format == FontFormat.WOFF2:
font.flavor = 'woff2'
ext = '.woff2'
else:
raise Exception # Only woff and woff2 supported for now
# Create output directory if it doesn't exist
path = os.path.abspath(path)
if not os.path.exists(path):
os.makedirs(path, exist_ok=True)
if os.path.isdir(path):
path = os.path.join(path, '') # Append trailing slash
# Generate output paths
output_path = os.path.join(os.path.dirname(path), self.generate_filename(ext))
# Convert and save
font.save(file=output_path)
return output_path
|
[
"os.path.abspath",
"fontTools.ttLib.TTFont",
"os.makedirs",
"os.path.basename",
"os.path.isdir",
"codecs.decode",
"os.path.dirname",
"os.path.exists",
"os.path.isfile",
"os.path.splitext",
"fonty.lib.install.install_fonts",
"os.path.join",
"fonty.lib.variants.FontAttribute.parse"
] |
[((1406, 1425), 'fonty.lib.install.install_fonts', 'install_fonts', (['self'], {}), '(self)\n', (1419, 1425), False, 'from fonty.lib.install import install_fonts\n'), ((2199, 2229), 'fontTools.ttLib.TTFont', 'TTFont', ([], {'file': 'self.path_to_font'}), '(file=self.path_to_font)\n', (2205, 2229), False, 'from fontTools.ttLib import TTFont\n'), ((3854, 3882), 'fonty.lib.variants.FontAttribute.parse', 'FontAttribute.parse', (['variant'], {}), '(variant)\n', (3873, 3882), False, 'from fonty.lib.variants import FontAttribute\n'), ((4111, 4141), 'fontTools.ttLib.TTFont', 'TTFont', ([], {'file': 'self.path_to_font'}), '(file=self.path_to_font)\n', (4117, 4141), False, 'from fontTools.ttLib import TTFont\n'), ((4586, 4607), 'os.path.abspath', 'os.path.abspath', (['path'], {}), '(path)\n', (4601, 4607), False, 'import os\n'), ((4701, 4720), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (4714, 4720), False, 'import os\n'), ((1716, 1751), 'os.path.splitext', 'os.path.splitext', (['self.path_to_font'], {}), '(self.path_to_font)\n', (1732, 1751), False, 'import os\n'), ((4059, 4094), 'os.path.basename', 'os.path.basename', (['self.path_to_font'], {}), '(self.path_to_font)\n', (4075, 4094), False, 'import os\n'), ((4623, 4643), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (4637, 4643), False, 'import os\n'), ((4657, 4689), 'os.makedirs', 'os.makedirs', (['path'], {'exist_ok': '(True)'}), '(path, exist_ok=True)\n', (4668, 4689), False, 'import os\n'), ((4741, 4763), 'os.path.join', 'os.path.join', (['path', '""""""'], {}), "(path, '')\n", (4753, 4763), False, 'import os\n'), ((4856, 4877), 'os.path.dirname', 'os.path.dirname', (['path'], {}), '(path)\n', (4871, 4877), False, 'import os\n'), ((2120, 2153), 'os.path.isfile', 'os.path.isfile', (['self.path_to_font'], {}), '(self.path_to_font)\n', (2134, 2153), False, 'import os\n'), ((2596, 2641), 'codecs.decode', 'codecs.decode', (['record.string'], {'errors': '"""ignore"""'}), "(record.string, errors='ignore')\n", (2609, 2641), False, 'import codecs\n'), ((2683, 2728), 'codecs.decode', 'codecs.decode', (['record.string'], {'errors': '"""ignore"""'}), "(record.string, errors='ignore')\n", (2696, 2728), False, 'import codecs\n')]
|
from flask import g, abort, redirect, url_for
from app.instances import db
from app.models.Post import Post
from app.models.Answer import Answer
from app.models.PostVote import PostVote
from app.models.AnswerVote import AnswerVote
# noinspection PyUnresolvedReferences
import app.routes.post
# noinspection PyUnresolvedReferences
import app.routes.user_settings
# noinspection PyUnresolvedReferences
import app.routes.auth
def get_post_vote_breakdown(post_id):
post = Post.query.filter_by(id=post_id).first()
if post is None:
return abort(404)
votes = list(map(lambda vote: vote.vote, PostVote.query.filter_by(post_id=post_id).all()))
upvotes = votes.count(1)
downvotes = votes.count(-1)
return {"upvote": upvotes, "downvote": downvotes}
def get_answer_vote_breakdown(answer_id):
answer = Answer.query.filter_by(id=answer_id).first()
if answer is None:
return abort(404)
votes = list(map(lambda vote: vote.vote, AnswerVote.query.filter_by(answer_id=answer_id).all()))
upvotes = votes.count(1)
downvotes = votes.count(-1)
return {"upvote": upvotes, "downvote": downvotes}
def get_post_vote(post_id):
current_user = g.user
if current_user is None:
return {"vote": 0, "breakdown": get_post_vote_breakdown(post_id)}
post_votes = PostVote.query.filter_by(post_id=post_id, user_id=current_user.id).first()
if post_votes is None:
vote = 0
else:
vote = post_votes.vote
return {"vote": vote, "breakdown": get_post_vote_breakdown(post_id)}
def get_answer_vote(answer_id):
current_user = g.user
if current_user is None:
return {"vote": 0, "breakdown": get_answer_vote_breakdown(answer_id)}
answer_votes = AnswerVote.query.filter_by(answer_id=answer_id, user_id=current_user.id).first()
if answer_votes is None:
vote = 0
else:
vote = answer_votes.vote
return {"vote": vote, "breakdown": get_answer_vote_breakdown(answer_id)}
def do_post_vote(post_id, vote):
current_user = g.user
if current_user is None:
return abort(401)
# ensure that vote is a valid value
try:
vote = int(vote)
except ValueError:
return abort(400)
if vote not in (-1, 0, 1):
return abort(400)
post = Post.query.filter_by(id=post_id).first()
# ensure that user is not voting on own content
if post.user_id == g.user.id:
return abort(403)
# handle changing existing vote
prev_vote = PostVote.query.filter_by(post_id=post_id, user_id=current_user.id).first()
if prev_vote is not None:
prev_vote.vote = vote
db.session.commit()
else:
new_vote = PostVote(post_id=post_id, vote=vote, user_id=current_user.id)
current_user.post_votes.append(new_vote)
post = Post.query.filter_by(id=post_id).first()
post.votes.append(new_vote)
db.session.add(new_vote)
db.session.commit()
return {"vote": vote, "breakdown": get_post_vote_breakdown(post_id)}
def do_answer_vote(answer_id, vote):
current_user = g.user
if current_user is None:
return abort(401)
# ensure that vote is a valid value
try:
vote = int(vote)
except ValueError:
return abort(400)
if vote not in (-1, 0, 1):
return abort(400)
answer = Answer.query.filter_by(id=answer_id).first()
# ensure that user is not voting on own content
if answer.user_id == g.user.id:
return abort(403)
# handle changing existing vote
prev_vote = AnswerVote.query.filter_by(answer_id=answer_id, user_id=current_user.id).first()
if prev_vote is not None:
prev_vote.vote = vote
db.session.commit()
else:
new_vote = AnswerVote(answer_id=answer_id, vote=vote, user_id=current_user.id)
current_user.answer_votes.append(new_vote)
answer.votes.append(new_vote)
db.session.add(new_vote)
db.session.commit()
return {"vote": vote, "breakdown": get_answer_vote_breakdown(answer_id)}
|
[
"app.models.PostVote.PostVote",
"app.instances.db.session.commit",
"flask.abort",
"app.models.Answer.Answer.query.filter_by",
"app.models.Post.Post.query.filter_by",
"app.models.AnswerVote.AnswerVote.query.filter_by",
"app.models.AnswerVote.AnswerVote",
"app.instances.db.session.add",
"app.models.PostVote.PostVote.query.filter_by"
] |
[((553, 563), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (558, 563), False, 'from flask import g, abort, redirect, url_for\n'), ((914, 924), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (919, 924), False, 'from flask import g, abort, redirect, url_for\n'), ((2092, 2102), 'flask.abort', 'abort', (['(401)'], {}), '(401)\n', (2097, 2102), False, 'from flask import g, abort, redirect, url_for\n'), ((2273, 2283), 'flask.abort', 'abort', (['(400)'], {}), '(400)\n', (2278, 2283), False, 'from flask import g, abort, redirect, url_for\n'), ((2438, 2448), 'flask.abort', 'abort', (['(403)'], {}), '(403)\n', (2443, 2448), False, 'from flask import g, abort, redirect, url_for\n'), ((2645, 2664), 'app.instances.db.session.commit', 'db.session.commit', ([], {}), '()\n', (2662, 2664), False, 'from app.instances import db\n'), ((2694, 2755), 'app.models.PostVote.PostVote', 'PostVote', ([], {'post_id': 'post_id', 'vote': 'vote', 'user_id': 'current_user.id'}), '(post_id=post_id, vote=vote, user_id=current_user.id)\n', (2702, 2755), False, 'from app.models.PostVote import PostVote\n'), ((2906, 2930), 'app.instances.db.session.add', 'db.session.add', (['new_vote'], {}), '(new_vote)\n', (2920, 2930), False, 'from app.instances import db\n'), ((2939, 2958), 'app.instances.db.session.commit', 'db.session.commit', ([], {}), '()\n', (2956, 2958), False, 'from app.instances import db\n'), ((3142, 3152), 'flask.abort', 'abort', (['(401)'], {}), '(401)\n', (3147, 3152), False, 'from flask import g, abort, redirect, url_for\n'), ((3323, 3333), 'flask.abort', 'abort', (['(400)'], {}), '(400)\n', (3328, 3333), False, 'from flask import g, abort, redirect, url_for\n'), ((3497, 3507), 'flask.abort', 'abort', (['(403)'], {}), '(403)\n', (3502, 3507), False, 'from flask import g, abort, redirect, url_for\n'), ((3710, 3729), 'app.instances.db.session.commit', 'db.session.commit', ([], {}), '()\n', (3727, 3729), False, 'from app.instances import db\n'), ((3759, 3826), 'app.models.AnswerVote.AnswerVote', 'AnswerVote', ([], {'answer_id': 'answer_id', 'vote': 'vote', 'user_id': 'current_user.id'}), '(answer_id=answer_id, vote=vote, user_id=current_user.id)\n', (3769, 3826), False, 'from app.models.AnswerVote import AnswerVote\n'), ((3925, 3949), 'app.instances.db.session.add', 'db.session.add', (['new_vote'], {}), '(new_vote)\n', (3939, 3949), False, 'from app.instances import db\n'), ((3958, 3977), 'app.instances.db.session.commit', 'db.session.commit', ([], {}), '()\n', (3975, 3977), False, 'from app.instances import db\n'), ((476, 508), 'app.models.Post.Post.query.filter_by', 'Post.query.filter_by', ([], {'id': 'post_id'}), '(id=post_id)\n', (496, 508), False, 'from app.models.Post import Post\n'), ((831, 867), 'app.models.Answer.Answer.query.filter_by', 'Answer.query.filter_by', ([], {'id': 'answer_id'}), '(id=answer_id)\n', (853, 867), False, 'from app.models.Answer import Answer\n'), ((1318, 1384), 'app.models.PostVote.PostVote.query.filter_by', 'PostVote.query.filter_by', ([], {'post_id': 'post_id', 'user_id': 'current_user.id'}), '(post_id=post_id, user_id=current_user.id)\n', (1342, 1384), False, 'from app.models.PostVote import PostVote\n'), ((1739, 1811), 'app.models.AnswerVote.AnswerVote.query.filter_by', 'AnswerVote.query.filter_by', ([], {'answer_id': 'answer_id', 'user_id': 'current_user.id'}), '(answer_id=answer_id, user_id=current_user.id)\n', (1765, 1811), False, 'from app.models.AnswerVote import AnswerVote\n'), ((2216, 2226), 'flask.abort', 'abort', (['(400)'], {}), '(400)\n', (2221, 2226), False, 'from flask import g, abort, redirect, url_for\n'), ((2296, 2328), 'app.models.Post.Post.query.filter_by', 'Post.query.filter_by', ([], {'id': 'post_id'}), '(id=post_id)\n', (2316, 2328), False, 'from app.models.Post import Post\n'), ((2502, 2568), 'app.models.PostVote.PostVote.query.filter_by', 'PostVote.query.filter_by', ([], {'post_id': 'post_id', 'user_id': 'current_user.id'}), '(post_id=post_id, user_id=current_user.id)\n', (2526, 2568), False, 'from app.models.PostVote import PostVote\n'), ((3266, 3276), 'flask.abort', 'abort', (['(400)'], {}), '(400)\n', (3271, 3276), False, 'from flask import g, abort, redirect, url_for\n'), ((3348, 3384), 'app.models.Answer.Answer.query.filter_by', 'Answer.query.filter_by', ([], {'id': 'answer_id'}), '(id=answer_id)\n', (3370, 3384), False, 'from app.models.Answer import Answer\n'), ((3561, 3633), 'app.models.AnswerVote.AnswerVote.query.filter_by', 'AnswerVote.query.filter_by', ([], {'answer_id': 'answer_id', 'user_id': 'current_user.id'}), '(answer_id=answer_id, user_id=current_user.id)\n', (3587, 3633), False, 'from app.models.AnswerVote import AnswerVote\n'), ((2820, 2852), 'app.models.Post.Post.query.filter_by', 'Post.query.filter_by', ([], {'id': 'post_id'}), '(id=post_id)\n', (2840, 2852), False, 'from app.models.Post import Post\n'), ((609, 650), 'app.models.PostVote.PostVote.query.filter_by', 'PostVote.query.filter_by', ([], {'post_id': 'post_id'}), '(post_id=post_id)\n', (633, 650), False, 'from app.models.PostVote import PostVote\n'), ((970, 1017), 'app.models.AnswerVote.AnswerVote.query.filter_by', 'AnswerVote.query.filter_by', ([], {'answer_id': 'answer_id'}), '(answer_id=answer_id)\n', (996, 1017), False, 'from app.models.AnswerVote import AnswerVote\n')]
|
from keras.models import load_model
import numpy as np
from encoding import encode
from encoding import decode
model = load_model('Model-0.1.hf')
post_title = input("What do you want to know from u/rogersimon10? \n")
post_title = "What’s the worst thing you’ve eaten out of politeness?"
encoded_title = np.array(encode(post_title, padding=192))
encoded_title = encoded_title.reshape(-1)  # reshape returns a new array; assign the result
print(encoded_title)
print(encoded_title.shape)
print(len(encoded_title))
encoded_answer = model.predict(encoded_title)
decoded_answer = decode(encoded_answer)
print(decoded_answer)
|
[
"keras.models.load_model",
"encoding.decode",
"encoding.encode"
] |
[((120, 146), 'keras.models.load_model', 'load_model', (['"""Model-0.1.hf"""'], {}), "('Model-0.1.hf')\n", (130, 146), False, 'from keras.models import load_model\n'), ((512, 534), 'encoding.decode', 'decode', (['encoded_answer'], {}), '(encoded_answer)\n', (518, 534), False, 'from encoding import decode\n'), ((314, 345), 'encoding.encode', 'encode', (['post_title'], {'padding': '(192)'}), '(post_title, padding=192)\n', (320, 345), False, 'from encoding import encode\n')]
|
import os
import sys
import numpy as np
import pytest
from matchms import Spectrum
from spec2vec import Spec2Vec
from spec2vec import SpectrumDocument
path_root = os.path.dirname(os.getcwd())
sys.path.insert(0, os.path.join(path_root, "matchmsextras"))
from matchmsextras.library_search import library_matching
def test_library_matching():
spectrum_1 = Spectrum(mz=np.array([100, 150, 200.]),
intensities=np.array([0.7, 0.2, 0.1]),
metadata={'precursor_mz': 500.5})
spectrum_2 = Spectrum(mz=np.array([100, 140, 190.]),
intensities=np.array([0.4, 0.2, 0.1]),
metadata={'precursor_mz': 500.11})
spectrum_3 = Spectrum(mz=np.array([100, 140, 190.]),
intensities=np.array([0.3, 0.5, 0.2]),
metadata={'precursor_mz': 501.1})
spectrum_4 = Spectrum(mz=np.array([97.5, 137.5, 200.]),
intensities=np.array([0.8, 0.5, 0.4]),
metadata={'precursor_mz': 500.1})
documents_library = [SpectrumDocument(s) for s in [spectrum_1, spectrum_2, spectrum_3]]
documents_query = [SpectrumDocument(spectrum_4)]
found_matches = library_matching(documents_query, documents_library,
model=None,
presearch_based_on=["precursor_mz"],
include_scores=["cosine", "modcosine"],
ignore_non_annotated=False,
intensity_weighting_power=0.5,
allowed_missing_percentage=5.0,
cosine_tol=2.0,
mass_tolerance=2.0,
mass_tolerance_type="Dalton")
scores_cosine = found_matches[0].values[:,0]
expected_scores_cosine = np.array([0.05312127152597306, 0.0, 0.0])
scores_modcos = found_matches[0].values[:,2]
expected_scores_modcos = np.array([0.05312127152597306, 0.0, 0.7757282939050968])
assert list(scores_cosine) == [pytest.approx(x, 1e-6) for x in expected_scores_cosine], \
"Expected different scores."
assert list(scores_modcos) == [pytest.approx(x, 1e-6) for x in expected_scores_modcos], \
"Expected different mod. cosine scores."
assert np.all(found_matches[0].values[:,3] == np.array([1, 0, 2])), \
"Expected different number of matches"
assert np.all(found_matches[0].values[:,4]), "Expected all mass matches to be True"
|
[
"matchmsextras.library_search.library_matching",
"os.getcwd",
"spec2vec.SpectrumDocument",
"numpy.array",
"pytest.approx",
"os.path.join",
"numpy.all"
] |
[((180, 191), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (189, 191), False, 'import os\n'), ((212, 252), 'os.path.join', 'os.path.join', (['path_root', '"""matchmsextras"""'], {}), "(path_root, 'matchmsextras')\n", (224, 252), False, 'import os\n'), ((1240, 1554), 'matchmsextras.library_search.library_matching', 'library_matching', (['documents_query', 'documents_library'], {'model': 'None', 'presearch_based_on': "['precursor_mz']", 'include_scores': "['cosine', 'modcosine']", 'ignore_non_annotated': '(False)', 'intensity_weighting_power': '(0.5)', 'allowed_missing_percentage': '(5.0)', 'cosine_tol': '(2.0)', 'mass_tolerance': '(2.0)', 'mass_tolerance_type': '"""Dalton"""'}), "(documents_query, documents_library, model=None,\n presearch_based_on=['precursor_mz'], include_scores=['cosine',\n 'modcosine'], ignore_non_annotated=False, intensity_weighting_power=0.5,\n allowed_missing_percentage=5.0, cosine_tol=2.0, mass_tolerance=2.0,\n mass_tolerance_type='Dalton')\n", (1256, 1554), False, 'from matchmsextras.library_search import library_matching\n'), ((1951, 1992), 'numpy.array', 'np.array', (['[0.05312127152597306, 0.0, 0.0]'], {}), '([0.05312127152597306, 0.0, 0.0])\n', (1959, 1992), True, 'import numpy as np\n'), ((2071, 2127), 'numpy.array', 'np.array', (['[0.05312127152597306, 0.0, 0.7757282939050968]'], {}), '([0.05312127152597306, 0.0, 0.7757282939050968])\n', (2079, 2127), True, 'import numpy as np\n'), ((2534, 2571), 'numpy.all', 'np.all', (['found_matches[0].values[:, 4]'], {}), '(found_matches[0].values[:, 4])\n', (2540, 2571), True, 'import numpy as np\n'), ((1100, 1119), 'spec2vec.SpectrumDocument', 'SpectrumDocument', (['s'], {}), '(s)\n', (1116, 1119), False, 'from spec2vec import SpectrumDocument\n'), ((1190, 1218), 'spec2vec.SpectrumDocument', 'SpectrumDocument', (['spectrum_4'], {}), '(spectrum_4)\n', (1206, 1218), False, 'from spec2vec import SpectrumDocument\n'), ((372, 399), 'numpy.array', 'np.array', (['[100, 150, 200.0]'], {}), '([100, 150, 200.0])\n', (380, 399), True, 'import numpy as np\n'), ((438, 463), 'numpy.array', 'np.array', (['[0.7, 0.2, 0.1]'], {}), '([0.7, 0.2, 0.1])\n', (446, 463), True, 'import numpy as np\n'), ((554, 581), 'numpy.array', 'np.array', (['[100, 140, 190.0]'], {}), '([100, 140, 190.0])\n', (562, 581), True, 'import numpy as np\n'), ((620, 645), 'numpy.array', 'np.array', (['[0.4, 0.2, 0.1]'], {}), '([0.4, 0.2, 0.1])\n', (628, 645), True, 'import numpy as np\n'), ((737, 764), 'numpy.array', 'np.array', (['[100, 140, 190.0]'], {}), '([100, 140, 190.0])\n', (745, 764), True, 'import numpy as np\n'), ((803, 828), 'numpy.array', 'np.array', (['[0.3, 0.5, 0.2]'], {}), '([0.3, 0.5, 0.2])\n', (811, 828), True, 'import numpy as np\n'), ((919, 949), 'numpy.array', 'np.array', (['[97.5, 137.5, 200.0]'], {}), '([97.5, 137.5, 200.0])\n', (927, 949), True, 'import numpy as np\n'), ((988, 1013), 'numpy.array', 'np.array', (['[0.8, 0.5, 0.4]'], {}), '([0.8, 0.5, 0.4])\n', (996, 1013), True, 'import numpy as np\n'), ((2163, 2186), 'pytest.approx', 'pytest.approx', (['x', '(1e-06)'], {}), '(x, 1e-06)\n', (2176, 2186), False, 'import pytest\n'), ((2294, 2317), 'pytest.approx', 'pytest.approx', (['x', '(1e-06)'], {}), '(x, 1e-06)\n', (2307, 2317), False, 'import pytest\n'), ((2452, 2471), 'numpy.array', 'np.array', (['[1, 0, 2]'], {}), '([1, 0, 2])\n', (2460, 2471), True, 'import numpy as np\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Unit Tests
__author__: <NAME>, <NAME>, <NAME>
"""
import os
import sys
import unittest
import numpy as np
from scipy.io import loadmat
sys.path.append(".")
from inferactively.distributions import Categorical, Dirichlet # nopep8
class TestDirichlet(unittest.TestCase):
def test_init_empty(self):
d = Dirichlet()
self.assertEqual(d.ndim, 2)
def test_init_overload(self):
with self.assertRaises(ValueError):
values = np.random.rand(3, 2)
_ = Dirichlet(dims=2, values=values)
def test_float_conversion(self):
values = np.array([2, 3])
self.assertEqual(values.dtype, np.int)
d = Dirichlet(values=values)
self.assertEqual(d.values.dtype, np.float64)
def test_init_dims_expand(self):
d = Dirichlet(dims=[5])
self.assertEqual(d.shape, (5, 1))
def test_init_dims_int_expand(self):
d = Dirichlet(dims=5)
self.assertEqual(d.shape, (5, 1))
def test_multi_factor_init_dims(self):
d = Dirichlet(dims=[[5, 4], [4, 3]])
self.assertEqual(d.shape, (2,))
self.assertEqual(d[0].shape, (5, 4))
self.assertEqual(d[1].shape, (4, 3))
def test_multi_factor_init_values(self):
values_1 = np.random.rand(5, 4)
values_2 = np.random.rand(4, 3)
values = np.array([values_1, values_2])
d = Dirichlet(values=values)
self.assertEqual(d.shape, (2,))
self.assertEqual(d[0].shape, (5, 4))
self.assertEqual(d[1].shape, (4, 3))
def test_multi_factor_init_values_expand(self):
values_1 = np.random.rand(5)
values_2 = np.random.rand(4)
values = np.array([values_1, values_2])
d = Dirichlet(values=values)
self.assertEqual(d.shape, (2,))
self.assertEqual(d[0].shape, (5, 1))
self.assertEqual(d[1].shape, (4, 1))
def test_normalize_multi_factor(self):
values_1 = np.random.rand(5)
values_2 = np.random.rand(4, 3)
values = np.array([values_1, values_2])
d = Dirichlet(values=values)
normed = Categorical(values=d.mean(return_numpy=True))
self.assertTrue(normed.is_normalized())
def test_normalize_single_dim(self):
values = np.array([1.0, 1.0])
d = Dirichlet(values=values)
expected_values = np.array([[0.5], [0.5]])
self.assertTrue(np.array_equal(d.mean(return_numpy=True), expected_values))
def test_normalize_two_dim(self):
values = np.array([[1.0, 1.0], [1.0, 1.0]])
d = Dirichlet(values=values)
expected_values = np.array([[0.5, 0.5], [0.5, 0.5]])
self.assertTrue(np.array_equal(d.mean(return_numpy=True), expected_values))
def test_remove_zeros(self):
values = np.array([[1.0, 0.0], [1.0, 1.0]])
d = Dirichlet(values=values)
self.assertTrue((d.values == 0.0).any())
d.remove_zeros()
self.assertFalse((d.values == 0.0).any())
def test_contains_zeros(self):
values = np.array([[1.0, 0.0], [1.0, 1.0]])
d = Dirichlet(values=values)
self.assertTrue(d.contains_zeros())
values = np.array([[1.0, 1.0], [1.0, 1.0]])
d = Dirichlet(values=values)
self.assertFalse(d.contains_zeros())
"""
def test_entropy(self):
values = np.random.rand(3, 2)
entropy = -np.sum(values * np.log(values), 0)
d = Dirichlet(values=values)
self.assertTrue(np.array_equal(d.entropy(return_numpy=True), entropy))
"""
def test_log(self):
values = np.random.rand(3, 2)
log_values = np.log(values)
d = Dirichlet(values=values)
self.assertTrue(np.array_equal(d.log(return_numpy=True), log_values))
def test_copy(self):
values = np.random.rand(3, 2)
d = Dirichlet(values=values)
d_copy = d.copy()
self.assertTrue(np.array_equal(d_copy.values, d.values))
d_copy.values = d_copy.values * 2
self.assertFalse(np.array_equal(d_copy.values, d.values))
def test_ndim(self):
values = np.random.rand(3, 2)
d = Dirichlet(values=values)
self.assertEqual(d.ndim, d.values.ndim)
def test_shape(self):
values = np.random.rand(3, 2)
d = Dirichlet(values=values)
self.assertEqual(d.shape, (3, 2))
def test_expectation_single_factor(self):
""" tests implementation of expect_log method against matlab version (single factor)
"""
array_path = os.path.join(os.getcwd(), "tests/data/wnorm_a.mat")
mat_contents = loadmat(file_name=array_path)
result = mat_contents["result"]
d = Dirichlet(values=mat_contents["A"])
result_py = d.expectation_of_log(return_numpy=True)
self.assertTrue(np.isclose(result, result_py).all())
def test_expectation_multi_factor(self):
""" tests implementation of expect_log method against matlab version (multi factor)
"""
array_path = os.path.join(os.getcwd(), "tests/data/wnorm_b.mat")
mat_contents = loadmat(file_name=array_path)
result_1 = mat_contents["result_1"]
result_2 = mat_contents["result_2"]
d = Dirichlet(values=mat_contents["A"][0])
result_py = d.expectation_of_log(return_numpy=True)
self.assertTrue(
np.isclose(result_1, result_py[0]).all() and np.isclose(result_2, result_py[1]).all()
)
if __name__ == "__main__":
unittest.main()
|
[
"sys.path.append",
"unittest.main",
"numpy.array_equal",
"numpy.log",
"scipy.io.loadmat",
"inferactively.distributions.Dirichlet",
"os.getcwd",
"numpy.isclose",
"numpy.array",
"numpy.random.rand"
] |
[((189, 209), 'sys.path.append', 'sys.path.append', (['"""."""'], {}), "('.')\n", (204, 209), False, 'import sys\n'), ((5509, 5524), 'unittest.main', 'unittest.main', ([], {}), '()\n', (5522, 5524), False, 'import unittest\n'), ((368, 379), 'inferactively.distributions.Dirichlet', 'Dirichlet', ([], {}), '()\n', (377, 379), False, 'from inferactively.distributions import Categorical, Dirichlet\n'), ((641, 657), 'numpy.array', 'np.array', (['[2, 3]'], {}), '([2, 3])\n', (649, 657), True, 'import numpy as np\n'), ((717, 741), 'inferactively.distributions.Dirichlet', 'Dirichlet', ([], {'values': 'values'}), '(values=values)\n', (726, 741), False, 'from inferactively.distributions import Categorical, Dirichlet\n'), ((845, 864), 'inferactively.distributions.Dirichlet', 'Dirichlet', ([], {'dims': '[5]'}), '(dims=[5])\n', (854, 864), False, 'from inferactively.distributions import Categorical, Dirichlet\n'), ((961, 978), 'inferactively.distributions.Dirichlet', 'Dirichlet', ([], {'dims': '(5)'}), '(dims=5)\n', (970, 978), False, 'from inferactively.distributions import Categorical, Dirichlet\n'), ((1077, 1109), 'inferactively.distributions.Dirichlet', 'Dirichlet', ([], {'dims': '[[5, 4], [4, 3]]'}), '(dims=[[5, 4], [4, 3]])\n', (1086, 1109), False, 'from inferactively.distributions import Categorical, Dirichlet\n'), ((1305, 1325), 'numpy.random.rand', 'np.random.rand', (['(5)', '(4)'], {}), '(5, 4)\n', (1319, 1325), True, 'import numpy as np\n'), ((1345, 1365), 'numpy.random.rand', 'np.random.rand', (['(4)', '(3)'], {}), '(4, 3)\n', (1359, 1365), True, 'import numpy as np\n'), ((1383, 1413), 'numpy.array', 'np.array', (['[values_1, values_2]'], {}), '([values_1, values_2])\n', (1391, 1413), True, 'import numpy as np\n'), ((1426, 1450), 'inferactively.distributions.Dirichlet', 'Dirichlet', ([], {'values': 'values'}), '(values=values)\n', (1435, 1450), False, 'from inferactively.distributions import Categorical, Dirichlet\n'), ((1653, 1670), 'numpy.random.rand', 'np.random.rand', (['(5)'], {}), '(5)\n', (1667, 1670), True, 'import numpy as np\n'), ((1690, 1707), 'numpy.random.rand', 'np.random.rand', (['(4)'], {}), '(4)\n', (1704, 1707), True, 'import numpy as np\n'), ((1725, 1755), 'numpy.array', 'np.array', (['[values_1, values_2]'], {}), '([values_1, values_2])\n', (1733, 1755), True, 'import numpy as np\n'), ((1768, 1792), 'inferactively.distributions.Dirichlet', 'Dirichlet', ([], {'values': 'values'}), '(values=values)\n', (1777, 1792), False, 'from inferactively.distributions import Categorical, Dirichlet\n'), ((1986, 2003), 'numpy.random.rand', 'np.random.rand', (['(5)'], {}), '(5)\n', (2000, 2003), True, 'import numpy as np\n'), ((2023, 2043), 'numpy.random.rand', 'np.random.rand', (['(4)', '(3)'], {}), '(4, 3)\n', (2037, 2043), True, 'import numpy as np\n'), ((2061, 2091), 'numpy.array', 'np.array', (['[values_1, values_2]'], {}), '([values_1, values_2])\n', (2069, 2091), True, 'import numpy as np\n'), ((2104, 2128), 'inferactively.distributions.Dirichlet', 'Dirichlet', ([], {'values': 'values'}), '(values=values)\n', (2113, 2128), False, 'from inferactively.distributions import Categorical, Dirichlet\n'), ((2299, 2319), 'numpy.array', 'np.array', (['[1.0, 1.0]'], {}), '([1.0, 1.0])\n', (2307, 2319), True, 'import numpy as np\n'), ((2332, 2356), 'inferactively.distributions.Dirichlet', 'Dirichlet', ([], {'values': 'values'}), '(values=values)\n', (2341, 2356), False, 'from inferactively.distributions import Categorical, Dirichlet\n'), ((2383, 2407), 'numpy.array', 'np.array', (['[[0.5], [0.5]]'], {}), '([[0.5], [0.5]])\n', (2391, 2407), True, 'import numpy as np\n'), ((2548, 2582), 'numpy.array', 'np.array', (['[[1.0, 1.0], [1.0, 1.0]]'], {}), '([[1.0, 1.0], [1.0, 1.0]])\n', (2556, 2582), True, 'import numpy as np\n'), ((2595, 2619), 'inferactively.distributions.Dirichlet', 'Dirichlet', ([], {'values': 'values'}), '(values=values)\n', (2604, 2619), False, 'from inferactively.distributions import Categorical, Dirichlet\n'), ((2646, 2680), 'numpy.array', 'np.array', (['[[0.5, 0.5], [0.5, 0.5]]'], {}), '([[0.5, 0.5], [0.5, 0.5]])\n', (2654, 2680), True, 'import numpy as np\n'), ((2816, 2850), 'numpy.array', 'np.array', (['[[1.0, 0.0], [1.0, 1.0]]'], {}), '([[1.0, 0.0], [1.0, 1.0]])\n', (2824, 2850), True, 'import numpy as np\n'), ((2863, 2887), 'inferactively.distributions.Dirichlet', 'Dirichlet', ([], {'values': 'values'}), '(values=values)\n', (2872, 2887), False, 'from inferactively.distributions import Categorical, Dirichlet\n'), ((3065, 3099), 'numpy.array', 'np.array', (['[[1.0, 0.0], [1.0, 1.0]]'], {}), '([[1.0, 0.0], [1.0, 1.0]])\n', (3073, 3099), True, 'import numpy as np\n'), ((3112, 3136), 'inferactively.distributions.Dirichlet', 'Dirichlet', ([], {'values': 'values'}), '(values=values)\n', (3121, 3136), False, 'from inferactively.distributions import Categorical, Dirichlet\n'), ((3198, 3232), 'numpy.array', 'np.array', (['[[1.0, 1.0], [1.0, 1.0]]'], {}), '([[1.0, 1.0], [1.0, 1.0]])\n', (3206, 3232), True, 'import numpy as np\n'), ((3245, 3269), 'inferactively.distributions.Dirichlet', 'Dirichlet', ([], {'values': 'values'}), '(values=values)\n', (3254, 3269), False, 'from inferactively.distributions import Categorical, Dirichlet\n'), ((3611, 3631), 'numpy.random.rand', 'np.random.rand', (['(3)', '(2)'], {}), '(3, 2)\n', (3625, 3631), True, 'import numpy as np\n'), ((3653, 3667), 'numpy.log', 'np.log', (['values'], {}), '(values)\n', (3659, 3667), True, 'import numpy as np\n'), ((3680, 3704), 'inferactively.distributions.Dirichlet', 'Dirichlet', ([], {'values': 'values'}), '(values=values)\n', (3689, 3704), False, 'from inferactively.distributions import Categorical, Dirichlet\n'), ((3826, 3846), 'numpy.random.rand', 'np.random.rand', (['(3)', '(2)'], {}), '(3, 2)\n', (3840, 3846), True, 'import numpy as np\n'), ((3859, 3883), 'inferactively.distributions.Dirichlet', 'Dirichlet', ([], {'values': 'values'}), '(values=values)\n', (3868, 3883), False, 'from inferactively.distributions import Categorical, Dirichlet\n'), ((4126, 4146), 'numpy.random.rand', 'np.random.rand', (['(3)', '(2)'], {}), '(3, 2)\n', (4140, 4146), True, 'import numpy as np\n'), ((4159, 4183), 'inferactively.distributions.Dirichlet', 'Dirichlet', ([], {'values': 'values'}), '(values=values)\n', (4168, 4183), False, 'from inferactively.distributions import Categorical, Dirichlet\n'), ((4276, 4296), 'numpy.random.rand', 'np.random.rand', (['(3)', '(2)'], {}), '(3, 2)\n', (4290, 4296), True, 'import numpy as np\n'), ((4309, 4333), 'inferactively.distributions.Dirichlet', 'Dirichlet', ([], {'values': 'values'}), '(values=values)\n', (4318, 4333), False, 'from inferactively.distributions import Categorical, Dirichlet\n'), ((4625, 4654), 'scipy.io.loadmat', 'loadmat', ([], {'file_name': 'array_path'}), '(file_name=array_path)\n', (4632, 4654), False, 'from scipy.io import loadmat\n'), ((4708, 4743), 'inferactively.distributions.Dirichlet', 'Dirichlet', ([], {'values': "mat_contents['A']"}), "(values=mat_contents['A'])\n", (4717, 4743), False, 'from inferactively.distributions import Categorical, Dirichlet\n'), ((5112, 5141), 'scipy.io.loadmat', 'loadmat', ([], {'file_name': 'array_path'}), '(file_name=array_path)\n', (5119, 5141), False, 'from scipy.io import loadmat\n'), ((5243, 5281), 'inferactively.distributions.Dirichlet', 'Dirichlet', ([], {'values': "mat_contents['A'][0]"}), "(values=mat_contents['A'][0])\n", (5252, 5281), False, 'from inferactively.distributions import Categorical, Dirichlet\n'), ((516, 536), 'numpy.random.rand', 'np.random.rand', (['(3)', '(2)'], {}), '(3, 2)\n', (530, 536), True, 'import numpy as np\n'), ((553, 585), 'inferactively.distributions.Dirichlet', 'Dirichlet', ([], {'dims': '(2)', 'values': 'values'}), '(dims=2, values=values)\n', (562, 585), False, 'from inferactively.distributions import Categorical, Dirichlet\n'), ((3934, 3973), 'numpy.array_equal', 'np.array_equal', (['d_copy.values', 'd.values'], {}), '(d_copy.values, d.values)\n', (3948, 3973), True, 'import numpy as np\n'), ((4042, 4081), 'numpy.array_equal', 'np.array_equal', (['d_copy.values', 'd.values'], {}), '(d_copy.values, d.values)\n', (4056, 4081), True, 'import numpy as np\n'), ((4563, 4574), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (4572, 4574), False, 'import os\n'), ((5050, 5061), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (5059, 5061), False, 'import os\n'), ((4828, 4857), 'numpy.isclose', 'np.isclose', (['result', 'result_py'], {}), '(result, result_py)\n', (4838, 4857), True, 'import numpy as np\n'), ((5380, 5414), 'numpy.isclose', 'np.isclose', (['result_1', 'result_py[0]'], {}), '(result_1, result_py[0])\n', (5390, 5414), True, 'import numpy as np\n'), ((5425, 5459), 'numpy.isclose', 'np.isclose', (['result_2', 'result_py[1]'], {}), '(result_2, result_py[1])\n', (5435, 5459), True, 'import numpy as np\n')]
|
from sqladmin.helpers import secure_filename
def test_secure_filename(monkeypatch):
assert secure_filename("My cool movie.mov") == "My_cool_movie.mov"
assert secure_filename("../../../etc/passwd") == "etc_passwd"
assert (
secure_filename("i contain cool \xfcml\xe4uts.txt")
== "i_contain_cool_umlauts.txt"
)
assert secure_filename("__filename__") == "filename"
assert secure_filename("foo$&^*)bar") == "foobar"
|
[
"sqladmin.helpers.secure_filename"
] |
[((97, 133), 'sqladmin.helpers.secure_filename', 'secure_filename', (['"""My cool movie.mov"""'], {}), "('My cool movie.mov')\n", (112, 133), False, 'from sqladmin.helpers import secure_filename\n'), ((168, 206), 'sqladmin.helpers.secure_filename', 'secure_filename', (['"""../../../etc/passwd"""'], {}), "('../../../etc/passwd')\n", (183, 206), False, 'from sqladmin.helpers import secure_filename\n'), ((244, 289), 'sqladmin.helpers.secure_filename', 'secure_filename', (['"""i contain cool ümläuts.txt"""'], {}), "('i contain cool ümläuts.txt')\n", (259, 289), False, 'from sqladmin.helpers import secure_filename\n'), ((353, 384), 'sqladmin.helpers.secure_filename', 'secure_filename', (['"""__filename__"""'], {}), "('__filename__')\n", (368, 384), False, 'from sqladmin.helpers import secure_filename\n'), ((410, 440), 'sqladmin.helpers.secure_filename', 'secure_filename', (['"""foo$&^*)bar"""'], {}), "('foo$&^*)bar')\n", (425, 440), False, 'from sqladmin.helpers import secure_filename\n')]
|
import os, sys, time, re
import cv2
import deeptool
sameImages= []
cachedImages = None
def isSameImage(imghist, checkhist):
ret = cv2.compareHist(imghist, checkhist, 0)
ret2 = cv2.compareHist(imghist, checkhist, 1)
ret3 = cv2.compareHist(imghist, checkhist, 2)
ret4 = cv2.compareHist(imghist, checkhist, 3)
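    # treat images as duplicates when correlation (method 0) is near 1 and
    # the chi-square distance (method 1) is small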
return ret > 0.995 and ret2 < 300, (ret, ret2, ret3, ret4)
def eraseByHist(file):
global sameImages, cachedImages
img = cv2.imread(file)
img = cv2.resize(img, (64, 64))
imghist = cv2.calcHist([img], [0, 1, 2], None, [8, 8, 8], [0,256,0,256,0,256])
reg = re.compile(r"(\d+).(png|jpe?g)$")
sys.stdout.flush()
m = reg.search(file)
fileId = int(m.group(1))
if cachedImages is None:
cachedImages = {file: (imghist, fileId)}
else:
appended = False
        score = (0, 0, 0, 0)
for cachedfile in cachedImages:
# sys.stdout.write ("*")
# sys.stdout.flush()
cachedImgHist, cachedFileId = cachedImages[cachedfile]
isSame , score= isSameImage(imghist, cachedImgHist)
if isSame and fileId < cachedFileId + 210:
# sys.stdout.write ("!")
# sys.stdout.flush()
sameImages.append((file, cachedfile, score))
appended = True
break
if not appended:
cachedImages[file] = (imghist, fileId)
def main(args):
global sameImages, cachedImages
if len(args) == 1:
print(args[0]+" dirname")
exit()
cachedImages = None
if os.path.isdir(args[1]):
start = time.time()
i = 0
for file in deeptool.listDir(args[1]):
if os.path.isdir(file):
print("--"+file+"--")
sys.stdout.flush()
cachedImages = None
for file2 in deeptool.listDir(file):
eraseByHist(file2)
i = i + 1
if (i % 1000 == 0):
end = time.time()
print (" %g data / sec" % ( 1000 / (end - start) ))
start = end
else:
eraseByHist(file)
i = i + 1
if (i % 1000 == 0):
end = time.time()
print (" %g data / sec" % ( 1000 / (end - start) ))
start = end
else:
print(args[0] + " dirname")
exit()
print("--------------------------------")
for file, matchfile,score in sameImages:
os.unlink(file)
print (file, matchfile, score)
main(sys.argv)
|
[
"os.unlink",
"os.path.isdir",
"cv2.calcHist",
"deeptool.listDir",
"time.time",
"cv2.imread",
"sys.stdout.flush",
"cv2.compareHist",
"cv2.resize",
"re.compile"
] |
[((137, 175), 'cv2.compareHist', 'cv2.compareHist', (['imghist', 'checkhist', '(0)'], {}), '(imghist, checkhist, 0)\n', (152, 175), False, 'import cv2\n'), ((187, 225), 'cv2.compareHist', 'cv2.compareHist', (['imghist', 'checkhist', '(1)'], {}), '(imghist, checkhist, 1)\n', (202, 225), False, 'import cv2\n'), ((237, 275), 'cv2.compareHist', 'cv2.compareHist', (['imghist', 'checkhist', '(2)'], {}), '(imghist, checkhist, 2)\n', (252, 275), False, 'import cv2\n'), ((287, 325), 'cv2.compareHist', 'cv2.compareHist', (['imghist', 'checkhist', '(3)'], {}), '(imghist, checkhist, 3)\n', (302, 325), False, 'import cv2\n'), ((459, 475), 'cv2.imread', 'cv2.imread', (['file'], {}), '(file)\n', (469, 475), False, 'import cv2\n'), ((486, 511), 'cv2.resize', 'cv2.resize', (['img', '(64, 64)'], {}), '(img, (64, 64))\n', (496, 511), False, 'import cv2\n'), ((526, 599), 'cv2.calcHist', 'cv2.calcHist', (['[img]', '[0, 1, 2]', 'None', '[8, 8, 8]', '[0, 256, 0, 256, 0, 256]'], {}), '([img], [0, 1, 2], None, [8, 8, 8], [0, 256, 0, 256, 0, 256])\n', (538, 599), False, 'import cv2\n'), ((606, 639), 're.compile', 're.compile', (['"""(\\\\d+).(png|jpe?g)$"""'], {}), "('(\\\\d+).(png|jpe?g)$')\n", (616, 639), False, 'import os, sys, time, re\n'), ((644, 662), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (660, 662), False, 'import os, sys, time, re\n'), ((1586, 1608), 'os.path.isdir', 'os.path.isdir', (['args[1]'], {}), '(args[1])\n', (1599, 1608), False, 'import os, sys, time, re\n'), ((1626, 1637), 'time.time', 'time.time', ([], {}), '()\n', (1635, 1637), False, 'import os, sys, time, re\n'), ((1672, 1697), 'deeptool.listDir', 'deeptool.listDir', (['args[1]'], {}), '(args[1])\n', (1688, 1697), False, 'import deeptool\n'), ((2576, 2591), 'os.unlink', 'os.unlink', (['file'], {}), '(file)\n', (2585, 2591), False, 'import os, sys, time, re\n'), ((1714, 1733), 'os.path.isdir', 'os.path.isdir', (['file'], {}), '(file)\n', (1727, 1733), False, 'import os, sys, time, re\n'), ((1789, 1807), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (1805, 1807), False, 'import os, sys, time, re\n'), ((1873, 1895), 'deeptool.listDir', 'deeptool.listDir', (['file'], {}), '(file)\n', (1889, 1895), False, 'import deeptool\n'), ((2300, 2311), 'time.time', 'time.time', ([], {}), '()\n', (2309, 2311), False, 'import os, sys, time, re\n'), ((2036, 2047), 'time.time', 'time.time', ([], {}), '()\n', (2045, 2047), False, 'import os, sys, time, re\n')]
|
# -*- coding: utf-8 -*-
"""
Created on Thu Nov 23 14:54:35 2017
@author: user
"""
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from typing import Any
from typing import Dict
from typing import List
from typing import Text
from rasa_nlu.config import RasaNLUConfig
from rasa_nlu.tokenizers import Tokenizer, Token
from rasa_nlu.components import Component
from rasa_nlu.training_data import Message
from rasa_nlu.training_data import TrainingData
import sys
from yaha import Cuttor
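# Python 2 only: reload(sys) restores setdefaultencoding so UTF-8 can be forced below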
reload(sys)
sys.setdefaultencoding('utf-8')
class YahaTokenizer(Tokenizer, Component):
name = "tokenizer_yaha"
provides = ["tokens"]
cuttor = Cuttor()
def __init__(self):
pass
@classmethod
def required_packages(cls):
# type: () -> List[Text]
return ["yaha"]
def train(self, training_data, config, **kwargs):
# type: (TrainingData, RasaNLUConfig, **Any) -> None
if config['language'] != 'zh':
raise Exception("tokenizer_yaha is only used for Chinese. Check your configure json file.")
for example in training_data.training_examples:
example.set("tokens", self.tokenize(example.text))
def process(self, message, **kwargs):
# type: (Message, **Any) -> None
message.set("tokens", self.tokenize(message.text))
def tokenize(self, text):
# type: (Text) -> List[Token]
tokenized = self.cuttor.tokenize(text.decode('utf-8'), search=True)
tokens = [Token(word, start) for (word, start, end) in tokenized]
return tokens
|
[
"rasa_nlu.tokenizers.Token",
"yaha.Cuttor",
"sys.setdefaultencoding"
] |
[((630, 661), 'sys.setdefaultencoding', 'sys.setdefaultencoding', (['"""utf-8"""'], {}), "('utf-8')\n", (652, 661), False, 'import sys\n'), ((798, 806), 'yaha.Cuttor', 'Cuttor', ([], {}), '()\n', (804, 806), False, 'from yaha import Cuttor\n'), ((1689, 1707), 'rasa_nlu.tokenizers.Token', 'Token', (['word', 'start'], {}), '(word, start)\n', (1694, 1707), False, 'from rasa_nlu.tokenizers import Tokenizer, Token\n')]
|
from models import *
from utils import *
from tensorboard_logger import configure, log_value
import os
try:
os.makedirs('../train_logs')
except OSError:
pass
vgg19_exc = VGG19_extractor(torchvision.models.vgg19(pretrained=True))
vgg19_exc = vgg19_exc.cuda()
E1 = Encoder(n_res_blocks=10)
D1 = Decoder(n_res_blocks=10)
A = AE(E1, D1)
A = A.cuda()
def train_ae(model, modelName, batchsz):
########## logging stuff
configure('../train_logs/'+modelName+'_bsz{}'.format(batchsz), flush_secs=5)
print('I configured .. ')
########################
def mynorm2(x):
m1 = torch.min(x)
m2 = torch.max(x)
if m2-m1 < 1e-6:
return x
else:
return (x-m1)/(m2-m1)
mytransform2 = transforms.Compose(
[transforms.RandomCrop((121,121)),
# transforms.Lambda( lambda x : Image.fromarray(gaussian_filter(x, sigma=(10,10,0)) )),
# transforms.Resize((41,41)),
transforms.ToTensor(),
transforms.Lambda( lambda x : mynorm2(x) )])
trainset = dsets.ImageFolder(root='../sample_dataset/train/',transform=mytransform2)
trainloader = torch.utils.data.DataLoader(trainset, batch_size=batchsz, shuffle=True, num_workers=2)
testset = dsets.ImageFolder(root='../sample_dataset/test/',transform=mytransform2)
testloader = torch.utils.data.DataLoader(testset, batch_size=batchsz, shuffle=True, num_workers=2)
# def mynorm2(x):
# m1 = torch.min(x)
# m2 = torch.max(x)
# return (x-m1)/(m2-m1)
# mytransform2 = transforms.Compose(
# [transforms.RandomCrop((41,41)),
# transforms.ToTensor(),
# transforms.Lambda( lambda x : mynorm2(x))])
# trainset = dsets.ImageFolder(root='../sample_dataset/train/',transform=mytransform2)
# trainloader = torch.utils.data.DataLoader(trainset, batch_size=batchsz, shuffle=True, num_workers=2)
# testset = dsets.ImageFolder(root='../sample_dataset/test/',transform=mytransform2)
# testloader = torch.utils.data.DataLoader(testset, batch_size=batchsz, shuffle=True, num_workers=2)
testiter = iter(testloader)
testX, _ = next(testiter)
def eval_model(model):
X = testX
print('input looks like ...')
plt.figure()
imshow(torchvision.utils.make_grid(X))
X = Variable(X).cuda()
Y = model(X)
print('output looks like ...')
plt.figure()
imshow2(torchvision.utils.make_grid(Y.data.cpu()))
nepoch = 500
Criterion2 = nn.MSELoss()
Criterion1 = nn.L1Loss()
optimizer = optim.Adam(model.parameters(), lr=1e-5)
loss_track = []
for eph in range(nepoch):
dataloader = iter(trainloader)
print('starting epoch {} ...'.format(eph))
mean_L2_term = 0
mean_vl3_term = 0
mean_total_loss = 0
tot_count = 0
for i, (X, _) in enumerate(dataloader):
tot_count += X.size()[0]
X = Variable(X).cuda()
optimizer.zero_grad()
reconX = model(X)
l2 = Criterion2(reconX, X)
t1, t2, t3 = vgg19_exc(X)
rt1, rt2, rt3 = vgg19_exc(reconX)
# t1 = Variable(t1.data)
# rt1 = Variable(rt1.data)
# t2 = Variable(t2.data)
# rt2 = Variable(rt2.data)
t3 = Variable(t3.data)
rt3 = Variable(rt3.data)
vl3 = Criterion2(rt3, t3)
reconTerm = 10*l2 + vl3
loss = reconTerm
loss.backward()
optimizer.step()
mean_L2_term += l2.data[0]
mean_vl3_term += vl3.data[0]
mean_total_loss += loss.data[0]
# if i%rec_interval == 0:
# loss_track.append(loss.data[0])
# if i%disp_interval == 0:
# print('epoch:{}, iter: {}, L2term:{}, vl3: {}, reconTerm: {}'.format(
# eph, i, l2.data[0], vl3.data[0], reconTerm.data[0]))
mean_L2_term /= tot_count
mean_vl3_term /= tot_count
mean_total_loss /= tot_count
log_value('L2_term', mean_L2_term, eph)
log_value('vl3_term', mean_vl3_term, eph)
log_value('total_loss', mean_total_loss, eph)
print('epoch:{}, mean_L2term:{}, mean_vl3: {}, mean_reconTerm: {}'.format(eph, mean_L2_term, mean_vl3_term, mean_total_loss))
save_model(model, modelName+'.pth')
return loss_track
|
[
"tensorboard_logger.log_value",
"os.makedirs"
] |
[((113, 141), 'os.makedirs', 'os.makedirs', (['"""../train_logs"""'], {}), "('../train_logs')\n", (124, 141), False, 'import os\n'), ((4121, 4160), 'tensorboard_logger.log_value', 'log_value', (['"""L2_term"""', 'mean_L2_term', 'eph'], {}), "('L2_term', mean_L2_term, eph)\n", (4130, 4160), False, 'from tensorboard_logger import configure, log_value\n'), ((4169, 4210), 'tensorboard_logger.log_value', 'log_value', (['"""vl3_term"""', 'mean_vl3_term', 'eph'], {}), "('vl3_term', mean_vl3_term, eph)\n", (4178, 4210), False, 'from tensorboard_logger import configure, log_value\n'), ((4219, 4264), 'tensorboard_logger.log_value', 'log_value', (['"""total_loss"""', 'mean_total_loss', 'eph'], {}), "('total_loss', mean_total_loss, eph)\n", (4228, 4264), False, 'from tensorboard_logger import configure, log_value\n')]
|
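The training loop above optimises a perceptual loss: pixel-space MSE plus MSE between VGG-19 feature maps of the image and its reconstruction (the `10*l2 + vl3` term). A minimal sketch of that idea, assuming torchvision's pretrained VGG-19; the original's three-level VGG19_extractor is reduced to a single truncated feature stack for brevity:

import torch
import torch.nn as nn
import torchvision

# frozen, truncated VGG-19 as the fixed feature extractor
features = torchvision.models.vgg19(pretrained=True).features[:16].eval()
for p in features.parameters():
    p.requires_grad = False

mse = nn.MSELoss()

def perceptual_loss(recon, target, pixel_weight=10.0):
    # pixel MSE plus feature-space MSE, mirroring reconTerm above
    return pixel_weight * mse(recon, target) + mse(features(recon), features(target))

x = torch.rand(2, 3, 64, 64)
print(perceptual_loss(x, torch.rand_like(x)).item())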
import torch
import math
class Node:
def __init__(self, state, probs, value, length, moves, terminal = False):
self.state = state
self.moves = torch.nonzero(moves)
self.P = probs[self.moves].view(-1)
self.P = self.P / self.P.sum()
self.value = value
self.length = length
size, _ = self.moves.shape
self.size = size
self.N = torch.zeros(size, dtype = torch.int32)
self.Q = torch.zeros(size)
self.L = torch.zeros(size)
self.T = terminal
self.children = {}
def getProbs(self, size):
probs = self.N.float() / self.N.sum()
all_probs = torch.zeros(size)
all_probs[self.moves.view(-1)] = probs
return all_probs
class MCTS:
def __init__(self, cpuct, beta, max_steps):
self.cpuct = cpuct
self.beta = beta
self.max_steps = max_steps * 1.0
self.nodes = {}
self.root = None
self.current_node = None
self.parents = []
def set_parents(self, parents):
self.parents = parents
def selection(self, step):
        game_indices = torch.nonzero(self.root.N == 0)
        for index in game_indices:
self.parents = [(self.root, index)]
self.current_node = None
return
parents = []
node = self.root
best_player = True
while True:
if node.T == True:
parents.reverse()
self.parents = parents
self.current_node = node
return
N_sum = node.N.sum().item()
sq = math.sqrt(float(N_sum))
if best_player:
alpha = step / self.max_steps
if N_sum > 0:
b = node.Q + self.cpuct * node.P * sq / (1.0 + node.N)
c = node.Q + self.beta * node.L
u = alpha * b + (1 - alpha) * c
index = torch.argmax(u).item()
else:
index = torch.argmax(node.P).item()
else:
if N_sum > 0:
u = node.Q + self.cpuct * node.P * sq / (1.0 + node.N)
index = torch.argmax(u).item()
else:
index = torch.argmax(node.P).item()
parents.append((node, index))
if index in node.children:
node = node.children[index]
else:
parents.reverse()
self.parents = parents
self.current_node = None
return
step += 1
def backup(self, node, parents):
v = node.value
l = node.length
for parent, i in parents:
v = - v
count = parent.N[i] + 1
parent.Q[i] = (parent.N[i] * parent.Q[i] + v) / count
parent.L[i] = (parent.N[i] * parent.L[i] + l) / count
parent.N[i] = count
l -= 1
|
[
"torch.zeros",
"torch.argmax",
"torch.nonzero"
] |
[((164, 184), 'torch.nonzero', 'torch.nonzero', (['moves'], {}), '(moves)\n', (177, 184), False, 'import torch\n'), ((402, 438), 'torch.zeros', 'torch.zeros', (['size'], {'dtype': 'torch.int32'}), '(size, dtype=torch.int32)\n', (413, 438), False, 'import torch\n'), ((458, 475), 'torch.zeros', 'torch.zeros', (['size'], {}), '(size)\n', (469, 475), False, 'import torch\n'), ((493, 510), 'torch.zeros', 'torch.zeros', (['size'], {}), '(size)\n', (504, 510), False, 'import torch\n'), ((662, 679), 'torch.zeros', 'torch.zeros', (['size'], {}), '(size)\n', (673, 679), False, 'import torch\n'), ((1139, 1170), 'torch.nonzero', 'torch.nonzero', (['(self.root.N == 0)'], {}), '(self.root.N == 0)\n', (1152, 1170), False, 'import torch\n'), ((1969, 1984), 'torch.argmax', 'torch.argmax', (['u'], {}), '(u)\n', (1981, 1984), False, 'import torch\n'), ((2042, 2062), 'torch.argmax', 'torch.argmax', (['node.P'], {}), '(node.P)\n', (2054, 2062), False, 'import torch\n'), ((2221, 2236), 'torch.argmax', 'torch.argmax', (['u'], {}), '(u)\n', (2233, 2236), False, 'import torch\n'), ((2294, 2314), 'torch.argmax', 'torch.argmax', (['node.P'], {}), '(node.P)\n', (2306, 2314), False, 'import torch\n')]
|
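The selection loop above ranks children with the PUCT rule u = Q + cpuct * P * sqrt(sum(N)) / (1 + N), optionally blended with the length bonus L for the best player. A small numeric illustration of the core rule with made-up node statistics:

import math
import torch

Q = torch.tensor([0.10, 0.40, 0.25])  # running mean values (node.Q)
P = torch.tensor([0.50, 0.20, 0.30])  # policy priors (node.P)
N = torch.tensor([10.0, 2.0, 4.0])    # visit counts (node.N)
cpuct = 1.5

u = Q + cpuct * P * math.sqrt(N.sum().item()) / (1.0 + N)
print(u)                     # tensor([0.3727, 0.8000, 0.6100])
print(torch.argmax(u).item())  # 1: the under-visited, high-value child wins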
# Third-party Libraries
from flask_restful import Resource, reqparse
import pyvo
from astropy.io.votable import parse
class Search(Resource):
def __init__(self) -> None:
super().__init__()
self.service = pyvo.dal.TAPService("http://voparis-tap-planeto.obspm.fr/tap")
    def get(self, database):
        # Map database names to handler methods and call lazily, so the
        # TAP query only runs for the requested database.
        whitelist_databases = {
            "exoplanet.epn_core": self.exoplanet
        }
        if database in whitelist_databases:
            return whitelist_databases[database]()
def exoplanet(self):
parser = reqparse.RequestParser()
parser.add_argument("star_distance", required=True, location="args")
args = parser.parse_args()
star_distance = args["star_distance"]
service = self.service
query = "SELECT * FROM exoplanet.epn_core WHERE star_distance = %s" % (star_distance,)
results = service.search(query)
response = {}
for result in range(len(results)):
target_name = results[result].get("target_name")
star_name = results[result].get("star_name")
star_distance = results[result].get("star_distance")
response[target_name] = {
"star_name": star_name,
"star_distance": star_distance
}
return response
|
[
"flask_restful.reqparse.RequestParser",
"pyvo.dal.TAPService"
] |
[((226, 288), 'pyvo.dal.TAPService', 'pyvo.dal.TAPService', (['"""http://voparis-tap-planeto.obspm.fr/tap"""'], {}), "('http://voparis-tap-planeto.obspm.fr/tap')\n", (245, 288), False, 'import pyvo\n'), ((552, 576), 'flask_restful.reqparse.RequestParser', 'reqparse.RequestParser', ([], {}), '()\n', (574, 576), False, 'from flask_restful import Resource, reqparse\n')]
|
# -*- coding: utf-8 -*-
################################################################################
# | #
# | ______________________________________________________________ #
# | :~8a.`~888a:::::::::::::::88......88:::::::::::::::;a8~".a88::| #
# | ::::~8a.`~888a::::::::::::88......88::::::::::::;a8~".a888~:::| #
# | :::::::~8a.`~888a:::::::::88......88:::::::::;a8~".a888~::::::| #
# | ::::::::::~8a.`~888a::::::88......88::::::;a8~".a888~:::::::::| #
# | :::::::::::::~8a.`~888a:::88......88:::;a8~".a888~::::::::::::| #
# | :::::::::::: :~8a.`~888a:88 .....88;a8~".a888~:::::::::::::::| #
# | :::::::::::::::::::~8a.`~888......88~".a888~::::::::::::::::::| #
# | 8888888888888888888888888888......8888888888888888888888888888| #
# | ..............................................................| #
# | ..............................................................| #
# | 8888888888888888888888888888......8888888888888888888888888888| #
# | ::::::::::::::::::a888~".a88......888a."~8;:::::::::::::::::::| #
# | :::::::::::::::a888~".a8~:88......88~888a."~8;::::::::::::::::| #
# | ::::::::::::a888~".a8~::::88......88:::~888a."~8;:::::::::::::| #
# | :::::::::a888~".a8~:::::::88......88::::::~888a."~8;::::::::::| #
# | ::::::a888~".a8~::::::::::88......88:::::::::~888a."~8;:::::::| #
# | :::a888~".a8~:::::::::::::88......88::::::::::::~888a."~8;::::| #
# | a888~".a8~::::::::::::::::88......88:::::::::::::::~888a."~8;:| #
# | #
# | Rebirth Addon #
# | Copyright (C) 2017 Cypher #
# | #
# | This program is free software: you can redistribute it and/or modify #
# | it under the terms of the GNU General Public License as published by #
# | the Free Software Foundation, either version 3 of the License, or #
# | (at your option) any later version. #
# | #
# | This program is distributed in the hope that it will be useful, #
# | but WITHOUT ANY WARRANTY; without even the implied warranty of #
# | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# | GNU General Public License for more details. #
# | #
################################################################################
import ast
import hashlib
import re
import time
from resources.lib.modules import control
try:
from sqlite3 import dbapi2 as db, OperationalError
except ImportError:
from pysqlite2 import dbapi2 as db, OperationalError
"""
This module is used to get/set cache for every action done in the system
"""
cache_table = 'cache'
def get(function, duration, *args):
# type: (function, int, object) -> object or None
"""
Gets cached value for provided function with optional arguments, or executes and stores the result
:param function: Function to be executed
:param duration: Duration of validity of cache in hours
:param args: Optional arguments for the provided function
"""
try:
key = _hash_function(function, args)
cache_result = cache_get(key)
if cache_result:
if _is_cache_valid(cache_result['date'], duration):
return ast.literal_eval(cache_result['value'].encode('utf-8'))
fresh_result = repr(function(*args))
if not fresh_result:
# If the cache is old, but we didn't get fresh result, return the old cache
            if cache_result:
                # return the parsed stale value, consistent with the fresh path
                return ast.literal_eval(cache_result['value'].encode('utf-8'))
return None
cache_insert(key, fresh_result)
return ast.literal_eval(fresh_result.encode('utf-8'))
except Exception:
return None
def timeout(function, *args):
try:
key = _hash_function(function, args)
result = cache_get(key)
return int(result['date'])
except Exception:
return None
def cache_get(key):
# type: (str, str) -> dict or None
try:
cursor = _get_connection_cursor()
cursor.execute("SELECT * FROM %s WHERE key = ?" % cache_table, [key])
return cursor.fetchone()
except OperationalError:
return None
def cache_insert(key, value):
# type: (str, str) -> None
cursor = _get_connection_cursor()
now = int(time.time())
cursor.execute(
"CREATE TABLE IF NOT EXISTS %s (key TEXT, value TEXT, date INTEGER, UNIQUE(key))"
% cache_table
)
update_result = cursor.execute(
"UPDATE %s SET value=?,date=? WHERE key=?"
% cache_table, (value, now, key))
    if update_result.rowcount == 0:
cursor.execute(
"INSERT INTO %s Values (?, ?, ?)"
% cache_table, (key, value, now)
)
cursor.connection.commit()
def cache_clear():
try:
cursor = _get_connection_cursor()
for t in [cache_table, 'rel_list', 'rel_lib']:
try:
cursor.execute("DROP TABLE IF EXISTS %s" % t)
cursor.execute("VACUUM")
cursor.commit()
except:
pass
except:
pass
def _get_connection_cursor():
conn = _get_connection()
return conn.cursor()
def _get_connection():
control.makeFile(control.dataPath)
conn = db.connect(control.cacheFile)
conn.row_factory = _dict_factory
return conn
def _dict_factory(cursor, row):
d = {}
for idx, col in enumerate(cursor.description):
d[col[0]] = row[idx]
return d
def _hash_function(function_instance, *args):
return _get_function_name(function_instance) + _generate_md5(args)
def _get_function_name(function_instance):
    return re.sub(r'.+\smethod\s|.+function\s|\sat\s.+|\sof\s.+', '', repr(function_instance))
def _generate_md5(*args):
md5_hash = hashlib.md5()
    for arg in args:
        md5_hash.update(str(arg))
return str(md5_hash.hexdigest())
def _is_cache_valid(cached_time, cache_timeout):
now = int(time.time())
diff = now - cached_time
return (cache_timeout * 3600) > diff
|
[
"pysqlite2.dbapi2.connect",
"resources.lib.modules.control.makeFile",
"hashlib.md5",
"time.time"
] |
[((5827, 5861), 'resources.lib.modules.control.makeFile', 'control.makeFile', (['control.dataPath'], {}), '(control.dataPath)\n', (5843, 5861), False, 'from resources.lib.modules import control\n'), ((5873, 5902), 'pysqlite2.dbapi2.connect', 'db.connect', (['control.cacheFile'], {}), '(control.cacheFile)\n', (5883, 5902), True, 'from pysqlite2 import dbapi2 as db, OperationalError\n'), ((6395, 6408), 'hashlib.md5', 'hashlib.md5', ([], {}), '()\n', (6406, 6408), False, 'import hashlib\n'), ((4892, 4903), 'time.time', 'time.time', ([], {}), '()\n', (4901, 4903), False, 'import time\n'), ((6559, 6570), 'time.time', 'time.time', ([], {}), '()\n', (6568, 6570), False, 'import time\n')]
|
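A hypothetical usage sketch of the `get` memoizer above. The wrapped function's result must round-trip through repr()/ast.literal_eval, and it is stored in the SQLite cache table for the given number of hours; `fetch_listing` and its URL are illustrative stand-ins, not part of the addon:

def fetch_listing(url):
    # stand-in for an expensive scrape; must return a literal-evaluable value
    return {'url': url, 'items': [1, 2, 3]}

# first call runs fetch_listing and stores repr() of its result;
# a repeat call within 24 hours is answered from the cache table
listing = get(fetch_listing, 24, 'http://example.com/feed')
listing = get(fetch_listing, 24, 'http://example.com/feed')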
import torch
import tqdm  # assumed: tqdm.tqdm/tqdm.trange below are not re-exported by utils.model_utils
from torch.cuda.amp import autocast  # assumed source of the autocast() used below
from utils.model_utils import *
#Class conditional loglikelihood!
def elbo_recon(prediction,target):
error = (prediction - target).view(prediction.size(0), -1)
error = error ** 2
error = torch.sum(error, dim=-1)
return error
def calculate_ELBO(model,real_images):
with torch.no_grad():
real_mu, real_logvar, z_real, rec = model(real_images)
loss_rec = elbo_recon(rec,real_images)
loss_kl = model.kl_loss(real_mu, real_logvar)
ELBO = loss_rec+loss_kl
return -ELBO.squeeze()
def estimate_loglikelihoods(dataloader_test, model,s=1000):
_loglikelihood_estimates = []
_elbo_estimates = []
_class = []
tensor_s = torch.tensor(s).float()
with torch.no_grad():
for iteration, (batch, c) in enumerate(tqdm.tqdm(dataloader_test)):
_elbo = []
for i in tqdm.trange(s):
with autocast():
ELBO = calculate_ELBO(model,batch.cuda())
_elbo.append(ELBO)
_elbo_estimates.append(ELBO)
likelihood_est = torch.stack(_elbo,dim=1)
# print(torch.logsumexp(likelihood_est,dim=1).cpu()-torch.log(tensor_s))
_loglikelihood_estimates.append(torch.logsumexp(likelihood_est,dim=1).cpu()-torch.log(tensor_s))
_class.append(c)
_elbo_estimates = torch.cat(_elbo_estimates,dim=0)
_class = torch.cat(_class,dim=0)
_loglikelihood_estimates = torch.cat(_loglikelihood_estimates,dim=0)
return _loglikelihood_estimates,_elbo_estimates,_class
def calculate_metrics(_loglikelihood_estimates,_elbo_estimates,_class):
with torch.no_grad():
loglikelihood_estimate = _loglikelihood_estimates.mean(0)
ELBO = _elbo_estimates.mean()
loglikelihood_estimate_A =_loglikelihood_estimates[~_class].mean(0)
loglikelihood_estimate_B = _loglikelihood_estimates[_class].mean(0)
ELBO_A = _elbo_estimates[~_class].mean()
ELBO_B = _elbo_estimates[_class].mean()
return loglikelihood_estimate.item(),ELBO.item(),loglikelihood_estimate_A.item(),loglikelihood_estimate_B.item(),ELBO_A.item(),ELBO_B.item()
|
[
"torch.logsumexp",
"torch.stack",
"torch.cat",
"torch.no_grad",
"torch.sum",
"torch.log",
"torch.tensor"
] |
[((213, 237), 'torch.sum', 'torch.sum', (['error'], {'dim': '(-1)'}), '(error, dim=-1)\n', (222, 237), False, 'import torch\n'), ((304, 319), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (317, 319), False, 'import torch\n'), ((728, 743), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (741, 743), False, 'import torch\n'), ((1355, 1388), 'torch.cat', 'torch.cat', (['_elbo_estimates'], {'dim': '(0)'}), '(_elbo_estimates, dim=0)\n', (1364, 1388), False, 'import torch\n'), ((1405, 1429), 'torch.cat', 'torch.cat', (['_class'], {'dim': '(0)'}), '(_class, dim=0)\n', (1414, 1429), False, 'import torch\n'), ((1464, 1506), 'torch.cat', 'torch.cat', (['_loglikelihood_estimates'], {'dim': '(0)'}), '(_loglikelihood_estimates, dim=0)\n', (1473, 1506), False, 'import torch\n'), ((1651, 1666), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1664, 1666), False, 'import torch\n'), ((695, 710), 'torch.tensor', 'torch.tensor', (['s'], {}), '(s)\n', (707, 710), False, 'import torch\n'), ((1081, 1106), 'torch.stack', 'torch.stack', (['_elbo'], {'dim': '(1)'}), '(_elbo, dim=1)\n', (1092, 1106), False, 'import torch\n'), ((1279, 1298), 'torch.log', 'torch.log', (['tensor_s'], {}), '(tensor_s)\n', (1288, 1298), False, 'import torch\n'), ((1235, 1273), 'torch.logsumexp', 'torch.logsumexp', (['likelihood_est'], {'dim': '(1)'}), '(likelihood_est, dim=1)\n', (1250, 1273), False, 'import torch\n')]
|
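The estimator above computes log p(x) ~= logsumexp_s(ELBO_s) - log(S), the log of the average of exp(ELBO) over S samples, done in log space so that large-magnitude ELBOs do not underflow. A tiny numeric check, with values kept small enough that the naive form still works for comparison:

import torch

elbos = torch.tensor([-11.0, -10.0, -12.5])  # S = 3 sampled ELBOs for one input
S = torch.tensor(float(elbos.numel()))
stable = torch.logsumexp(elbos, dim=0) - torch.log(S)
naive = torch.log(torch.exp(elbos).mean())  # underflows for VAE-scale ELBOs
print(stable.item(), naive.item())        # both ~ -10.73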
import pytest
from docs_src.async_constructor import main
@pytest.mark.anyio("asyncio")
async def test_async_constructor() -> None:
await main()
|
[
"docs_src.async_constructor.main",
"pytest.mark.anyio"
] |
[((62, 90), 'pytest.mark.anyio', 'pytest.mark.anyio', (['"""asyncio"""'], {}), "('asyncio')\n", (79, 90), False, 'import pytest\n'), ((145, 151), 'docs_src.async_constructor.main', 'main', ([], {}), '()\n', (149, 151), False, 'from docs_src.async_constructor import main\n')]
|
from glob import glob
import cv2
import os
import sys
import yaml
import matplotlib as mpl
import numpy as np
from skimage.io import imread
import matplotlib.pyplot as plt
from ellipses import LSqEllipse # The code is pulled from https://github.com/bdhammel/least-squares-ellipse-fitting
import time
# This annotation script is written by <NAME> and <NAME> inspired by DeepVog repo by <NAME>
def _get_annotation_path_from_image_path(image_file_name, path, eye_part):
# print('get txt file name: ', path + image_file_name + eye_part + '.txt')
return path + image_file_name + eye_part + '.txt'
def fit_pupil(image_path, saving_directory, curr_image_number, plot=False, write_annotation=False, eye_part='pupil'):
# Mouse enumeration for development use only
# Todo: Remove this after we're done with the design
'''
BACK = 8
FORWARD = 9
LEFT = 1
MIDDLE = 2
RIGHT = 3
'''
upper_color = 'purple'
lower_color = 'green'
if 'pupil' in eye_part:
point_color = 'yellow'
fill_color = 'orange'
elif 'iris' in eye_part:
point_color = 'blue'
fill_color = 'cyan'
elif 'upper' in eye_part:
point_color = 'purple'
fill_color = 'grey'
elif 'lower' in eye_part:
point_color = 'green'
fill_color = 'white'
base = os.path.basename(image_path)
image_file_name = os.path.splitext(base)[0]
result = 'success'
while True:
plt.ion()
fig, ax = plt.subplots(figsize=(15, 15))
img = imread(image_path)
ax.set_title('Annotating {} for ID:{}\n File Name:{}'.format(eye_part.replace('_',''),curr_image_number, os.path.basename(image_path)))
ax.imshow(img, cmap='gray')
ax.set_xlim(-20, 420)
ax.set_ylim(-20, 420)
if 'upper' in eye_part or 'lower' in eye_part:
if 'upper' in eye_part:
annotated_text_file_name = _get_annotation_path_from_image_path(image_file_name, saving_directory,
'_lower')
my_color = lower_color
elif 'lower' in eye_part:
annotated_text_file_name = _get_annotation_path_from_image_path(image_file_name, saving_directory,
'_upper')
my_color = upper_color
if os.path.exists(annotated_text_file_name):
with open(annotated_text_file_name.replace(".txt", "_points.txt")) as f:
w, h = [x for x in next(f).split()] # read first line
array = []
for line in f: # read rest of lines
array.append([np.float(x) for x in line.split(',')])
previous_x = [np.float(x[0]) for x in array]
previous_y = [np.float(x[1]) for x in array]
ax.plot(previous_x, previous_y, c=my_color, marker='x')
key_points = plt.ginput(-1, mouse_pop=2, mouse_stop=3,
timeout=-1) # If negative, accumulate clicks until the input is terminated manually.
points_x = [x[0] for x in key_points]
points_y = [x[1] for x in key_points]
if not key_points:
plt.close()
result = 'proceed'
break
if 'pupil' in eye_part or 'iris' in eye_part:
fitted = LSqEllipse()
fitted.fit([points_x, points_y])
center_coord, width, height, angle = fitted.parameters()
axes = np.array([width, height])
angle = np.rad2deg(angle)
elif 'upper' in eye_part or 'lower' in eye_part:
poly = np.poly1d(np.polyfit(points_x, points_y, 4))
print("\npoly calculated:", poly)
print('\n')
if write_annotation:
annotated_text_file_name = _get_annotation_path_from_image_path(image_file_name, saving_directory,
eye_part)
with open(annotated_text_file_name, 'w+') as f:
# if all([c <= 50 for c in center_coord]):
# points_str = '-1:-1'
# else:
if 'pupil' in eye_part or 'iris' in eye_part:
points_str = '{}, {}'.format(center_coord[0], center_coord[1])
f.write(points_str)
elif 'upper' in eye_part or 'lower' in eye_part:
f.write('{}, {}\n'.format(min(points_x), points_y[np.argmin(points_x)]))
print("The left most eyelid point: {:.2f} {:.2f}".format(min(points_x), points_y[np.argmin(points_x)]))
f.write('{}, {}\n'.format(max(points_x), points_y[np.argmax(points_x)]))
print("The right most eyelid point: {:.2f} {:.2f}".format(max(points_x), points_y[np.argmax(points_x)]))
with open(annotated_text_file_name.replace(".txt","_points.txt"), 'w+') as f: # For detecting selected
for point in key_points:
f.write('{}, {}\n'.format(point[0], point[1]))
if plot:
all_x = [x[0] for x in key_points]
all_y = [x[1] for x in key_points]
plt.scatter(x=all_x, y=all_y, c=point_color, marker='x')
if 'pupil' in eye_part or 'iris' in eye_part:
ell = mpl.patches.Ellipse(xy=center_coord, width=axes[0] * 2,
height=axes[1] * 2, angle=angle, fill=True, color=fill_color, alpha=0.4)
ax.add_artist(ell)
elif 'upper' in eye_part or 'lower' in eye_part:
for my_x in np.arange(min(points_x), max(points_x), 1):
my_y = poly(my_x)
plt.plot(my_x, my_y, c=point_color, marker='o')
output_image_file = saving_directory + image_file_name + eye_part + "_ellipse.png"
fig.savefig(output_image_file)
print("saved: ", os.path.basename(output_image_file))
print('\n')
plt.show()
confirmation_point = plt.ginput(1, timeout=-1, mouse_add=3, mouse_stop=3)
plt.close()
if len(confirmation_point) == 0:
break
# Hacky way to read the q press to quit the loop otherwise the QT doesn't let go of the thread
# TODO: gracefully stop the tool
time.sleep(.01)
answer = input("")
print('q pressed!!', answer)
if answer == 'q':
print("\n\nQuiting the Annotation tool!")
result = 'quit'
# plt.ioff()
# plt.close()
# sys.exit(0)
break
return result
def annotate(image_directory, saving_directory, eye_part='pupil'):
images_paths = sorted(glob(os.path.join(image_directory, '*png')))
# imag_paths = sorted(glob(os.path.join(base_dir, '*jpg')))
annotation_paths = glob(os.path.join(saving_directory, '*txt'))
i = 0
for image_path in images_paths:
print("Running Annotation for: {} ID: {}/{}".format(os.path.basename(image_path), i, len(images_paths)))
base = os.path.basename(image_path)
image_file_name = os.path.splitext(base)[0]
annotated_text_file_name = _get_annotation_path_from_image_path(image_file_name, saving_directory, eye_part)
if annotated_text_file_name in annotation_paths:
print("Found the existing txt file for: ", os.path.basename(annotated_text_file_name))
else:
result = fit_pupil(image_path=image_path, saving_directory=saving_directory, curr_image_number=i, plot=True, write_annotation=True, eye_part=eye_part)
if result == 'quit':
print("\n\nQuit!!\n\n")
break
i = i + 1
def parse_pipeline_parameters(parameters_fpath):
param_dict = dict()
with open(parameters_fpath, "r") as stream:
param_dict = yaml.safe_load(stream)
return param_dict
if __name__ == '__main__':
plt = mpl.pyplot
fig = plt.figure()
# mpl.rcParams["savefig.directory"] = os.chdir(
# os.path.dirname('/home/kamran/Downloads/eye_image_annotation_results/'))
# File Path for the yaml file
parameters_fpath = os.getcwd() + "/annotation_parameters.yaml"
param_dict = parse_pipeline_parameters(parameters_fpath)
image_directory = param_dict['directory']['image_directory']
saving_directory = param_dict['directory']['saving_directory']
eye_part = param_dict['annotation']['eye_part']
print(param_dict)
print(eye_part)
if 'pupil' in eye_part:
eye_part = 'pupil'
elif 'iris' in eye_part:
eye_part = 'iris'
elif 'upper' in eye_part:
eye_part = 'upper'
elif 'lower' in eye_part:
eye_part = 'lower'
else:
raise ValueError("Wrong Eye Part for Annotation!!!")
annotate(image_directory=image_directory, saving_directory=saving_directory, eye_part='_' + eye_part)
sys.exit(0)
|
[
"numpy.polyfit",
"numpy.argmax",
"numpy.argmin",
"matplotlib.pyplot.figure",
"yaml.safe_load",
"os.path.join",
"matplotlib.pyplot.close",
"os.path.exists",
"ellipses.LSqEllipse",
"matplotlib.pyplot.subplots",
"skimage.io.imread",
"matplotlib.pyplot.show",
"os.path.basename",
"numpy.float",
"time.sleep",
"matplotlib.pyplot.ion",
"matplotlib.pyplot.ginput",
"matplotlib.patches.Ellipse",
"sys.exit",
"matplotlib.pyplot.plot",
"os.getcwd",
"matplotlib.pyplot.scatter",
"numpy.rad2deg",
"numpy.array",
"os.path.splitext"
] |
[((1337, 1365), 'os.path.basename', 'os.path.basename', (['image_path'], {}), '(image_path)\n', (1353, 1365), False, 'import os\n'), ((8092, 8104), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (8102, 8104), True, 'import matplotlib.pyplot as plt\n'), ((9034, 9045), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (9042, 9045), False, 'import sys\n'), ((1388, 1410), 'os.path.splitext', 'os.path.splitext', (['base'], {}), '(base)\n', (1404, 1410), False, 'import os\n'), ((1461, 1470), 'matplotlib.pyplot.ion', 'plt.ion', ([], {}), '()\n', (1468, 1470), True, 'import matplotlib.pyplot as plt\n'), ((1489, 1519), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(15, 15)'}), '(figsize=(15, 15))\n', (1501, 1519), True, 'import matplotlib.pyplot as plt\n'), ((1534, 1552), 'skimage.io.imread', 'imread', (['image_path'], {}), '(image_path)\n', (1540, 1552), False, 'from skimage.io import imread\n'), ((3013, 3066), 'matplotlib.pyplot.ginput', 'plt.ginput', (['(-1)'], {'mouse_pop': '(2)', 'mouse_stop': '(3)', 'timeout': '(-1)'}), '(-1, mouse_pop=2, mouse_stop=3, timeout=-1)\n', (3023, 3066), True, 'import matplotlib.pyplot as plt\n'), ((6452, 6468), 'time.sleep', 'time.sleep', (['(0.01)'], {}), '(0.01)\n', (6462, 6468), False, 'import time\n'), ((6985, 7023), 'os.path.join', 'os.path.join', (['saving_directory', '"""*txt"""'], {}), "(saving_directory, '*txt')\n", (6997, 7023), False, 'import os\n'), ((7199, 7227), 'os.path.basename', 'os.path.basename', (['image_path'], {}), '(image_path)\n', (7215, 7227), False, 'import os\n'), ((7987, 8009), 'yaml.safe_load', 'yaml.safe_load', (['stream'], {}), '(stream)\n', (8001, 8009), False, 'import yaml\n'), ((8298, 8309), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (8307, 8309), False, 'import os\n'), ((2426, 2466), 'os.path.exists', 'os.path.exists', (['annotated_text_file_name'], {}), '(annotated_text_file_name)\n', (2440, 2466), False, 'import os\n'), ((3304, 3315), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (3313, 3315), True, 'import matplotlib.pyplot as plt\n'), ((3440, 3452), 'ellipses.LSqEllipse', 'LSqEllipse', ([], {}), '()\n', (3450, 3452), False, 'from ellipses import LSqEllipse\n'), ((3586, 3611), 'numpy.array', 'np.array', (['[width, height]'], {}), '([width, height])\n', (3594, 3611), True, 'import numpy as np\n'), ((3632, 3649), 'numpy.rad2deg', 'np.rad2deg', (['angle'], {}), '(angle)\n', (3642, 3649), True, 'import numpy as np\n'), ((5288, 5344), 'matplotlib.pyplot.scatter', 'plt.scatter', ([], {'x': 'all_x', 'y': 'all_y', 'c': 'point_color', 'marker': '"""x"""'}), "(x=all_x, y=all_y, c=point_color, marker='x')\n", (5299, 5344), True, 'import matplotlib.pyplot as plt\n'), ((6112, 6122), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6120, 6122), True, 'import matplotlib.pyplot as plt\n'), ((6156, 6208), 'matplotlib.pyplot.ginput', 'plt.ginput', (['(1)'], {'timeout': '(-1)', 'mouse_add': '(3)', 'mouse_stop': '(3)'}), '(1, timeout=-1, mouse_add=3, mouse_stop=3)\n', (6166, 6208), True, 'import matplotlib.pyplot as plt\n'), ((6221, 6232), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (6230, 6232), True, 'import matplotlib.pyplot as plt\n'), ((6853, 6890), 'os.path.join', 'os.path.join', (['image_directory', '"""*png"""'], {}), "(image_directory, '*png')\n", (6865, 6890), False, 'import os\n'), ((7254, 7276), 'os.path.splitext', 'os.path.splitext', (['base'], {}), '(base)\n', (7270, 7276), False, 'import os\n'), ((1666, 1694), 'os.path.basename', 'os.path.basename', (['image_path'], {}), '(image_path)\n', (1682, 1694), False, 'import os\n'), ((5426, 5558), 'matplotlib.patches.Ellipse', 'mpl.patches.Ellipse', ([], {'xy': 'center_coord', 'width': '(axes[0] * 2)', 'height': '(axes[1] * 2)', 'angle': 'angle', 'fill': '(True)', 'color': 'fill_color', 'alpha': '(0.4)'}), '(xy=center_coord, width=axes[0] * 2, height=axes[1] * 2,\n angle=angle, fill=True, color=fill_color, alpha=0.4)\n', (5445, 5558), True, 'import matplotlib as mpl\n'), ((6039, 6074), 'os.path.basename', 'os.path.basename', (['output_image_file'], {}), '(output_image_file)\n', (6055, 6074), False, 'import os\n'), ((7131, 7159), 'os.path.basename', 'os.path.basename', (['image_path'], {}), '(image_path)\n', (7147, 7159), False, 'import os\n'), ((7509, 7551), 'os.path.basename', 'os.path.basename', (['annotated_text_file_name'], {}), '(annotated_text_file_name)\n', (7525, 7551), False, 'import os\n'), ((2827, 2841), 'numpy.float', 'np.float', (['x[0]'], {}), '(x[0])\n', (2835, 2841), True, 'import numpy as np\n'), ((2888, 2902), 'numpy.float', 'np.float', (['x[1]'], {}), '(x[1])\n', (2896, 2902), True, 'import numpy as np\n'), ((3736, 3769), 'numpy.polyfit', 'np.polyfit', (['points_x', 'points_y', '(4)'], {}), '(points_x, points_y, 4)\n', (3746, 3769), True, 'import numpy as np\n'), ((5823, 5870), 'matplotlib.pyplot.plot', 'plt.plot', (['my_x', 'my_y'], {'c': 'point_color', 'marker': '"""o"""'}), "(my_x, my_y, c=point_color, marker='o')\n", (5831, 5870), True, 'import matplotlib.pyplot as plt\n'), ((2758, 2769), 'numpy.float', 'np.float', (['x'], {}), '(x)\n', (2766, 2769), True, 'import numpy as np\n'), ((4574, 4593), 'numpy.argmin', 'np.argmin', (['points_x'], {}), '(points_x)\n', (4583, 4593), True, 'import numpy as np\n'), ((4698, 4717), 'numpy.argmin', 'np.argmin', (['points_x'], {}), '(points_x)\n', (4707, 4717), True, 'import numpy as np\n'), ((4791, 4810), 'numpy.argmax', 'np.argmax', (['points_x'], {}), '(points_x)\n', (4800, 4810), True, 'import numpy as np\n'), ((4916, 4935), 'numpy.argmax', 'np.argmax', (['points_x'], {}), '(points_x)\n', (4925, 4935), True, 'import numpy as np\n')]
|
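For the eyelid branches, the tool above fits a degree-4 least-squares polynomial through the clicked points (numpy.polyfit) and evaluates it with numpy.poly1d. A minimal sketch with hypothetical click coordinates:

import numpy as np

xs = np.array([10.0, 60.0, 120.0, 180.0, 240.0])   # hypothetical click x-coords
ys = np.array([200.0, 160.0, 150.0, 165.0, 205.0])  # hypothetical click y-coords
poly = np.poly1d(np.polyfit(xs, ys, 4))  # least-squares degree-4 fit
print(poly(125.0))  # interpolated eyelid y-coordinate at x = 125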
import numpy as np
from VariableUnittest import VariableUnitTest
from gwlfe.Output.AvAnimalNSum import AnimalN
class TestAnimalN(VariableUnitTest):
def test_AnimalN(self):
z = self.z
np.testing.assert_array_almost_equal(
AnimalN.AnimalN_f(z.NYrs, z.NGPctManApp, z.GrazingAnimal_0, z.NumAnimals, z.AvgAnimalWt, z.AnimalDailyN,
z.NGAppNRate, z.Prec, z.DaysMonth,
z.NGPctSoilIncRate, z.GRPctManApp, z.GRAppNRate, z.GRPctSoilIncRate, z.NGBarnNRate,
z.AWMSNgPct, z.NgAWMSCoeffN,
z.RunContPct, z.RunConCoeffN, z.PctGrazing, z.GRBarnNRate, z.AWMSGrPct, z.GrAWMSCoeffN,
z.PctStreams, z.GrazingNRate),
AnimalN.AnimalN(z.NYrs, z.NGPctManApp, z.GrazingAnimal_0, z.NumAnimals, z.AvgAnimalWt, z.AnimalDailyN,
z.NGAppNRate, z.Prec, z.DaysMonth,
z.NGPctSoilIncRate, z.GRPctManApp, z.GRAppNRate, z.GRPctSoilIncRate, z.NGBarnNRate,
z.AWMSNgPct, z.NgAWMSCoeffN,
z.RunContPct, z.RunConCoeffN, z.PctGrazing, z.GRBarnNRate, z.AWMSGrPct, z.GrAWMSCoeffN,
z.PctStreams, z.GrazingNRate), decimal=7)
|
[
"gwlfe.Output.AvAnimalNSum.AnimalN.AnimalN",
"gwlfe.Output.AvAnimalNSum.AnimalN.AnimalN_f"
] |
[((256, 649), 'gwlfe.Output.AvAnimalNSum.AnimalN.AnimalN_f', 'AnimalN.AnimalN_f', (['z.NYrs', 'z.NGPctManApp', 'z.GrazingAnimal_0', 'z.NumAnimals', 'z.AvgAnimalWt', 'z.AnimalDailyN', 'z.NGAppNRate', 'z.Prec', 'z.DaysMonth', 'z.NGPctSoilIncRate', 'z.GRPctManApp', 'z.GRAppNRate', 'z.GRPctSoilIncRate', 'z.NGBarnNRate', 'z.AWMSNgPct', 'z.NgAWMSCoeffN', 'z.RunContPct', 'z.RunConCoeffN', 'z.PctGrazing', 'z.GRBarnNRate', 'z.AWMSGrPct', 'z.GrAWMSCoeffN', 'z.PctStreams', 'z.GrazingNRate'], {}), '(z.NYrs, z.NGPctManApp, z.GrazingAnimal_0, z.NumAnimals, z\n .AvgAnimalWt, z.AnimalDailyN, z.NGAppNRate, z.Prec, z.DaysMonth, z.\n NGPctSoilIncRate, z.GRPctManApp, z.GRAppNRate, z.GRPctSoilIncRate, z.\n NGBarnNRate, z.AWMSNgPct, z.NgAWMSCoeffN, z.RunContPct, z.RunConCoeffN,\n z.PctGrazing, z.GRBarnNRate, z.AWMSGrPct, z.GrAWMSCoeffN, z.PctStreams,\n z.GrazingNRate)\n', (273, 649), False, 'from gwlfe.Output.AvAnimalNSum import AnimalN\n'), ((790, 1181), 'gwlfe.Output.AvAnimalNSum.AnimalN.AnimalN', 'AnimalN.AnimalN', (['z.NYrs', 'z.NGPctManApp', 'z.GrazingAnimal_0', 'z.NumAnimals', 'z.AvgAnimalWt', 'z.AnimalDailyN', 'z.NGAppNRate', 'z.Prec', 'z.DaysMonth', 'z.NGPctSoilIncRate', 'z.GRPctManApp', 'z.GRAppNRate', 'z.GRPctSoilIncRate', 'z.NGBarnNRate', 'z.AWMSNgPct', 'z.NgAWMSCoeffN', 'z.RunContPct', 'z.RunConCoeffN', 'z.PctGrazing', 'z.GRBarnNRate', 'z.AWMSGrPct', 'z.GrAWMSCoeffN', 'z.PctStreams', 'z.GrazingNRate'], {}), '(z.NYrs, z.NGPctManApp, z.GrazingAnimal_0, z.NumAnimals, z.\n AvgAnimalWt, z.AnimalDailyN, z.NGAppNRate, z.Prec, z.DaysMonth, z.\n NGPctSoilIncRate, z.GRPctManApp, z.GRAppNRate, z.GRPctSoilIncRate, z.\n NGBarnNRate, z.AWMSNgPct, z.NgAWMSCoeffN, z.RunContPct, z.RunConCoeffN,\n z.PctGrazing, z.GRBarnNRate, z.AWMSGrPct, z.GrAWMSCoeffN, z.PctStreams,\n z.GrazingNRate)\n', (805, 1181), False, 'from gwlfe.Output.AvAnimalNSum import AnimalN\n')]
|
import numpy as np
from math import log10
from math import sqrt
import time
import networkx as nx
import matplotlib.pyplot as plt
import pydot
import csv
class Graph(object):
def __init__(self):
self.root = None #root/source node is the start of the graph/tree and multicast source
self.nodes = []
self.leaves = []
class Node(object):
def __init__(self):
self.father = None #node's parent
self.id = None #ID of node
self.data = [] # tcpdump of the multicast packets of the end-nodes (and only end-nodes, internal nodes do not have data)
self.children = []
#In Packet Loss, we only care about the packet ID
def GetLinkLossDumps():
#Tcpdumps of source node stored at root.data
    tcpfile = [line.rstrip('\n') for line in open('dumps/n1.txt')] #opens the tcpdump as a list of strings; we assume the source connects to the rest of the tree through a single router
for line in range(len(tcpfile)):
if "tos 0x7" in tcpfile[line]: # tos 0x7 is the characteristic I chose to distinguish my UDP packets used for tomography
temp = tcpfile[line].split()
graph.root.data.append(int(temp[7].replace(",",""))) #We keep only the packet ID
    #tcpdump of every leaf/destination node stored at node.data
for i in range(len(graph.leaves)):
filename = "dumps/n%d.txt" % (graph.leaves[i].id) #example tcpdump file path "thesisdumps/1/n1" if node 1 is a leaf
tcpfile = [line.rstrip('\n') for line in open(filename)]
for line in range(len(tcpfile)):
if "tos 0x7" in tcpfile[line]: # tos 0x7 is the characteristic I chose to distinguish my UDP packets used for tomography
temp = tcpfile[line].split()
graph.leaves[i].data.append(int(temp[7].replace(",",""))) #We keep only the packet ID
#In Link Delay and Utilization, we need both the packet ID and the timestamp of the packet
def GetLinkDelayDumps():
#Tcpdumps of source node stored at root.data
    tcpfile = [line.rstrip('\n') for line in open('dumps/n1.txt')] #opens the tcpdump as a list of strings; we assume the source connects to the rest of the tree through a single router
for line in range(len(tcpfile)):
if "tos 0x7" in tcpfile[line]: # tos 0x7 is the characteristic I chose to distinguish my UDP packets used for tomography
temp = tcpfile[line].split()
graph.root.data.append([temp[0], int(temp[7].replace(",",""))]) #We keep the timestamp and the packet ID
    #tcpdump of every leaf/destination node stored at node.data
for i in range(len(graph.leaves)):
filename = "dumps/n%d.txt" % (graph.leaves[i].id) #example tcpdump file path "thesisdumps/1/n1" if node 1 is a leaf
tcpfile = [line.rstrip('\n') for line in open(filename)]
for line in range(len(tcpfile)):
if "tos 0x7" in tcpfile[line]: # tos 0x7 is the characteristic I chose to distinguish my UDP packets used for tomography
temp = tcpfile[line].split()
graph.leaves[i].data.append([temp[0], int(temp[7].replace(",",""))]) #We keep the timestamp and the packet ID
for node in range(len(graph.leaves)):
TimestampsIntoDelay(graph.root.data,graph.leaves[node].data,node) #we need to turn each timestamp into path delay for each packet
#root's delay is 0 in all packets (starting point)
for k in range(len(graph.root.data)):
graph.root.data[k][0] = float(0)
#Function that measures path Delay from a timestamp, in our algorithm turns every initial leaf's timestamps into delays (difference between start and finish)
def TimestampsIntoDelay(dump1,dump2,node):
startingpackets=len(dump1) #tcpdump of start node
endingpackets = len(dump2) #tcpdump of end node
for packet in range(endingpackets):
        i = 0 # if we are sure that the packets will arrive in order, i = packet for faster runtime
#find packets with same ID
while (dump1[i][1] != dump2[packet][1]):
i += 1
#measure delay for each packet
#seconds difference
timestamp1 = dump1[i][0]
timestamp2 = dump2[packet][0]
secondsdiff = (int(timestamp2[0:2])*3600+int(timestamp2[3:5])*60+int(timestamp2[6:8]))-(int(timestamp1[0:2])*3600+int(timestamp1[3:5])*60+int(timestamp1[6:8]))
#fractions of second
fraction1 = float("0"+timestamp1[8:15])
fraction2 = float("0"+timestamp2[8:15])
#delay
packetdelay=float("{0:.10f}".format(float(secondsdiff)+fraction2-fraction1))
graph.leaves[node].data[packet][0] = packetdelay #change timestamp with delay
# Function that estimates the distances based on the link loss parameter
def EstimateDistancesLoss():
# At this point, graph.nodes = U (= source + destination nodes)
NumberOfNodes = len(graph.nodes)
# Matrix is symmetric -> We only need to traverse through upper triangular and then complete the symmetrical elements
# Also, diagonal of the Matrix will be zero (by definition d(i,i) == 0)
for i in range(NumberOfNodes):
Xi = len(graph.nodes[i].data)/TotalProbes
for j in range(i+1,NumberOfNodes):
# How the distance metric is calculated can be seen in the provided documentation
Xj = len(graph.nodes[j].data)/TotalProbes
XiXj = len(set(graph.nodes[i].data)&set(graph.nodes[j].data))/TotalProbes
distance = log10(Xi*Xj/XiXj**2)
#Symmetric matrix
EstDistMatrix[graph.nodes[i].id][graph.nodes[j].id] = distance
EstDistMatrix[graph.nodes[j].id][graph.nodes[i].id] = distance
# Function that estimates the distances based on the link delay variance parameter
def EstimateDistancesDelayVar():
# At this point, graph.nodes = U (= source + destination nodes)
NumberOfNodes = len(graph.nodes)
# Matrix is symmetric -> We only need to traverse through upper triangular and then complete the symmetrical elements
# Also, diagonal of the Matrix will be zero (by definition d(i,i) == 0)
for i in range(NumberOfNodes):
meanTi = sum([graph.nodes[i].data[k][0] for k in range(len(graph.nodes[i].data))])/len(graph.nodes[i].data)
for j in range(i+1,NumberOfNodes):
# How the distance metric is calculated can be seen in the provided documentation
meanTj = sum([graph.nodes[j].data[k][0] for k in range(len(graph.nodes[j].data))])/len(graph.nodes[j].data)
# Compute the variances
varTi = (sum([(graph.nodes[i].data[k][0]-meanTi)**2 for k in range(len(graph.nodes[i].data))]))/(len(graph.nodes[i].data)-1)
varTj = (sum([(graph.nodes[j].data[k][0]-meanTj)**2 for k in range(len(graph.nodes[j].data))]))/(len(graph.nodes[j].data)-1)
# Find Common ID between the 2 nodes' packets
CommonIDs = []
for k1 in range(len(graph.nodes[i].data)):
for k2 in range(len(graph.nodes[j].data)):
if (graph.nodes[i].data[k1][1] == graph.nodes[j].data[k2][1]):
CommonIDs.append(graph.nodes[i].data[k1][1])
# Compute the covariance
covTiTj = Covariance(i,j,CommonIDs,meanTi,meanTj)
distance = varTi + varTj - 2*covTiTj
# Symmetric matrix
EstDistMatrix[graph.nodes[i].id][graph.nodes[j].id] = distance
EstDistMatrix[graph.nodes[j].id][graph.nodes[i].id] = distance
"""
# Function that estimates the distances based on the link utilization parameter
def EstimateDistancesUtil():
# At this point, graph.nodes = U (= source + destination nodes)
NumberOfNodes = len(graph.nodes)
# Epsilon is a small value to acount for possible measurement noise, defined by user
epsilon = 0.00001
# Matrix is symmetric -> We only need to traverse through upper triangular and then complete the symmetrical elements
# Also, diagonal of the Matrix will be zero (by definition d(i,i) == 0)
for i in range(NumberOfNodes):
minTi = min([graph.nodes[i].data[k][0] for k in range(len(graph.nodes[i].data))])
YiPackets = [graph.nodes[i].data[k][1] for k in range(len(graph.nodes[i].data)) if (graph.nodes[i].data[k][0]-minTi <= epsilon)]
Yi = len(YiPackets)/TotalProbes
for j in range(i+1,NumberOfNodes):
# How the distance metric is calculated can be seen in the provided documentation
minTj = min([graph.nodes[j].data[k][0] for k in range(len(graph.nodes[j].data))])
YjPackets = [graph.nodes[j].data[k][1] for k in range(len(graph.nodes[j].data)) if (graph.nodes[j].data[k][0]-minTj <= epsilon)]
Yj = len(YjPackets)/TotalProbes
YiYj = len(set(YiPackets)&set(YjPackets))/TotalProbes
distance = log10(Yi*Yj/YiYj**2)
# Symmetric matrix
EstDistMatrix[graph.nodes[i].id][graph.nodes[j].id] = distance
EstDistMatrix[graph.nodes[j].id][graph.nodes[i].id] = distance
"""
# Function that computes the covariance of nodes i,j
def Covariance(i,j,CommonIDs,meanTi,meanTj):
    #Initializations
covar = 0
pos1 = 0
pos2 = 0
length1 = len(graph.nodes[i].data)
length2 = len(graph.nodes[j].data)
for packetID in CommonIDs:
#find position of packetID in node i
for k1 in range(pos1,length1):
if (graph.nodes[i].data[k1][1] == packetID):
pos1=k1
break
#find position of packetID in node j
for k2 in range(pos2,length2):
if (graph.nodes[j].data[k2][1] == packetID):
pos2=k2
break
covar += (graph.nodes[i].data[pos1][0]-meanTi)*(graph.nodes[j].data[pos2][0]-meanTj)
covar = covar/(len(CommonIDs)-1)
return covar
def EstimateScoreFunction():
# At this point, graph.leaves = D (= destination nodes)
NumberOfLeaves = len(graph.leaves)
# Matrix is symmetric -> We only need to traverse through upper triangular and then complete the symmetrical elements
# Also, diagonal of the Matrix will be equal to zero (we need pair of nodes)
for i in range(NumberOfLeaves):
for j in range(i+1,NumberOfLeaves):
# Score Function is calulated like this:
# ρ(i,j) = (d(s,i)+d(s,j)-d(i,j))/2
score = (EstDistMatrix[0][graph.leaves[i].id] + EstDistMatrix[0][graph.leaves[j].id] - EstDistMatrix[graph.leaves[i].id][graph.leaves[j].id])/2
#Symmetric matrix
ScoreFunction[graph.leaves[i].id][graph.leaves[j].id] = score
ScoreFunction[graph.leaves[j].id][graph.leaves[i].id] = score
# Function that calculates Δ(delta) so that the General Tree algorithm can be properly implemented
def CalculateDelta():
if (param == 'loss'):
successrate = 0.9995 #should be equal to the minimum link length in terms of loss, changes based on each topology
delta = -log10(successrate)
elif (param == 'delayvar'):
delta = 0.000001 #should be equal to the minimum link length in terms of delay variance, changes based on each topology
else:
pass #Link Utilization not measured in our algorithm
return delta
# Function that visualizes the discovered topology/tree in a .png file
def DrawTopology(param):
#Create Graph
G = pydot.Dot(graph_type='graph') #G =nx.Graph()
for i in range(len(graph.nodes)-1,-1,-1):
for j in range(len(graph.nodes[i].children)):
edge = pydot.Edge(graph.nodes[i].id,graph.nodes[i].children[j].id)
G.add_edge(edge)
#Draw Graph with desired Parameters
G.write_png('Results/'+param+'.png')
# Function that writes the results for each inference parameter in a .csv file
def ExtractResults(param):
# Success/Loss Rate of each Link
if (param == 'loss'):
with open('Results/Loss.csv', 'w') as csvfile:
filewriter = csv.writer(csvfile, delimiter=',',quotechar='|', quoting=csv.QUOTE_MINIMAL)
filewriter.writerow(['Link', 'Success Rate'])
for i in range(1,len(graph.nodes)):
SuccessRate = EstDistMatrix[graph.nodes[i].father.id][graph.nodes[i].id]
SuccessRate = 10**(-SuccessRate)
filewriter.writerow([graph.nodes[i].id,SuccessRate])
# Delay Variance of each Link
elif (param == 'delayvar'):
with open('Results/DelayVariance.csv', 'w') as csvfile:
filewriter = csv.writer(csvfile, delimiter=',',quotechar='|', quoting=csv.QUOTE_MINIMAL)
filewriter.writerow(['Link', 'Delay Variance'])
for i in range(1,len(graph.nodes)):
#LinkDelayVar = sqrt(EstDistMatrix[graph.nodes[i].father.id][graph.nodes[i].id]) ###If I want the Standard Deviation instead of Variance
LinkDelayVar = EstDistMatrix[graph.nodes[i].father.id][graph.nodes[i].id]
filewriter.writerow([graph.nodes[i].id,LinkDelayVar])
# Utilization of each LinkUtil
else:
"""
with open('Results/Utilization.csv', 'w') as csvfile:
filewriter = csv.writer(csvfile, delimiter=',',quotechar='|', quoting=csv.QUOTE_MINIMAL)
filewriter.writerow(['Link', 'Utilization'])
for i in range(1,len(graph.nodes)):
LinkUtil = EstDistMatrix[graph.nodes[i].father.id][graph.nodes[i].id]
LinkUtil = 10**(-LinkUtil)
filewriter.writerow([graph.nodes[i].id,LinkUtil])
"""
pass
### Start of Script ###
# input: Destination Nodes' IDs (Leaves) are given in the DstNodes.txt file
# Create a list with all the Destination Nodes
DstNodes = [line.rstrip('\n').split(' ') for line in open('DstNodes.txt')]
DstNodes = list(map(int,DstNodes[0]))
# All the inference parameters we want to measure
inferparams = ['loss','delayvar','utilization']
# Perform the algorithm for each inference parameter in the inferparams list
for param in inferparams:
# Initial Graph Creation
# V = {s} : only source node initially on graph
# E = { } : no edges created initially
graph = Graph()
#creation of source node
node = Node()
node.id = 0 #node ID of root is 0
graph.root = node
graph.nodes.append(graph.root)
# Destination Nodes and Graph leaves are the same
# So we create the graph leaves (without any edges yet) to be able to extract the tcpdumps correctly
for i in range(len(DstNodes)):
node = Node()
node.id = DstNodes[i]
graph.nodes.append(node)
graph.leaves.append(node)
######### Algorithm: Rooted Neighbor-Joining (RNJ) Algorithm for Binary Trees #########
#We don't know number of nodes, so we start giving ID numbers to new nodes, starting from max ID of the existing Destination nodes
FreeID = max(DstNodes) + 1
#Get the tcpdumps for the root node and the leaves
if (param == 'loss'):
GetLinkLossDumps()
elif (param == 'delayvar'):
GetLinkDelayDumps()
else:
break #delete if you want to measure link utilization too
pass #GetLinkDelayDumps() used also for utilization
#Total Probes are equal to the probes sent from the source
TotalProbes = len(graph.root.data)
# Estimated Distance Matrix, default size = up to 200 nodes topology
# Holds the distance metric values for each path,
# (i,j) element -> Distance metric of path from node i to node j, d(i,j)
EstDistMatrix = np.zeros((200,200),dtype='f')
#Create the Estimate Distances Matrix
if (param == 'loss'):
EstimateDistancesLoss()
elif(param == 'delayvar'):
EstimateDistancesDelayVar()
else:
pass #EstimateDistancesUtil() used
# Step 1
# Score Function matrix, default size = up to 200 nodes topology (keep same with Estimated Distance matrix)
# Hold the score function for each pair of nodes i,j
# (i,j) element -> distance metric for pair of nodes i,j, ρ(i,j)
ScoreFunction = np.zeros((200,200),dtype='f')
EstimateScoreFunction()
# necessary to start the algorithm correctly, normally we shouldn't append destination nodes upon creation but it helped the tcpdumps function
graph.nodes = []
graph.nodes.append(graph.root)
# Step 2.1
while (len(graph.leaves) != 1):
        # Find i*,j* in D with the largest ScoreFunction (tie is broken arbitrarily as we only take the first occurrence)
NumberOfLeaves = len(graph.leaves)
# max initialization
maxScore = 0
        Istar = Jstar = 0
# find the max score
for i in range(NumberOfLeaves):
for j in range(i+1,NumberOfLeaves):
if (ScoreFunction[graph.leaves[i].id][graph.leaves[j].id] >= maxScore):
maxScore = ScoreFunction[graph.leaves[i].id][graph.leaves[j].id]
Istar = graph.leaves[i].id
Jstar = graph.leaves[j].id
#Create a node f as parent of i* and j*
FatherNode = Node()
FatherNode.id = FreeID
FreeID += 1
# D = D \ {i*,j*}
# V = V U {i*,j*} , E = E U {(f,i*),(f,j*)}
for i in range(len(graph.leaves)): # for i*
if (graph.leaves[i].id == Istar):
graph.nodes.append(graph.leaves[i]) # V = V U {i*}
graph.leaves[i].father = FatherNode # E U {(f,i*)}
FatherNode.children.append(graph.leaves[i]) # E U {(f,i*)}
del graph.leaves[i] # D = D \ {i*}
break
for i in range(len(graph.leaves)): # for j*
if (graph.leaves[i].id == Jstar):
graph.nodes.append(graph.leaves[i]) # V = V U {j*}
graph.leaves[i].father = FatherNode # E U {(f,j*)}
FatherNode.children.append(graph.leaves[i]) # E U {(f,i*)}
del graph.leaves[i] # D = D \ {j*}
break
# Step 2.2
# d(s,f) = ρ(i*,j*)
EstDistMatrix[0][FatherNode.id] = ScoreFunction[Istar][Jstar]
EstDistMatrix[FatherNode.id][0] = EstDistMatrix[0][FatherNode.id] # SYMMETRY
# d(f,i*) = d(s,i*) - ρ(i*,j*)
EstDistMatrix[FatherNode.id][Istar] = EstDistMatrix[0][Istar] - ScoreFunction[Istar][Jstar]
EstDistMatrix[Istar][FatherNode.id] = EstDistMatrix[FatherNode.id][Istar] # SYMMETRY
# d(f,j*) = d(s,j*) - ρ(i*,j*)
EstDistMatrix[FatherNode.id][Jstar] = EstDistMatrix[0][Jstar] - ScoreFunction[Istar][Jstar]
EstDistMatrix[Jstar][FatherNode.id] = EstDistMatrix[FatherNode.id][Jstar] # SYMMETRY
# Step 2.3
# In this step we find if there are more than 2 siblings (if Istar,Jstar nodes have another sibling)
#Calculate Δ based on the link parameter that is inferred
delta = CalculateDelta()
# For every k in D such that ρ(i*,j*) - ρ(i*,k) <= Δ/2:
LeavesToDel = []
for k in range (len(graph.leaves)):
SiblID = graph.leaves[k].id # SiblID = node k ID
if (ScoreFunction[Istar][Jstar] - ScoreFunction[Istar][SiblID] <= delta/2):
# d(f,k) = d(s,k) - ρ(i*,j*)
EstDistMatrix[FatherNode.id][SiblID] = EstDistMatrix[0][SiblID] - ScoreFunction[Istar][Jstar]
EstDistMatrix[SiblID][FatherNode.id] = EstDistMatrix[FatherNode.id][SiblID] #SYMMETRY
# D = D \ {k}
# V = V U {k} , E = E U {(f,k)}
graph.nodes.append(graph.leaves[k]) # V = V U {k}
graph.leaves[k].father = FatherNode # E U {(f,k)}
FatherNode.children.append(graph.leaves[k]) # E U {(f,k)}
LeavesToDel.append(k) # D = D \ {k}, store the values to del together
#create a temporary list that will have all the graph.leaves nodes besides those that we want to delete from step 2.3
temp = []
for i in range(len(graph.leaves)):
if i not in LeavesToDel:
temp.append(graph.leaves[i])
graph.leaves = temp
# Step 2.4
for k in range(len(graph.leaves)):
# d(k,f) = 1/2[d(k,i*)-d(f,i*)] + 1/2[d(k,j*)-d(f,j*)]
EstDistMatrix[graph.leaves[k].id][FatherNode.id] = 0.5*(EstDistMatrix[graph.leaves[k].id][Istar]-EstDistMatrix[FatherNode.id][Istar]) + 0.5*(EstDistMatrix[graph.leaves[k].id][Jstar]-EstDistMatrix[FatherNode.id][Jstar])
EstDistMatrix[FatherNode.id][graph.leaves[k].id] = EstDistMatrix[graph.leaves[k].id][FatherNode.id] #SYMMETRY
# ρ(k,f) = 1/2[ρ(k,i*)+ρ(k,j*)]
ScoreFunction[graph.leaves[k].id][FatherNode.id] = 0.5*(ScoreFunction[graph.leaves[k].id][Istar]+ScoreFunction[graph.leaves[k].id][Jstar])
ScoreFunction[FatherNode.id][graph.leaves[k].id] = ScoreFunction[graph.leaves[k].id][FatherNode.id] # SYMMETRY
# D = D U f
graph.leaves.append(FatherNode)
# If |D| = 1, for the i in D: V = V U {i} , E = E U (s,i)
graph.nodes.append(graph.leaves[0])
graph.leaves[0].father = graph.root
graph.root.children = [graph.leaves[0]]
# Draw the Topology produced by Tomography
# variable "param" is used to draw the topology based on the specific inference parameter each time
DrawTopology(param)
# Write the results for each inference parameter performed in a csv file
ExtractResults(param)
|
[
"csv.writer",
"numpy.zeros",
"pydot.Dot",
"math.log10",
"pydot.Edge"
] |
[((11302, 11331), 'pydot.Dot', 'pydot.Dot', ([], {'graph_type': '"""graph"""'}), "(graph_type='graph')\n", (11311, 11331), False, 'import pydot\n'), ((15441, 15472), 'numpy.zeros', 'np.zeros', (['(200, 200)'], {'dtype': '"""f"""'}), "((200, 200), dtype='f')\n", (15449, 15472), True, 'import numpy as np\n'), ((15965, 15996), 'numpy.zeros', 'np.zeros', (['(200, 200)'], {'dtype': '"""f"""'}), "((200, 200), dtype='f')\n", (15973, 15996), True, 'import numpy as np\n'), ((5425, 5451), 'math.log10', 'log10', (['(Xi * Xj / XiXj ** 2)'], {}), '(Xi * Xj / XiXj ** 2)\n', (5430, 5451), False, 'from math import log10\n'), ((10912, 10930), 'math.log10', 'log10', (['successrate'], {}), '(successrate)\n', (10917, 10930), False, 'from math import log10\n'), ((11466, 11526), 'pydot.Edge', 'pydot.Edge', (['graph.nodes[i].id', 'graph.nodes[i].children[j].id'], {}), '(graph.nodes[i].id, graph.nodes[i].children[j].id)\n', (11476, 11526), False, 'import pydot\n'), ((11887, 11963), 'csv.writer', 'csv.writer', (['csvfile'], {'delimiter': '""","""', 'quotechar': '"""|"""', 'quoting': 'csv.QUOTE_MINIMAL'}), "(csvfile, delimiter=',', quotechar='|', quoting=csv.QUOTE_MINIMAL)\n", (11897, 11963), False, 'import csv\n'), ((12431, 12507), 'csv.writer', 'csv.writer', (['csvfile'], {'delimiter': '""","""', 'quotechar': '"""|"""', 'quoting': 'csv.QUOTE_MINIMAL'}), "(csvfile, delimiter=',', quotechar='|', quoting=csv.QUOTE_MINIMAL)\n", (12441, 12507), False, 'import csv\n')]
|
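The loss-based metric in EstimateDistancesLoss above is d(i,j) = log10(Xi*Xj/XiXj**2). Under the independent Bernoulli loss model this equals the tree distance between i and j when every link is given length -log10(success rate), which makes the metric additive and lets rho(i,j) = (d(s,i)+d(s,j)-d(i,j))/2 recover the shared-path length that RNJ maximises when pairing siblings. A worked check on a toy three-link tree (the success rates are made up):

from math import log10, isclose

# toy tree: s -> a -> {i, j}; per-link probe success rates
A_sa, A_ai, A_aj = 0.99, 0.98, 0.97
Xi = A_sa * A_ai           # fraction of probes seen at i
Xj = A_sa * A_aj           # fraction seen at j
XiXj = A_sa * A_ai * A_aj  # fraction seen at both (shared link counted once)

d_ij = log10(Xi * Xj / XiXj ** 2)
assert isclose(d_ij, -log10(A_ai) - log10(A_aj))  # shared link cancels out

d_si, d_sj = -log10(Xi), -log10(Xj)  # source-to-leaf distances (Xs = 1)
rho = (d_si + d_sj - d_ij) / 2
assert isclose(rho, -log10(A_sa))  # length of the shared path s -> a
print(d_ij, rho)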
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import ytu
def test_is_youtube():
tests = [
('http://youtu.be/zoLVUxKCWhY', True),
('http://www.youtube.com/watch?v=VvRC0wxM-yM', True),
('http://wwwwwwyoutube.com/watch?v=VvRC0wxM-yM', False),
('http://example.com/zoLVUxKCWhY', False)
]
for t in tests:
assert ytu.is_youtube(t[0]) is t[1]
def test_video_id():
# All these URLs occurred in the reddit submission corpus.
tests = [
('http://youtu.be/zoLVUxKCWhY', 'zoLVUxKCWhY'),
('http://www.youtube.com/watch?v=VvRC0wxM-yM', 'VvRC0wxM-yM'),
('http://www.youtube.com/watch?v=thsc60UTUIE&feature=youtu.be', 'thsc60UTUIE'),
('http://www.youtube.com/watch?v=a3asbkY0tTE?', 'a3asbkY0tTE'),
('http://www.youtube.com/watch?v=oHg5SJYRHA0???', 'oHg5SJYRHA0'),
('http://www.youtube.com/watch?v=55jUNNPT1eMads/4/NaQOUKyR9CY', '55jUNNPT1eM'),
('https://www.youtube.com/verify_age?next_url=http%3A//www.youtube.com/watch%3Fv%3DGqj1N9qeWXI%26feature%3Dmfu_in_order%26list%3DUL', 'Gqj1N9qeWXI'),
('https://www.youtube.com//watch?v=PQGrIsYUm4c', 'PQGrIsYUm4c'), # 2 leading slashes in path
('https://www.youtube.com/v/j4FNGsNY3nI&amp;rel=0&amp;egm=0&amp;showinfo=0&amp;fs=1', 'j4FNGsNY3nI'),
('https://www.youtube.com/embed/mGnyH-SCZpM?autoplay=1&hd=1&KeepThis=true&TB_iframe=true&height=370&width=640?autoplay=1&hd=1', 'mGnyH-SCZpM'),
('https://www.youtube.com/verify_age?&next_url=/watch%3Fv%3DsTPsFIsxM3w', 'sTPsFIsxM3w'),
('https://www.youtube.com/attribution_link?a=qbb_5VvcvY8&u=%2Fwatch%3Fv%3DFgFeVlw2Ywg%26feature%3Dshare', 'FgFeVlw2Ywg'),
('https://www.youtube.com/attribution_link?a=ar77oUQIEOcNs-Wdao4XJw&u=%2Fwatch%3Fv%3D0eXS1NI6Q6Y%26feature%3Dshare', '0eXS1NI6Q6Y'),
('https://www.youtube.com/?v=_RSaYVgd7yk', None),
('https://www.youtube.com/watch?v=U3M8pXZusQ', None)
]
for t in tests:
assert ytu.video_id(t[0]) == t[1]
|
[
"ytu.is_youtube",
"ytu.video_id"
] |
[((362, 382), 'ytu.is_youtube', 'ytu.is_youtube', (['t[0]'], {}), '(t[0])\n', (376, 382), False, 'import ytu\n'), ((2048, 2066), 'ytu.video_id', 'ytu.video_id', (['t[0]'], {}), '(t[0])\n', (2060, 2066), False, 'import ytu\n')]
|
import mock
import unittest
import dbt.adapters
import dbt.flags as flags
from pyhive import hive
from dbt.adapters.spark import SparkAdapter
import agate
from .utils import config_from_parts_or_dicts, inject_adapter
class TestSparkAdapter(unittest.TestCase):
def setUp(self):
flags.STRICT_MODE = True
self.project_cfg = {
'name': 'X',
'version': '0.1',
'profile': 'test',
'project-root': '/tmp/dbt/does-not-exist',
'quoting': {
'identifier': False,
'schema': False,
}
}
def get_target_http(self, project):
return config_from_parts_or_dicts(project, {
'outputs': {
'test': {
'type': 'spark',
'method': 'http',
'schema': 'analytics',
'host': 'myorg.sparkhost.com',
'port': 443,
'token': '<PASSWORD>',
'cluster': '01234-23423-coffeetime',
}
},
'target': 'test'
})
def get_target_thrift(self, project):
return config_from_parts_or_dicts(project, {
'outputs': {
'test': {
'type': 'spark',
'method': 'thrift',
'schema': 'analytics',
'host': 'myorg.sparkhost.com',
'port': 10001,
'user': 'dbt'
}
},
'target': 'test'
})
def test_http_connection(self):
config = self.get_target_http(self.project_cfg)
adapter = SparkAdapter(config)
def hive_http_connect(thrift_transport):
self.assertEqual(thrift_transport.scheme, 'https')
self.assertEqual(thrift_transport.port, 443)
self.assertEqual(thrift_transport.host, 'myorg.sparkhost.com')
self.assertEqual(thrift_transport.path, '/sql/protocolv1/o/0/01234-23423-coffeetime')
with mock.patch.object(hive, 'connect', new=hive_http_connect):
connection = adapter.acquire_connection('dummy')
self.assertEqual(connection.state, 'open')
self.assertNotEqual(connection.handle, None)
def test_thrift_connection(self):
config = self.get_target_thrift(self.project_cfg)
adapter = SparkAdapter(config)
def hive_thrift_connect(host, port, username):
self.assertEqual(host, 'myorg.sparkhost.com')
self.assertEqual(port, 10001)
self.assertEqual(username, 'dbt')
with mock.patch.object(hive, 'connect', new=hive_thrift_connect):
connection = adapter.acquire_connection('dummy')
self.assertEqual(connection.state, 'open')
self.assertNotEqual(connection.handle, None)
|
[
"dbt.adapters.spark.SparkAdapter",
"mock.patch.object"
] |
[((1673, 1693), 'dbt.adapters.spark.SparkAdapter', 'SparkAdapter', (['config'], {}), '(config)\n', (1685, 1693), False, 'from dbt.adapters.spark import SparkAdapter\n'), ((2400, 2420), 'dbt.adapters.spark.SparkAdapter', 'SparkAdapter', (['config'], {}), '(config)\n', (2412, 2420), False, 'from dbt.adapters.spark import SparkAdapter\n'), ((2052, 2109), 'mock.patch.object', 'mock.patch.object', (['hive', '"""connect"""'], {'new': 'hive_http_connect'}), "(hive, 'connect', new=hive_http_connect)\n", (2069, 2109), False, 'import mock\n'), ((2637, 2696), 'mock.patch.object', 'mock.patch.object', (['hive', '"""connect"""'], {'new': 'hive_thrift_connect'}), "(hive, 'connect', new=hive_thrift_connect)\n", (2654, 2696), False, 'import mock\n')]
|
from rest_framework import permissions
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
SchemaView = get_schema_view(
openapi.Info(
title='Multauth Example API',
default_version='v1',
description='Authentication flow: email, password and passcode (using Google Authenticator or similar app)',
# terms_of_service="https://www.google.com/policies/terms/",
# contact=openapi.Contact(email="<EMAIL>"),
# license=openapi.License(name="BSD License"),
),
#validators=['flex', 'ssv'],
public=True,
permission_classes=(permissions.AllowAny,),
)
|
[
"drf_yasg.openapi.Info"
] |
[((146, 333), 'drf_yasg.openapi.Info', 'openapi.Info', ([], {'title': '"""Multauth Example API"""', 'default_version': '"""v1"""', 'description': '"""Authentication flow: email, password and passcode (using Google Authenticator or similar app)"""'}), "(title='Multauth Example API', default_version='v1',\n description=\n 'Authentication flow: email, password and passcode (using Google Authenticator or similar app)'\n )\n", (158, 333), False, 'from drf_yasg import openapi\n')]
|
# -*- coding: utf-8 -*-
"""
Created on Wed Nov 4 00:22:32 2020
@author: <NAME>
"""
import os
import copy
from typing import Union, Any
try:
import simplejson as json
except ImportError:
import json
from .plugins.base_file import BaseFilePlugin, _info
from .exceptions.file_exceptions import FileNotFoundException
from .options import Options
from .xnodes import XNode, XDict, XList, create_xnode
from .file_list import FileList
# plugins
from .plugins.plugin_json import PluginJson
from .plugins.plugin_xjson import PluginXJson
from .plugins.plugin_text import PluginText
from .plugins.plugin_csv import PluginCsv
from .plugins.plugin_yaml import PluginYaml
from .plugins.plugin_xml import PluginXml
# /plugins
_index, _aliases, _required_plugins, default_exts \
= 'index', '_aliases', {'PluginJson', 'PluginXJson', 'PluginText', 'PluginCsv', 'PluginXml', 'PluginYaml'}, ['json', 'xjson', 'xml']
class XJson:
def __init__(self, name: str = '', **options) -> None:
self._options = Options(options)
self.structure = XDict(owner=self) # result structure
self._load_plugins()
self.file_list = FileList()
        if name:
            self._scan(name)
    def _load_plugins(self):
        self.plugins = {}
        # work on a copy so the module-level _required_plugins set is not mutated
        plugin_names = set(_required_plugins)
        try:
            plugin_names.update(set(self.options.plugins))
        except KeyError:
            pass
        for name in plugin_names:
            cl = globals().get(name, None)
            if cl is not None:
                self.plugins[name] = cl
def _scan(self, name: str) -> None:
        ''' Scan the directory or file to build the common structure.'''
file_name = name
exts = [''] + ['.' + val for val in default_exts]
for ext in exts:
if os.path.exists(file_name + ext):
self.structure = self._node_from_file(file_name + ext)
break
def _get_index_file(self, path):
"""find index file with extension priority from default_exts"""
_fn = os.path.join(path, "index")
for ext in default_exts:
fn = _fn + "." + ext
if os.path.exists(fn):
return fn
def _node_from_file(self, file_name: str) -> XNode:
"""Create nmode from file """
file = self.file_list.get(file_name)
if file.is_file:
node = self._apply_plugins(file_name)
else:
index_fn = self._get_index_file(file_name)
index_file = self.file_list.get(index_fn)
if index_file is None:
node = XDict(owner=self, _file=file)
else:
node = self._apply_plugins(index_fn)
files = os.listdir(file_name)
for fn in files:
#if fn == _index.split(".")[:-1]:
if index_file is not None and fn == index_file.name:
continue
(name, ext) = os.path.splitext(fn)
if name not in node:
node[name] = XDict(owner=self, _file=file)
value = self._node_from_file(os.path.join(file_name, fn))
if isinstance(value, XDict):
node[name].update(value)
elif isinstance(value, XList):
node[name].append(value)
else:
node[name] = value
return node
def _apply_plugins(self, file_name: str) -> XNode:
'''Apply plugins to the file file_name and create & return node'''
for name in self.plugins:
Plugin = self.plugins[name]
plugin = Plugin(file_name)
if plugin.check():
return plugin.get()
return XDict(self)
def __str__(self):
return self.dump(self.structure)
def clear(self):
self.structure = {}
def refresh(self, name = '') -> None:
self.clear()
self._scan(name)
def alias(self, name: str):
self.structure.alias(name)
def get_root_value(self, name) -> str:
""" return root value, only str or int (return str from int)"""
result = ''
if name in self.structure:
val = self.structure[name]
if isinstance(val, str):
result = val
elif isinstance(val, int):
result = str(val)
return result
@property
def options(self) -> Options:
return self._options
def _dump_val(self, node, key='', short=True, indent='', exclude_info=True):
return "{}{}{}\n".format(indent, key + (": " if key else ""), node)
def _dump_arr(self, node: XList, key='', short=True, indent='', exclude_info=True):
result = ''
n = 0
for value in node:
value = self.dump(value, key="#" + str(n), short=short, indent=indent + ". ", exclude_info=exclude_info)
result += value
n += 1
result = '{0}{1}{2}'.format(indent, (key + ": \n" if key else ""), result)
return result
def _dump_obj(self, node: XDict, key='', short=True, indent='', exclude_info=True):
result = ''
for name in node:
if exclude_info and name == _info:
continue
value = node[name]
result += self.dump(value, key=name, short=short, indent=indent + ". ", exclude_info=exclude_info)
result = '{}{}{}'.format(indent, (key + ": \n" if key else ""), result)
return result
def dump(self, node, key='', short=True, indent='', exclude_info = True):
result = ''
#if node is None:
        #    node = self.structure  # TODO: this recurses forever when None appears inside; handle that case differently
if isinstance(node, XList):
result = self._dump_arr(node, key=key, short=short, indent=indent)
elif isinstance(node, XDict):
result = self._dump_obj(node, key=key, short=short, indent=indent)
else:
result = self._dump_val(node, key=key, short=short, indent=indent)
return result
def _copy_node(self, node: Union[dict, list] = None, exclude_info = False):
if isinstance(node, dict): # for DICT
result = {}
for name in node:
if exclude_info and name == _info:
continue
value = node[name]
if isinstance(value, dict) or isinstance(value, list):
value = self._copy_node(value, exclude_info)
result[name] = value
else: # for LIST
result = []
for value in node:
if isinstance(value, dict) or isinstance(value, list):
value = self._copy_node(value, exclude_info)
result.append(value)
return result
def copy_from(self, src):
self._options = src.options
self._load_plugins()
self.structure = src._copy_node(src.structure, False)
return self
def from_dict(self, data: dict):
self.structure = copy.deepcopy(data)
return self
def to_dict(self, exclude_info = True):
return self._copy_node(self.structure, exclude_info)
|
[
"copy.deepcopy",
"os.path.exists",
"os.path.splitext",
"os.path.join",
"os.listdir"
] |
[((2111, 2138), 'os.path.join', 'os.path.join', (['path', '"""index"""'], {}), "(path, 'index')\n", (2123, 2138), False, 'import os\n'), ((7278, 7297), 'copy.deepcopy', 'copy.deepcopy', (['data'], {}), '(data)\n', (7291, 7297), False, 'import copy\n'), ((1855, 1886), 'os.path.exists', 'os.path.exists', (['(file_name + ext)'], {}), '(file_name + ext)\n', (1869, 1886), False, 'import os\n'), ((2223, 2241), 'os.path.exists', 'os.path.exists', (['fn'], {}), '(fn)\n', (2237, 2241), False, 'import os\n'), ((2805, 2826), 'os.listdir', 'os.listdir', (['file_name'], {}), '(file_name)\n', (2815, 2826), False, 'import os\n'), ((3039, 3059), 'os.path.splitext', 'os.path.splitext', (['fn'], {}), '(fn)\n', (3055, 3059), False, 'import os\n'), ((3208, 3235), 'os.path.join', 'os.path.join', (['file_name', 'fn'], {}), '(file_name, fn)\n', (3220, 3235), False, 'import os\n')]
|
#!/usr/bin/python3
# driver_trips.py: summarize miles per driver and show a list for each
# driver of the trips they took.
# Two approaches are demonstrated:
# - Two queries. First query retrieves the summary values, second the
# list entries. Print the list entries, preceding the list for each
# driver with the corresponding summary information.
# - Single query to retrieve the list entries. Iterate through the list
# once to compute the summary values, and a second time to print the
# summary and list information.
import mysql.connector
import cookbook
try:
conn = cookbook.connect()
print("Summary, method 1:")
#@ _TWO_QUERY_
# select total miles per driver and construct a dictionary that
# maps each driver name to days on the road and miles driven
name_map = {}
cursor = conn.cursor()
cursor.execute('''
SELECT name, COUNT(name), SUM(miles)
FROM driver_log GROUP BY name
''')
for (name, days, miles) in cursor:
name_map[name] = (days, miles)
# select trips for each driver and print the report, displaying the
# summary entry for each driver prior to the list of trips
cursor.execute('''
SELECT name, trav_date, miles
FROM driver_log ORDER BY name, trav_date
''')
cur_name = ""
for (name, trav_date, miles) in cursor:
if cur_name != name: # new driver; print driver's summary info
print("Name: %s; days on road: %d; miles driven: %d" %
(name, name_map[name][0], name_map[name][1]))
cur_name = name
print(" date: %s, trip length: %d" % (trav_date, miles))
cursor.close()
#@ _TWO_QUERY_
print("")
print("Summary, method 2:")
#@ _ONE_QUERY_
# get list of trips for the drivers
cursor = conn.cursor()
cursor.execute('''
SELECT name, trav_date, miles FROM driver_log
ORDER BY name, trav_date
''')
# fetch rows into data structure because we
# must iterate through them multiple times
rows = cursor.fetchall()
cursor.close()
# iterate through rows once to construct a dictionary that
# maps each driver name to days on the road and miles driven
# (the dictionary entries are lists rather than tuples because
# we need mutable values that can be modified in the loop)
name_map = {}
for (name, trav_date, miles) in rows:
if name not in name_map: # initialize entry if nonexistent
name_map[name] = [0, 0]
name_map[name][0] += 1 # count days
name_map[name][1] += miles # sum miles
# iterate through rows again to print the report, displaying the
# summary entry for each driver prior to the list of trips
cur_name = ""
for (name, trav_date, miles) in rows:
if cur_name != name: # new driver; print driver's summary info
print("Name: %s; days on road: %d; miles driven: %d" %
(name, name_map[name][0], name_map[name][1]))
cur_name = name
print(" date: %s, trip length: %d" % (trav_date, miles))
#@ _ONE_QUERY_
except mysql.connector.Error as e:
print("Error: %s" % e)
else:
conn.close()
|
[
"cookbook.connect"
] |
[((589, 607), 'cookbook.connect', 'cookbook.connect', ([], {}), '()\n', (605, 607), False, 'import cookbook\n')]
|
# Generated by Django 3.1.6 on 2021-02-12 07:40
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Region',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=128)),
('iso', models.CharField(blank=True, default=None, max_length=2)),
('flag', models.FileField(upload_to='flags', validators=[django.core.validators.FileExtensionValidator(['svg', 'png'])])),
('longitude', models.DecimalField(blank=True, decimal_places=8, max_digits=16, null=True)),
('latitude', models.DecimalField(blank=True, decimal_places=8, max_digits=16, null=True)),
('type', models.CharField(choices=[('CTY', 'Country'), ('PRV', 'Province')], max_length=3)),
('population', models.PositiveIntegerField(blank=True, null=True)),
('parent', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='reports.region')),
],
),
migrations.CreateModel(
name='Report',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateField()),
('confirmed', models.PositiveIntegerField(blank=True, null=True)),
('active', models.PositiveIntegerField(blank=True, null=True)),
('deaths', models.PositiveIntegerField(blank=True, null=True)),
('recovered', models.PositiveIntegerField(blank=True, null=True)),
('region', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='reports', to='reports.region')),
],
),
]
|
[
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.models.PositiveIntegerField",
"django.db.models.AutoField",
"django.db.models.DecimalField",
"django.db.models.DateField"
] |
[((365, 458), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (381, 458), False, 'from django.db import migrations, models\n'), ((482, 514), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(128)'}), '(max_length=128)\n', (498, 514), False, 'from django.db import migrations, models\n'), ((541, 597), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'default': 'None', 'max_length': '(2)'}), '(blank=True, default=None, max_length=2)\n', (557, 597), False, 'from django.db import migrations, models\n'), ((769, 844), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'blank': '(True)', 'decimal_places': '(8)', 'max_digits': '(16)', 'null': '(True)'}), '(blank=True, decimal_places=8, max_digits=16, null=True)\n', (788, 844), False, 'from django.db import migrations, models\n'), ((876, 951), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'blank': '(True)', 'decimal_places': '(8)', 'max_digits': '(16)', 'null': '(True)'}), '(blank=True, decimal_places=8, max_digits=16, null=True)\n', (895, 951), False, 'from django.db import migrations, models\n'), ((979, 1064), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('CTY', 'Country'), ('PRV', 'Province')]", 'max_length': '(3)'}), "(choices=[('CTY', 'Country'), ('PRV', 'Province')],\n max_length=3)\n", (995, 1064), False, 'from django.db import migrations, models\n'), ((1094, 1144), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (1121, 1144), False, 'from django.db import migrations, models\n'), ((1174, 1285), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""reports.region"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, to='reports.region')\n", (1191, 1285), False, 'from django.db import migrations, models\n'), ((1412, 1505), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1428, 1505), False, 'from django.db import migrations, models\n'), ((1529, 1547), 'django.db.models.DateField', 'models.DateField', ([], {}), '()\n', (1545, 1547), False, 'from django.db import migrations, models\n'), ((1580, 1630), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (1607, 1630), False, 'from django.db import migrations, models\n'), ((1660, 1710), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (1687, 1710), False, 'from django.db import migrations, models\n'), ((1740, 1790), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (1767, 1790), False, 'from django.db import migrations, models\n'), ((1823, 1873), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (1850, 1873), False, 'from django.db import migrations, models\n'), ((1903, 2015), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""reports"""', 'to': '"""reports.region"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='reports', to='reports.region')\n", (1920, 2015), False, 'from django.db import migrations, models\n')]
|
from unicodedata import normalize
def normalizar(texto: str) -> str:
"""
Normalize um texto qualquer.
Substitui os caracteres especiais do texto.
Exemplo:
>>> normalizar(' AçúcAR ')
'acucar'
"""
texto = normalize('NFKD', texto)
texto = texto.encode('iso-8859-1', 'ignore').decode('iso-8859-1')
texto = texto.strip()
return texto.lower()
|
[
"unicodedata.normalize"
] |
[((239, 263), 'unicodedata.normalize', 'normalize', (['"""NFKD"""', 'texto'], {}), "('NFKD', texto)\n", (248, 263), False, 'from unicodedata import normalize\n')]
|
import multiprocessing as mp
import itertools
QUEUE = mp.Queue()
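# NOTE: sharing QUEUE as a module-level global only works reliably with the
# "fork" start method; under "spawn" each child re-imports this module and
# gets its own, separate queue.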
class Routine(mp.Process):
def __init__(self, number: int, *args, **kwargs):
mp.Process.__init__(self, *args, **kwargs)
self.number = number
def target(self, number: int) -> int:
return 2 * number
def run(self):
result = self.target(self.number)
QUEUE.put(result)
def main() -> None:
for i in range(5):
routine = Routine(number=i)
routine.start()
routine.join()
for _ in range(5):
print(QUEUE.get())
if __name__ == '__main__':
main()
|
[
"multiprocessing.Process.__init__",
"multiprocessing.Queue"
] |
[((56, 66), 'multiprocessing.Queue', 'mp.Queue', ([], {}), '()\n', (64, 66), True, 'import multiprocessing as mp\n'), ((150, 192), 'multiprocessing.Process.__init__', 'mp.Process.__init__', (['self', '*args'], {}), '(self, *args, **kwargs)\n', (169, 192), True, 'import multiprocessing as mp\n')]
|
import subprocess
import json
def main(set_path):
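    """Download each submission listed in set_path and write one <problem_id>.json
    file per line, patching the first bonus's source problem when a non-zero
    globalist source problem is given."""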
for (problem_id, line) in enumerate(open(set_path).readlines()):
if line.strip() == "":
continue
problem_id = problem_id + 1
submission_id, globalist_source_problem = line.split(' ')
globalist_source_problem = int(globalist_source_problem)
print(submission_id, globalist_source_problem)
subprocess.run(
f'curl "https://icfpc.sx9.jp/submission?submission_id={submission_id}" '
f'> "tmp.txt"',
shell=True,
check=True,
)
j = json.load(open("tmp.txt"))
if globalist_source_problem != 0:
j["bonuses"][0]["problem"] = globalist_source_problem
with open(f"{problem_id}.json", "w") as f:
json.dump(j, f)
if __name__ == '__main__':
import fire
fire.Fire(main)
|
[
"json.dump",
"subprocess.run",
"fire.Fire"
] |
[((869, 884), 'fire.Fire', 'fire.Fire', (['main'], {}), '(main)\n', (878, 884), False, 'import fire\n'), ((405, 538), 'subprocess.run', 'subprocess.run', (['f"""curl "https://icfpc.sx9.jp/submission?submission_id={submission_id}" > "tmp.txt\\""""'], {'shell': '(True)', 'check': '(True)'}), '(\n f\'curl "https://icfpc.sx9.jp/submission?submission_id={submission_id}" > "tmp.txt"\'\n , shell=True, check=True)\n', (419, 538), False, 'import subprocess\n'), ((804, 819), 'json.dump', 'json.dump', (['j', 'f'], {}), '(j, f)\n', (813, 819), False, 'import json\n')]
|
import cv2
import matplotlib.pyplot as plt
import numpy as np
from functions_feat_extraction import image_to_features
from project_5_utils import stitch_together
def draw_labeled_bounding_boxes(img, labeled_frame, num_objects):
"""
Starting from labeled regions, draw enclosing rectangles in the original color frame.
"""
# Iterate through all detected cars
for car_number in range(1, num_objects + 1):
# Find pixels with each car_number label value
rows, cols = np.where(labeled_frame == car_number)
# Find minimum enclosing rectangle
x_min, y_min = np.min(cols), np.min(rows)
x_max, y_max = np.max(cols), np.max(rows)
cv2.rectangle(img, (x_min, y_min), (x_max, y_max), color=(255, 0, 0), thickness=6)
return img
def compute_heatmap_from_detections(frame, hot_windows, threshold=5, verbose=False):
"""
Compute heatmaps from windows classified as positive, in order to filter false positives.
"""
h, w, c = frame.shape
heatmap = np.zeros(shape=(h, w), dtype=np.uint8)
for bbox in hot_windows:
# for each bounding box, add heat to the corresponding rectangle in the image
x_min, y_min = bbox[0]
x_max, y_max = bbox[1]
heatmap[y_min:y_max, x_min:x_max] += 1 # add heat
# apply threshold + morphological closure to remove noise
_, heatmap_thresh = cv2.threshold(heatmap, threshold, 255, type=cv2.THRESH_BINARY)
heatmap_thresh = cv2.morphologyEx(heatmap_thresh, op=cv2.MORPH_CLOSE,
kernel=cv2.getStructuringElement(cv2.MORPH_ELLIPSE,
(13, 13)), iterations=1)
if verbose:
f, ax = plt.subplots(1, 3)
ax[0].imshow(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))
ax[1].imshow(heatmap, cmap='hot')
ax[2].imshow(heatmap_thresh, cmap='hot')
plt.show()
return heatmap, heatmap_thresh
def compute_windows_multiscale(image, verbose=False):
"""
Naive implementation of multiscale window search.
"""
h, w, c = image.shape
windows_multiscale = []
windows_32 = slide_window(image, x_start_stop=[None, None],
y_start_stop=[4 * h // 8, 5 * h // 8],
xy_window=(32, 32), xy_overlap=(0.8, 0.8))
windows_multiscale.append(windows_32)
windows_64 = slide_window(image, x_start_stop=[None, None],
y_start_stop=[4 * h // 8, 6 * h // 8],
xy_window=(64, 64), xy_overlap=(0.8, 0.8))
windows_multiscale.append(windows_64)
windows_128 = slide_window(image, x_start_stop=[None, None], y_start_stop=[3 * h // 8, h],
xy_window=(128, 128), xy_overlap=(0.8, 0.8))
windows_multiscale.append(windows_128)
if verbose:
windows_img_32 = draw_boxes(image, windows_32, color=(0, 0, 255), thick=1)
windows_img_64 = draw_boxes(image, windows_64, color=(0, 255, 0), thick=1)
windows_img_128 = draw_boxes(image, windows_128, color=(255, 0, 0), thick=1)
stitching = stitch_together([windows_img_32, windows_img_64, windows_img_128], (1, 3),
resize_dim=(1300, 500))
cv2.imshow('', stitching)
cv2.waitKey()
return np.concatenate(windows_multiscale)
def slide_window(img, x_start_stop=[None, None], y_start_stop=[None, None],
xy_window=(64, 64), xy_overlap=(0.5, 0.5)):
"""
Implementation of a sliding window in a region of interest of the image.
"""
# If x and/or y start/stop positions not defined, set to image size
if x_start_stop[0] is None:
x_start_stop[0] = 0
if x_start_stop[1] is None:
x_start_stop[1] = img.shape[1]
if y_start_stop[0] is None:
y_start_stop[0] = 0
if y_start_stop[1] is None:
y_start_stop[1] = img.shape[0]
# Compute the span of the region to be searched
x_span = x_start_stop[1] - x_start_stop[0]
y_span = y_start_stop[1] - y_start_stop[0]
# Compute the number of pixels per step in x/y
n_x_pix_per_step = np.int(xy_window[0] * (1 - xy_overlap[0]))
n_y_pix_per_step = np.int(xy_window[1] * (1 - xy_overlap[1]))
# Compute the number of windows in x / y
n_x_windows = np.int(x_span / n_x_pix_per_step) - 1
n_y_windows = np.int(y_span / n_y_pix_per_step) - 1
# Initialize a list to append window positions to
window_list = []
# Loop through finding x and y window positions.
for i in range(n_y_windows):
for j in range(n_x_windows):
# Calculate window position
start_x = j * n_x_pix_per_step + x_start_stop[0]
end_x = start_x + xy_window[0]
start_y = i * n_y_pix_per_step + y_start_stop[0]
end_y = start_y + xy_window[1]
# Append window position to list
window_list.append(((start_x, start_y), (end_x, end_y)))
# Return the list of windows
return window_list
def draw_boxes(img, bbox_list, color=(0, 0, 255), thick=6):
"""
Draw all bounding boxes in `bbox_list` onto a given image.
:param img: input image
:param bbox_list: list of bounding boxes
:param color: color used for drawing boxes
:param thick: thickness of the box line
:return: a new image with the bounding boxes drawn
"""
# Make a copy of the image
img_copy = np.copy(img)
# Iterate through the bounding boxes
for bbox in bbox_list:
# Draw a rectangle given bbox coordinates
tl_corner = tuple(bbox[0])
br_corner = tuple(bbox[1])
cv2.rectangle(img_copy, tl_corner, br_corner, color, thick)
# Return the image copy with boxes drawn
return img_copy
# Define a function you will pass an image and the list of windows to be searched (output of slide_windows())
def search_windows(img, windows, clf, scaler, feat_extraction_params):
hot_windows = [] # list to receive positive detection windows
for window in windows:
# Extract the current window from original image
resize_h, resize_w = feat_extraction_params['resize_h'], feat_extraction_params['resize_w']
test_img = cv2.resize(img[window[0][1]:window[1][1], window[0][0]:window[1][0]],
(resize_w, resize_h))
# Extract features for that window using single_img_features()
features = image_to_features(test_img, feat_extraction_params)
# Scale extracted features to be fed to classifier
test_features = scaler.transform(np.array(features).reshape(1, -1))
# Predict on rescaled features
prediction = clf.predict(test_features)
# If positive (prediction == 1) then save the window
if prediction == 1:
hot_windows.append(window)
# Return windows for positive detections
return hot_windows
|
[
"cv2.rectangle",
"functions_feat_extraction.image_to_features",
"cv2.imshow",
"project_5_utils.stitch_together",
"numpy.copy",
"cv2.cvtColor",
"numpy.max",
"numpy.int",
"matplotlib.pyplot.subplots",
"cv2.resize",
"matplotlib.pyplot.show",
"cv2.waitKey",
"numpy.min",
"numpy.concatenate",
"cv2.getStructuringElement",
"cv2.threshold",
"numpy.zeros",
"numpy.where",
"numpy.array"
] |
[((1030, 1068), 'numpy.zeros', 'np.zeros', ([], {'shape': '(h, w)', 'dtype': 'np.uint8'}), '(shape=(h, w), dtype=np.uint8)\n', (1038, 1068), True, 'import numpy as np\n'), ((1393, 1455), 'cv2.threshold', 'cv2.threshold', (['heatmap', 'threshold', '(255)'], {'type': 'cv2.THRESH_BINARY'}), '(heatmap, threshold, 255, type=cv2.THRESH_BINARY)\n', (1406, 1455), False, 'import cv2\n'), ((3360, 3394), 'numpy.concatenate', 'np.concatenate', (['windows_multiscale'], {}), '(windows_multiscale)\n', (3374, 3394), True, 'import numpy as np\n'), ((4183, 4225), 'numpy.int', 'np.int', (['(xy_window[0] * (1 - xy_overlap[0]))'], {}), '(xy_window[0] * (1 - xy_overlap[0]))\n', (4189, 4225), True, 'import numpy as np\n'), ((4249, 4291), 'numpy.int', 'np.int', (['(xy_window[1] * (1 - xy_overlap[1]))'], {}), '(xy_window[1] * (1 - xy_overlap[1]))\n', (4255, 4291), True, 'import numpy as np\n'), ((5476, 5488), 'numpy.copy', 'np.copy', (['img'], {}), '(img)\n', (5483, 5488), True, 'import numpy as np\n'), ((502, 539), 'numpy.where', 'np.where', (['(labeled_frame == car_number)'], {}), '(labeled_frame == car_number)\n', (510, 539), True, 'import numpy as np\n'), ((693, 779), 'cv2.rectangle', 'cv2.rectangle', (['img', '(x_min, y_min)', '(x_max, y_max)'], {'color': '(255, 0, 0)', 'thickness': '(6)'}), '(img, (x_min, y_min), (x_max, y_max), color=(255, 0, 0),\n thickness=6)\n', (706, 779), False, 'import cv2\n'), ((1748, 1766), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(3)'], {}), '(1, 3)\n', (1760, 1766), True, 'import matplotlib.pyplot as plt\n'), ((1927, 1937), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1935, 1937), True, 'import matplotlib.pyplot as plt\n'), ((3157, 3259), 'project_5_utils.stitch_together', 'stitch_together', (['[windows_img_32, windows_img_64, windows_img_128]', '(1, 3)'], {'resize_dim': '(1300, 500)'}), '([windows_img_32, windows_img_64, windows_img_128], (1, 3),\n resize_dim=(1300, 500))\n', (3172, 3259), False, 'from project_5_utils import stitch_together\n'), ((3300, 3325), 'cv2.imshow', 'cv2.imshow', (['""""""', 'stitching'], {}), "('', stitching)\n", (3310, 3325), False, 'import cv2\n'), ((3334, 3347), 'cv2.waitKey', 'cv2.waitKey', ([], {}), '()\n', (3345, 3347), False, 'import cv2\n'), ((4356, 4389), 'numpy.int', 'np.int', (['(x_span / n_x_pix_per_step)'], {}), '(x_span / n_x_pix_per_step)\n', (4362, 4389), True, 'import numpy as np\n'), ((4412, 4445), 'numpy.int', 'np.int', (['(y_span / n_y_pix_per_step)'], {}), '(y_span / n_y_pix_per_step)\n', (4418, 4445), True, 'import numpy as np\n'), ((5686, 5745), 'cv2.rectangle', 'cv2.rectangle', (['img_copy', 'tl_corner', 'br_corner', 'color', 'thick'], {}), '(img_copy, tl_corner, br_corner, color, thick)\n', (5699, 5745), False, 'import cv2\n'), ((6266, 6362), 'cv2.resize', 'cv2.resize', (['img[window[0][1]:window[1][1], window[0][0]:window[1][0]]', '(resize_w, resize_h)'], {}), '(img[window[0][1]:window[1][1], window[0][0]:window[1][0]], (\n resize_w, resize_h))\n', (6276, 6362), False, 'import cv2\n'), ((6479, 6530), 'functions_feat_extraction.image_to_features', 'image_to_features', (['test_img', 'feat_extraction_params'], {}), '(test_img, feat_extraction_params)\n', (6496, 6530), False, 'from functions_feat_extraction import image_to_features\n'), ((607, 619), 'numpy.min', 'np.min', (['cols'], {}), '(cols)\n', (613, 619), True, 'import numpy as np\n'), ((621, 633), 'numpy.min', 'np.min', (['rows'], {}), '(rows)\n', (627, 633), True, 'import numpy as np\n'), ((657, 669), 'numpy.max', 'np.max', (['cols'], {}), '(cols)\n', (663, 669), True, 'import numpy as np\n'), ((671, 683), 'numpy.max', 'np.max', (['rows'], {}), '(rows)\n', (677, 683), True, 'import numpy as np\n'), ((1575, 1629), 'cv2.getStructuringElement', 'cv2.getStructuringElement', (['cv2.MORPH_ELLIPSE', '(13, 13)'], {}), '(cv2.MORPH_ELLIPSE, (13, 13))\n', (1600, 1629), False, 'import cv2\n'), ((1788, 1826), 'cv2.cvtColor', 'cv2.cvtColor', (['frame', 'cv2.COLOR_BGR2RGB'], {}), '(frame, cv2.COLOR_BGR2RGB)\n', (1800, 1826), False, 'import cv2\n'), ((6632, 6650), 'numpy.array', 'np.array', (['features'], {}), '(features)\n', (6640, 6650), True, 'import numpy as np\n')]
|
import os
from mpl_toolkits import mplot3d
from matplotlib import cm
import matplotlib.pyplot as plt
import pandas as pd
def plot_3d(data, x, y, z):
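    """Plot a triangulated 3D surface of column z over columns x and y of data."""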
    ax = plt.axes(projection="3d")
    ax.plot_trisurf(data[x], data[y], data[z], cmap=cm.Blues)
    ax.set_xticks(data[x].values)
    ax.set_yticks(data[y].values)
    ax.set_xlabel(x)
    ax.set_ylabel(y)
return ax
if __name__ == "__main__":
os.chdir(os.path.dirname(__file__))
df = pd.read_csv("../data/csv/calculation.csv", sep=";")
fig = plt.figure(figsize=(14, 8))
ax = plot_3d(df, x="Beta", y="Alfa", z="J")
plt.show()
|
[
"matplotlib.pyplot.show",
"matplotlib.pyplot.axes",
"pandas.read_csv",
"os.path.dirname",
"matplotlib.pyplot.figure"
] |
[((160, 185), 'matplotlib.pyplot.axes', 'plt.axes', ([], {'projection': '"""3d"""'}), "(projection='3d')\n", (168, 185), True, 'import matplotlib.pyplot as plt\n'), ((446, 497), 'pandas.read_csv', 'pd.read_csv', (['"""../data/csv/calculation.csv"""'], {'sep': '""";"""'}), "('../data/csv/calculation.csv', sep=';')\n", (457, 497), True, 'import pandas as pd\n'), ((508, 535), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(14, 8)'}), '(figsize=(14, 8))\n', (518, 535), True, 'import matplotlib.pyplot as plt\n'), ((588, 598), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (596, 598), True, 'import matplotlib.pyplot as plt\n'), ((405, 430), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (420, 430), False, 'import os\n')]
|
from factory import Faker
from factory.django import DjangoModelFactory
class IntegerInputDefinitionFactory(DjangoModelFactory):
key = Faker("pystr", min_chars=3, max_chars=50)
required = Faker("pybool")
description = Faker("sentence")
min_value = Faker("pyint", min_value=-20, max_value=-10)
max_value = Faker("pyint", min_value=20, max_value=40)
default = Faker("pyint", min_value=-10, max_value=20)
class Meta:
model = "django_analyses.IntegerInputDefinition"
|
[
"factory.Faker"
] |
[((141, 182), 'factory.Faker', 'Faker', (['"""pystr"""'], {'min_chars': '(3)', 'max_chars': '(50)'}), "('pystr', min_chars=3, max_chars=50)\n", (146, 182), False, 'from factory import Faker\n'), ((198, 213), 'factory.Faker', 'Faker', (['"""pybool"""'], {}), "('pybool')\n", (203, 213), False, 'from factory import Faker\n'), ((232, 249), 'factory.Faker', 'Faker', (['"""sentence"""'], {}), "('sentence')\n", (237, 249), False, 'from factory import Faker\n'), ((266, 310), 'factory.Faker', 'Faker', (['"""pyint"""'], {'min_value': '(-20)', 'max_value': '(-10)'}), "('pyint', min_value=-20, max_value=-10)\n", (271, 310), False, 'from factory import Faker\n'), ((327, 369), 'factory.Faker', 'Faker', (['"""pyint"""'], {'min_value': '(20)', 'max_value': '(40)'}), "('pyint', min_value=20, max_value=40)\n", (332, 369), False, 'from factory import Faker\n'), ((384, 427), 'factory.Faker', 'Faker', (['"""pyint"""'], {'min_value': '(-10)', 'max_value': '(20)'}), "('pyint', min_value=-10, max_value=20)\n", (389, 427), False, 'from factory import Faker\n')]
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Provides the web interface for changing internal_only property of a Bot."""
import logging
from google.appengine.api import taskqueue
from google.appengine.datastore import datastore_query
from google.appengine.ext import ndb
from dashboard import add_point_queue
from dashboard.common import request_handler
from dashboard.common import utils
from dashboard.common import datastore_hooks
from dashboard.common import stored_object
from dashboard.models import anomaly
from dashboard.models import graph_data
# Number of Row entities to process at once.
_MAX_ROWS_TO_PUT = 25
# Number of TestMetadata entities to process at once.
_MAX_TESTS_TO_PUT = 25
# Which queue to use for tasks started by this handler. Must be in queue.yaml.
_QUEUE_NAME = 'migrate-queue'
class ChangeInternalOnlyHandler(request_handler.RequestHandler):
"""Changes internal_only property of Bot, TestMetadata, and Row."""
def get(self):
"""Renders the UI for selecting bots."""
masters = {}
bots = graph_data.Bot.query().fetch()
for bot in bots:
master_name = bot.key.parent().string_id()
bot_name = bot.key.string_id()
      master_bots = masters.setdefault(master_name, [])
      master_bots.append({
'name': bot_name,
'internal_only': bot.internal_only,
})
logging.info('MASTERS: %s', masters)
self.RenderHtml('change_internal_only.html', {
'masters': masters,
})
def post(self):
"""Updates the selected bots internal_only property.
POST requests will be made by the task queue; tasks are added to the task
queue either by a kick-off POST from the front-end form, or by this handler
itself.
Request parameters:
internal_only: "true" if turning on internal_only, else "false".
bots: Bots to update. Multiple bots parameters are possible; the value
of each should be a string like "MasterName/platform-name".
test: An urlsafe Key for a TestMetadata entity.
cursor: An urlsafe Cursor; this parameter is only given if we're part-way
through processing a Bot or a TestMetadata.
Outputs:
A message to the user if this request was started by the web form,
or an error message if something went wrong, or nothing.
"""
# /change_internal_only should be only accessible if one has administrator
# privileges, so requests are guaranteed to be authorized.
datastore_hooks.SetPrivilegedRequest()
internal_only_string = self.request.get('internal_only')
if internal_only_string == 'true':
internal_only = True
elif internal_only_string == 'false':
internal_only = False
else:
self.ReportError('No internal_only field')
return
bot_names = self.request.get_all('bots')
test_key_urlsafe = self.request.get('test')
cursor = self.request.get('cursor', None)
if bot_names and len(bot_names) > 1:
self._UpdateMultipleBots(bot_names, internal_only)
self.RenderHtml('result.html', {
'headline': ('Updating internal_only. This may take some time '
'depending on the data to update. Check the task queue '
'to determine whether the job is still in progress.'),
})
elif bot_names and len(bot_names) == 1:
self._UpdateBot(bot_names[0], internal_only, cursor=cursor)
elif test_key_urlsafe:
self._UpdateTest(test_key_urlsafe, internal_only, cursor=cursor)
def _UpdateBotWhitelist(self, bot_master_names, internal_only):
"""Updates the global bot_whitelist object, otherwise subsequent add_point
calls will overwrite our work."""
bot_whitelist = stored_object.Get(add_point_queue.BOT_WHITELIST_KEY)
bot_names = [b.split('/')[1] for b in bot_master_names]
if internal_only:
bot_whitelist = [b for b in bot_whitelist if b not in bot_names]
else:
bot_whitelist.extend(bot_names)
bot_whitelist = list(set(bot_whitelist))
bot_whitelist.sort()
stored_object.Set(add_point_queue.BOT_WHITELIST_KEY, bot_whitelist)
def _UpdateMultipleBots(self, bot_names, internal_only):
"""Kicks off update tasks for individual bots and their tests."""
self._UpdateBotWhitelist(bot_names, internal_only)
for bot_name in bot_names:
taskqueue.add(
url='/change_internal_only',
params={
'bots': bot_name,
'internal_only': 'true' if internal_only else 'false'
},
queue_name=_QUEUE_NAME)
def _UpdateBot(self, bot_name, internal_only, cursor=None):
"""Starts updating internal_only for the given bot and associated data."""
master, bot = bot_name.split('/')
bot_key = ndb.Key('Master', master, 'Bot', bot)
if not cursor:
# First time updating for this Bot.
bot_entity = bot_key.get()
if bot_entity.internal_only != internal_only:
bot_entity.internal_only = internal_only
bot_entity.put()
else:
cursor = datastore_query.Cursor(urlsafe=cursor)
# Fetch a certain number of TestMetadata entities starting from cursor. See:
# https://developers.google.com/appengine/docs/python/ndb/queryclass
# Start update tasks for each existing subordinate TestMetadata.
test_query = graph_data.TestMetadata.query(
graph_data.TestMetadata.master_name == master,
graph_data.TestMetadata.bot_name == bot)
test_keys, next_cursor, more = test_query.fetch_page(
_MAX_TESTS_TO_PUT, start_cursor=cursor, keys_only=True)
for test_key in test_keys:
taskqueue.add(
url='/change_internal_only',
params={
'test': test_key.urlsafe(),
'internal_only': 'true' if internal_only else 'false',
},
queue_name=_QUEUE_NAME)
if more:
taskqueue.add(
url='/change_internal_only',
params={
'bots': bot_name,
'cursor': next_cursor.urlsafe(),
'internal_only': 'true' if internal_only else 'false',
},
queue_name=_QUEUE_NAME)
def _UpdateTest(self, test_key_urlsafe, internal_only, cursor=None):
"""Updates the given TestMetadata and associated Row entities."""
test_key = ndb.Key(urlsafe=test_key_urlsafe)
if not cursor:
# First time updating for this TestMetadata.
test_entity = test_key.get()
if test_entity.internal_only != internal_only:
test_entity.internal_only = internal_only
test_entity.put()
# Update all of the Anomaly entities for this test.
# Assuming that this should be fast enough to do in one request
# for any one test.
anomalies = anomaly.Anomaly.GetAlertsForTest(test_key)
for anomaly_entity in anomalies:
if anomaly_entity.internal_only != internal_only:
anomaly_entity.internal_only = internal_only
ndb.put_multi(anomalies)
else:
cursor = datastore_query.Cursor(urlsafe=cursor)
# Fetch a certain number of Row entities starting from cursor.
rows_query = graph_data.Row.query(
graph_data.Row.parent_test == utils.OldStyleTestKey(test_key))
rows, next_cursor, more = rows_query.fetch_page(
_MAX_ROWS_TO_PUT, start_cursor=cursor)
for row in rows:
if row.internal_only != internal_only:
row.internal_only = internal_only
ndb.put_multi(rows)
if more:
taskqueue.add(
url='/change_internal_only',
params={
'test': test_key_urlsafe,
'cursor': next_cursor.urlsafe(),
'internal_only': 'true' if internal_only else 'false',
},
queue_name=_QUEUE_NAME)
|
[
"dashboard.common.stored_object.Get",
"google.appengine.api.taskqueue.add",
"dashboard.models.anomaly.Anomaly.GetAlertsForTest",
"dashboard.common.datastore_hooks.SetPrivilegedRequest",
"dashboard.common.utils.OldStyleTestKey",
"dashboard.models.graph_data.TestMetadata.query",
"logging.info",
"dashboard.models.graph_data.Bot.query",
"google.appengine.ext.ndb.put_multi",
"dashboard.common.stored_object.Set",
"google.appengine.ext.ndb.Key",
"google.appengine.datastore.datastore_query.Cursor"
] |
[((1456, 1492), 'logging.info', 'logging.info', (['"""MASTERS: %s"""', 'masters'], {}), "('MASTERS: %s', masters)\n", (1468, 1492), False, 'import logging\n'), ((2562, 2600), 'dashboard.common.datastore_hooks.SetPrivilegedRequest', 'datastore_hooks.SetPrivilegedRequest', ([], {}), '()\n', (2598, 2600), False, 'from dashboard.common import datastore_hooks\n'), ((3802, 3854), 'dashboard.common.stored_object.Get', 'stored_object.Get', (['add_point_queue.BOT_WHITELIST_KEY'], {}), '(add_point_queue.BOT_WHITELIST_KEY)\n', (3819, 3854), False, 'from dashboard.common import stored_object\n'), ((4134, 4201), 'dashboard.common.stored_object.Set', 'stored_object.Set', (['add_point_queue.BOT_WHITELIST_KEY', 'bot_whitelist'], {}), '(add_point_queue.BOT_WHITELIST_KEY, bot_whitelist)\n', (4151, 4201), False, 'from dashboard.common import stored_object\n'), ((4840, 4877), 'google.appengine.ext.ndb.Key', 'ndb.Key', (['"""Master"""', 'master', '"""Bot"""', 'bot'], {}), "('Master', master, 'Bot', bot)\n", (4847, 4877), False, 'from google.appengine.ext import ndb\n'), ((5405, 5526), 'dashboard.models.graph_data.TestMetadata.query', 'graph_data.TestMetadata.query', (['(graph_data.TestMetadata.master_name == master)', '(graph_data.TestMetadata.bot_name == bot)'], {}), '(graph_data.TestMetadata.master_name == master,\n graph_data.TestMetadata.bot_name == bot)\n', (5434, 5526), False, 'from dashboard.models import graph_data\n'), ((6376, 6409), 'google.appengine.ext.ndb.Key', 'ndb.Key', ([], {'urlsafe': 'test_key_urlsafe'}), '(urlsafe=test_key_urlsafe)\n', (6383, 6409), False, 'from google.appengine.ext import ndb\n'), ((7498, 7517), 'google.appengine.ext.ndb.put_multi', 'ndb.put_multi', (['rows'], {}), '(rows)\n', (7511, 7517), False, 'from google.appengine.ext import ndb\n'), ((4426, 4583), 'google.appengine.api.taskqueue.add', 'taskqueue.add', ([], {'url': '"""/change_internal_only"""', 'params': "{'bots': bot_name, 'internal_only': 'true' if internal_only else 'false'}", 'queue_name': '_QUEUE_NAME'}), "(url='/change_internal_only', params={'bots': bot_name,\n 'internal_only': 'true' if internal_only else 'false'}, queue_name=\n _QUEUE_NAME)\n", (4439, 4583), False, 'from google.appengine.api import taskqueue\n'), ((5124, 5162), 'google.appengine.datastore.datastore_query.Cursor', 'datastore_query.Cursor', ([], {'urlsafe': 'cursor'}), '(urlsafe=cursor)\n', (5146, 5162), False, 'from google.appengine.datastore import datastore_query\n'), ((6817, 6859), 'dashboard.models.anomaly.Anomaly.GetAlertsForTest', 'anomaly.Anomaly.GetAlertsForTest', (['test_key'], {}), '(test_key)\n', (6849, 6859), False, 'from dashboard.models import anomaly\n'), ((7018, 7042), 'google.appengine.ext.ndb.put_multi', 'ndb.put_multi', (['anomalies'], {}), '(anomalies)\n', (7031, 7042), False, 'from google.appengine.ext import ndb\n'), ((7068, 7106), 'google.appengine.datastore.datastore_query.Cursor', 'datastore_query.Cursor', ([], {'urlsafe': 'cursor'}), '(urlsafe=cursor)\n', (7090, 7106), False, 'from google.appengine.datastore import datastore_query\n'), ((1162, 1184), 'dashboard.models.graph_data.Bot.query', 'graph_data.Bot.query', ([], {}), '()\n', (1182, 1184), False, 'from dashboard.models import graph_data\n'), ((7252, 7283), 'dashboard.common.utils.OldStyleTestKey', 'utils.OldStyleTestKey', (['test_key'], {}), '(test_key)\n', (7273, 7283), False, 'from dashboard.common import utils\n')]
|
import json
import ueimporter.version as version
def test_ueimporter_json_with_tag_will_succeed():
version_dict = {
'GitReleaseTag': '4.27.1-release'
}
assert version.UEImporterJson(
version_dict).git_release_tag == '4.27.1-release'
def test_ueimporter_json_without_key_will_yield_empty_tag():
version_dict = {
'MisspelledGitReleaseTag': '4.27.1-release'
}
assert version.UEImporterJson(version_dict).git_release_tag == ''
assert version.UEImporterJson({}).git_release_tag == ''
def test_ueimporter_json_set_tag_will_succeed():
version_dict = {
'GitReleaseTag': '4.27.1-release'
}
ueimporter_json = version.UEImporterJson(version_dict)
ueimporter_json.git_release_tag = '4.27.2-release'
assert ueimporter_json.git_release_tag == '4.27.2-release'
def test_ueimporter_json_will_yield_json_with_tag():
version_dict = {
'GitReleaseTag': '4.27.1-release'
}
assert version.UEImporterJson(version_dict).to_json(indent=4) == \
"""{
"GitReleaseTag": "4.27.1-release"
}"""
def test_ueimporter_json_without_key_will_yield_json_with_empty_tag():
assert version.UEImporterJson({}).to_json(indent=4) == \
"""{
"GitReleaseTag": ""
}"""
def test_ueimporter_json_with_invalid_key_will_yield_json_with_valid_keys():
version_dict = {
'GitReleaseTag': '4.27.1-release',
'ThisKeyDoesNotBelong': 'SomeValue',
}
assert version.UEImporterJson(version_dict).to_json(indent=4) == \
"""{
"GitReleaseTag": "4.27.1-release"
}"""
def test_from_build_version_json():
file_content = json.dumps({
'MajorVersion': '4',
'MinorVersion': '27',
'PatchVersion': '1',
})
assert version.from_build_version_json(file_content) == '4.27.1'
def test_from_build_version_json_without_patch_will_fail():
file_content = json.dumps({
'MajorVersion': '4',
'MinorVersion': '27',
})
assert version.from_build_version_json(file_content) == None
def test_from_git_release_tag():
assert version.from_git_release_tag('4.27.1-release') == '4.27.1'
assert version.from_git_release_tag('4.27.2-release') == '4.27.2'
assert version.from_git_release_tag(
'5.0.0-early-access-1') == '5.0.0'
assert version.from_git_release_tag(
'5.0.0-early-access-2') == '5.0.0'
def test_from_git_release_tag_without_patch_will_fail():
assert version.from_git_release_tag('4.27.0-release') == '4.27.0'
|
[
"ueimporter.version.UEImporterJson",
"ueimporter.version.from_git_release_tag",
"ueimporter.version.from_build_version_json",
"json.dumps"
] |
[((678, 714), 'ueimporter.version.UEImporterJson', 'version.UEImporterJson', (['version_dict'], {}), '(version_dict)\n', (700, 714), True, 'import ueimporter.version as version\n'), ((1638, 1714), 'json.dumps', 'json.dumps', (["{'MajorVersion': '4', 'MinorVersion': '27', 'PatchVersion': '1'}"], {}), "({'MajorVersion': '4', 'MinorVersion': '27', 'PatchVersion': '1'})\n", (1648, 1714), False, 'import json\n'), ((1897, 1952), 'json.dumps', 'json.dumps', (["{'MajorVersion': '4', 'MinorVersion': '27'}"], {}), "({'MajorVersion': '4', 'MinorVersion': '27'})\n", (1907, 1952), False, 'import json\n'), ((1758, 1803), 'ueimporter.version.from_build_version_json', 'version.from_build_version_json', (['file_content'], {}), '(file_content)\n', (1789, 1803), True, 'import ueimporter.version as version\n'), ((1988, 2033), 'ueimporter.version.from_build_version_json', 'version.from_build_version_json', (['file_content'], {}), '(file_content)\n', (2019, 2033), True, 'import ueimporter.version as version\n'), ((2088, 2134), 'ueimporter.version.from_git_release_tag', 'version.from_git_release_tag', (['"""4.27.1-release"""'], {}), "('4.27.1-release')\n", (2116, 2134), True, 'import ueimporter.version as version\n'), ((2158, 2204), 'ueimporter.version.from_git_release_tag', 'version.from_git_release_tag', (['"""4.27.2-release"""'], {}), "('4.27.2-release')\n", (2186, 2204), True, 'import ueimporter.version as version\n'), ((2228, 2280), 'ueimporter.version.from_git_release_tag', 'version.from_git_release_tag', (['"""5.0.0-early-access-1"""'], {}), "('5.0.0-early-access-1')\n", (2256, 2280), True, 'import ueimporter.version as version\n'), ((2312, 2364), 'ueimporter.version.from_git_release_tag', 'version.from_git_release_tag', (['"""5.0.0-early-access-2"""'], {}), "('5.0.0-early-access-2')\n", (2340, 2364), True, 'import ueimporter.version as version\n'), ((2455, 2501), 'ueimporter.version.from_git_release_tag', 'version.from_git_release_tag', (['"""4.27.0-release"""'], {}), "('4.27.0-release')\n", (2483, 2501), True, 'import ueimporter.version as version\n'), ((182, 218), 'ueimporter.version.UEImporterJson', 'version.UEImporterJson', (['version_dict'], {}), '(version_dict)\n', (204, 218), True, 'import ueimporter.version as version\n'), ((417, 453), 'ueimporter.version.UEImporterJson', 'version.UEImporterJson', (['version_dict'], {}), '(version_dict)\n', (439, 453), True, 'import ueimporter.version as version\n'), ((487, 513), 'ueimporter.version.UEImporterJson', 'version.UEImporterJson', (['{}'], {}), '({})\n', (509, 513), True, 'import ueimporter.version as version\n'), ((968, 1004), 'ueimporter.version.UEImporterJson', 'version.UEImporterJson', (['version_dict'], {}), '(version_dict)\n', (990, 1004), True, 'import ueimporter.version as version\n'), ((1168, 1194), 'ueimporter.version.UEImporterJson', 'version.UEImporterJson', (['{}'], {}), '({})\n', (1190, 1194), True, 'import ueimporter.version as version\n'), ((1465, 1501), 'ueimporter.version.UEImporterJson', 'version.UEImporterJson', (['version_dict'], {}), '(version_dict)\n', (1487, 1501), True, 'import ueimporter.version as version\n')]
|
####################
# ES-DOC CIM Questionnaire
# Copyright (c) 2015 ES-DOC. All rights reserved.
#
# University of Colorado, Boulder
# http://cires.colorado.edu/
#
# This project is distributed according to the terms of the MIT license [http://www.opensource.org/licenses/MIT].
####################
__author__ = "allyn.treshansky"
"""
.. module:: test_views_legacy
Tests the redirect_legacy_projects decorator
"""
from django.core.urlresolvers import reverse
from Q.questionnaire.tests.test_base import TestQBase
from Q.questionnaire.q_utils import add_parameters_to_url, FuzzyInt
from Q.questionnaire.views.views_legacy import *
class Test(TestQBase):
def setUp(self):
super(Test, self).setUp()
# just setup some silly test projects
# I don't care if they're valid or not
# as long as 1 has 'is_legacy=True' it's good enough
self.current_project = QProject(
name="current_project",
title="Current Project",
email=self.test_user.email,
is_legacy=False,
)
self.current_project.save()
self.legacy_project = QProject(
name="legacy_project",
title="Legacy Project",
email=self.test_user.email,
is_legacy=True,
)
self.legacy_project.save()
def tearDown(self):
super(Test, self).tearDown()
#####################
# redirection tests #
#####################
# using the 'q_project' view as my test.
# it doesn't really matter which one I use,
# as long as it has the '@redirect_legacy_projects' decorator.
# this decorator only applies to "GET" requests,
# so I don't need to worry about any potential data passed in,
# but I still check for explicit parameters in the URL just in-case.
def test_redirect_legacy_projects(self):
test_params = {
"a": "a",
"b": "b",
}
current_request_url = add_parameters_to_url(reverse("project", kwargs={
"project_name": "current_project",
}), **test_params)
legacy_request_url = add_parameters_to_url(reverse("project", kwargs={
"project_name": "legacy_project",
}), **test_params)
# check that a non-legacy view did not redirect and returned a normal status_code...
response = self.client.get(current_request_url)
with self.assertRaises(AssertionError):
self.assertRedirects(response, expected_url=LEGACY_HOST+current_request_url)
self.assertEqual(response.status_code, 200)
import ipdb; ipdb.set_trace()
# TODO: THIS ASSERTION FAILS
# check that a legacy view did redirect and the status_code was either 301 or 302...
response = self.client.get(legacy_request_url)
self.assertRedirects(response, expected_url=LEGACY_HOST+legacy_request_url, status_code=FuzzyInt(301, 302), fetch_redirect_response=False)
|
[
"django.core.urlresolvers.reverse",
"Q.questionnaire.q_utils.FuzzyInt",
"ipdb.set_trace"
] |
[((2621, 2637), 'ipdb.set_trace', 'ipdb.set_trace', ([], {}), '()\n', (2635, 2637), False, 'import ipdb\n'), ((2006, 2068), 'django.core.urlresolvers.reverse', 'reverse', (['"""project"""'], {'kwargs': "{'project_name': 'current_project'}"}), "('project', kwargs={'project_name': 'current_project'})\n", (2013, 2068), False, 'from django.core.urlresolvers import reverse\n'), ((2159, 2220), 'django.core.urlresolvers.reverse', 'reverse', (['"""project"""'], {'kwargs': "{'project_name': 'legacy_project'}"}), "('project', kwargs={'project_name': 'legacy_project'})\n", (2166, 2220), False, 'from django.core.urlresolvers import reverse\n'), ((2919, 2937), 'Q.questionnaire.q_utils.FuzzyInt', 'FuzzyInt', (['(301)', '(302)'], {}), '(301, 302)\n', (2927, 2937), False, 'from Q.questionnaire.q_utils import add_parameters_to_url, FuzzyInt\n')]
|
import argparse
import csv
def findCategory(status):
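    """Map a USCIS case status string to a numeric processing-stage category
    (e.g. 1 = received, 90 = approved, 130 = everything else)."""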
# Received state
if "Fingerprint Fee Was Received" in status:
return 1
elif "Expedite Request Denied" in status:
return 1
elif "Case Was Received" in status:
return 1
elif "Case Was Reopened" in status:
return 1
elif "Duplicate Notice Was Mailed" in status:
return 1
elif "Fees Were Waived" in status:
return 1
elif "Fee Refund Was Mailed" in status:
return 1
elif "Date of Birth Was Updated" in status:
return 1
elif "Name Was Updated" in status:
return 1
elif "Fee Will Be Refunded" in status:
return 1
elif "Notice Was Returned To USCIS Because The Post Office Could Not Deliver It" in status:
return 1
# Expedite request
elif "Expedite Request Received" in status:
return 20
# Finger prints taken
elif "Show Fingerprints Were Taken" in status:
return 30
# RFE
elif "Request for Initial Evidence Was Sent" in status:
return 40
elif "Request for Additional Evidence Was Sent" in status:
return 40
# RFE received
elif "Response To USCIS' Request For Evidence Was Received" in status:
return 50
elif "Request For Evidence Was Received" in status:
return 50
elif "Correspondence Was Received And USCIS Is Reviewing It" in status:
return 50
# transerfed
elif "Case Transferred To Another Office" in status:
return 60
elif "Case Was Transferred And A New Office Has Jurisdiction" in status:
return 60
# Interview
elif "Request To Reschedule My Appointment Was Received" in status:
return 70
elif "Ready to Be Scheduled for An Interview" in status:
return 70
elif "Interview Was Rescheduled" in status:
return 70
elif "Interview Was Scheduled" in status:
return 70
elif "Case Was Updated To Show That No One Appeared for In-Person Processing" in status:
return 70
# Interview complete
elif "Interview Was Completed And My Case Must Be Reviewed" in status:
return 80
# Approved
elif "Card Was Mailed To Me" in status:
return 90
elif "Case Was Approved" in status:
return 90
elif "Interview Cancelled And Notice Ordered" in status:
return 90
elif "Notice Explaining USCIS Actions Was Mailed" in status:
return 90
elif "Case Closed Benefit Received By Other Means" in status:
return 90
elif "Card Was Returned To USCIS" in status:
return 90
elif "New Card Is Being Produced" in status:
return 90
# Denied
elif "Case Was Denied" in status:
return 100
elif "Petition/Application Was Rejected For Insufficient Funds" in status:
return 100
elif "Withdrawal Acknowledgement Notice Was Sent" in status:
return 100
# Rejected
elif "Case Rejected" in status:
return 110
elif "Case Was Rejected" in status:
return 110
elif "Card Was Picked Up By The United States Postal Service" in status:
return 120
elif "New Card Is Being Produced" in status:
return 120
elif "Card Was Delivered To Me By The Post Office" in status:
return 120
# everything else
else:
return 130
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--infile', help="Input CSV file")
    parser.add_argument('--outfile', help="Output CSV file")
args = parser.parse_args()
with open(args.infile, newline='') as inputcsvfile, open(args.outfile, 'w', newline='') as outcsvfile:
inreader = csv.reader(inputcsvfile, delimiter=',')
outwriter = csv.writer(outcsvfile, delimiter=',',
quotechar='"', quoting=csv.QUOTE_MINIMAL)
for row in inreader:
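            # Column 2 holds the raw status text; column 4 is overwritten
            # with the numeric category computed by findCategory().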
row[4] = findCategory(row[2])
outwriter.writerow(row)
|
[
"csv.writer",
"csv.reader",
"argparse.ArgumentParser"
] |
[((3411, 3436), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (3434, 3436), False, 'import argparse\n'), ((3714, 3753), 'csv.reader', 'csv.reader', (['inputcsvfile'], {'delimiter': '""","""'}), "(inputcsvfile, delimiter=',')\n", (3724, 3753), False, 'import csv\n'), ((3774, 3853), 'csv.writer', 'csv.writer', (['outcsvfile'], {'delimiter': '""","""', 'quotechar': '"""\\""""', 'quoting': 'csv.QUOTE_MINIMAL'}), '(outcsvfile, delimiter=\',\', quotechar=\'"\', quoting=csv.QUOTE_MINIMAL)\n', (3784, 3853), False, 'import csv\n')]
|
import json
import pandas as pd
import plotly.express as px
import dash
import dash_bootstrap_components as dbc
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output
from urllib.request import urlopen
app = dash.Dash(external_stylesheets=[dbc.themes.BOOTSTRAP])
buttons = dbc.ButtonGroup(
[
html.Form(action="https://github.com/cjoshi7/covid19-date-selector",
children=dbc.Button("Documentation", color="primary", type="submit")),
html.Form(action="https://github.com/nytimes/covid-19-data/blob/master/us-counties.csv",
children=dbc.Button("Download Dataset", color="primary", type="submit"))
]
)
radiobuttons = dcc.RadioItems(
id="datatype",
options = [
{"label": "Infection Rate", "value": "cases"},
{"label": "Death Rate", "value": "deaths"}
],
value = "cases",
className="radio",
)
date_selector = dcc.DatePickerSingle(
id="dateselector",
min_date_allowed="2020-1-21",
max_date_allowed="2021-2-5",
initial_visible_month="2021-2-5",
date="2021-2-5"
)
jumbotron = dbc.Jumbotron(
[
html.H1("COVID-19 Date Selector", className="display-3"),
html.P(
"Visualize the spread of the virus on a specific day",
className="lead",
),
html.Hr(className="my-2"),
html.P(
"@cjoshi7",
),
html.P(buttons),
]
)
app.layout = html.Div([
html.Div(jumbotron),
html.Div(date_selector),
html.Div(radiobuttons),
html.Div(
[dcc.Graph(id="choropleth")]
)
# Include multiple types of graphs for each date.
])
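# Recompute the choropleth whenever the selected date or data type changes.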
@app.callback(
Output("choropleth", "figure"),
Input("dateselector", "date"),
Input("datatype", "value"))
def display_choropleth(date, datatype):
color = "Viridis"
label = "Infection Rate"
if datatype == "deaths":
color = "hot"
label = "Death Rate"
with urlopen("https://raw.githubusercontent.com/plotly/datasets/master/geojson-counties-fips.json") as response:
counties = json.load(response)
raw_df = pd.read_csv("us-counties.csv",
dtype={"fips": str})
filtered_df = raw_df[raw_df.date == date]
fig = px.choropleth(filtered_df, geojson=counties, locations='fips', color=datatype,
color_continuous_scale=color,
range_color=(0, filtered_df[datatype].mean() * 1.5),
scope="usa",
labels={datatype: label}
)
fig.update_layout(margin={"r":0,"t":0,"l":0,"b":0})
return fig
if __name__ == '__main__':
app.run_server(debug=True)
|
[
"json.load",
"dash.Dash",
"dash_core_components.DatePickerSingle",
"pandas.read_csv",
"dash_html_components.Div",
"urllib.request.urlopen",
"dash_core_components.RadioItems",
"dash_bootstrap_components.Button",
"dash.dependencies.Input",
"dash_html_components.P",
"dash_core_components.Graph",
"dash_html_components.H1",
"dash.dependencies.Output",
"dash_html_components.Hr"
] |
[((283, 337), 'dash.Dash', 'dash.Dash', ([], {'external_stylesheets': '[dbc.themes.BOOTSTRAP]'}), '(external_stylesheets=[dbc.themes.BOOTSTRAP])\n', (292, 337), False, 'import dash\n'), ((763, 935), 'dash_core_components.RadioItems', 'dcc.RadioItems', ([], {'id': '"""datatype"""', 'options': "[{'label': 'Infection Rate', 'value': 'cases'}, {'label': 'Death Rate',\n 'value': 'deaths'}]", 'value': '"""cases"""', 'className': '"""radio"""'}), "(id='datatype', options=[{'label': 'Infection Rate', 'value':\n 'cases'}, {'label': 'Death Rate', 'value': 'deaths'}], value='cases',\n className='radio')\n", (777, 935), True, 'import dash_core_components as dcc\n'), ((1000, 1158), 'dash_core_components.DatePickerSingle', 'dcc.DatePickerSingle', ([], {'id': '"""dateselector"""', 'min_date_allowed': '"""2020-1-21"""', 'max_date_allowed': '"""2021-2-5"""', 'initial_visible_month': '"""2021-2-5"""', 'date': '"""2021-2-5"""'}), "(id='dateselector', min_date_allowed='2020-1-21',\n max_date_allowed='2021-2-5', initial_visible_month='2021-2-5', date=\n '2021-2-5')\n", (1020, 1158), True, 'import dash_core_components as dcc\n'), ((2247, 2298), 'pandas.read_csv', 'pd.read_csv', (['"""us-counties.csv"""'], {'dtype': "{'fips': str}"}), "('us-counties.csv', dtype={'fips': str})\n", (2258, 2298), True, 'import pandas as pd\n'), ((1790, 1820), 'dash.dependencies.Output', 'Output', (['"""choropleth"""', '"""figure"""'], {}), "('choropleth', 'figure')\n", (1796, 1820), False, 'from dash.dependencies import Input, Output\n'), ((1828, 1857), 'dash.dependencies.Input', 'Input', (['"""dateselector"""', '"""date"""'], {}), "('dateselector', 'date')\n", (1833, 1857), False, 'from dash.dependencies import Input, Output\n'), ((1864, 1890), 'dash.dependencies.Input', 'Input', (['"""datatype"""', '"""value"""'], {}), "('datatype', 'value')\n", (1869, 1890), False, 'from dash.dependencies import Input, Output\n'), ((1224, 1280), 'dash_html_components.H1', 'html.H1', (['"""COVID-19 Date Selector"""'], {'className': '"""display-3"""'}), "('COVID-19 Date Selector', className='display-3')\n", (1231, 1280), True, 'import dash_html_components as html\n'), ((1291, 1370), 'dash_html_components.P', 'html.P', (['"""Visualize the spread of the virus on a specific day"""'], {'className': '"""lead"""'}), "('Visualize the spread of the virus on a specific day', className='lead')\n", (1297, 1370), True, 'import dash_html_components as html\n'), ((1419, 1444), 'dash_html_components.Hr', 'html.Hr', ([], {'className': '"""my-2"""'}), "(className='my-2')\n", (1426, 1444), True, 'import dash_html_components as html\n'), ((1455, 1473), 'dash_html_components.P', 'html.P', (['"""@cjoshi7"""'], {}), "('@cjoshi7')\n", (1461, 1473), True, 'import dash_html_components as html\n'), ((1509, 1524), 'dash_html_components.P', 'html.P', (['buttons'], {}), '(buttons)\n', (1515, 1524), True, 'import dash_html_components as html\n'), ((1568, 1587), 'dash_html_components.Div', 'html.Div', (['jumbotron'], {}), '(jumbotron)\n', (1576, 1587), True, 'import dash_html_components as html\n'), ((1594, 1617), 'dash_html_components.Div', 'html.Div', (['date_selector'], {}), '(date_selector)\n', (1602, 1617), True, 'import dash_html_components as html\n'), ((1624, 1646), 'dash_html_components.Div', 'html.Div', (['radiobuttons'], {}), '(radiobuttons)\n', (1632, 1646), True, 'import dash_html_components as html\n'), ((2083, 2187), 'urllib.request.urlopen', 'urlopen', (['"""https://raw.githubusercontent.com/plotly/datasets/master/geojson-counties-fips.json"""'], {}), "(\n 'https://raw.githubusercontent.com/plotly/datasets/master/geojson-counties-fips.json'\n )\n", (2090, 2187), False, 'from urllib.request import urlopen\n'), ((2211, 2230), 'json.load', 'json.load', (['response'], {}), '(response)\n', (2220, 2230), False, 'import json\n'), ((481, 540), 'dash_bootstrap_components.Button', 'dbc.Button', (['"""Documentation"""'], {'color': '"""primary"""', 'type': '"""submit"""'}), "('Documentation', color='primary', type='submit')\n", (491, 540), True, 'import dash_bootstrap_components as dbc\n'), ((671, 733), 'dash_bootstrap_components.Button', 'dbc.Button', (['"""Download Dataset"""'], {'color': '"""primary"""', 'type': '"""submit"""'}), "('Download Dataset', color='primary', type='submit')\n", (681, 733), True, 'import dash_bootstrap_components as dbc\n'), ((1673, 1699), 'dash_core_components.Graph', 'dcc.Graph', ([], {'id': '"""choropleth"""'}), "(id='choropleth')\n", (1682, 1699), True, 'import dash_core_components as dcc\n')]
|
from __future__ import annotations
import threading
import numpy as np
from astropy.coordinates import SkyCoord
import astropy.units as u
from typing import Tuple, List
import random
from pyobs.object import Object
from pyobs.utils.enums import MotionStatus
class SimTelescope(Object):
"""A simulated telescope on an equitorial mount."""
__module__ = 'pyobs.utils.simulation'
def __init__(self, world: 'SimWorld', position: Tuple[float, float] = None, offsets: Tuple[float, float] = None,
pointing_offset: Tuple[float, float] = None, move_accuracy: float = 2.,
speed: float = 20., focus: float = 50, filters: List[str] = None, filter: str = 'clear',
drift: Tuple[float, float] = None, focal_length: float = 5000., *args, **kwargs):
"""Initializes new telescope.
Args:
world: World object.
position: RA/Dec tuple with position of telescope in degrees.
offsets: RA/Dec offsets of telescope in arcsecs.
pointing_offset: Pointing offset in RA/Dec in arcsecs.
move_accuracy: Accuracy of movements in RA/Dec, i.e. random error after any movement [arcsec].
speed: Speed of telescope in deg/sec.
focus: Telescope focus.
filters: List of filters.
filter: Current filter.
drift: RA/Dec drift of telescope in arcsec/sec.
focal_length: Focal length of telescope in mm.
"""
Object.__init__(self, *args, **kwargs)
# store
self.world = world
self.status = MotionStatus.IDLE
self.status_callback = None
# init
self._position = SkyCoord(0. * u.deg, 0. * u.deg, frame='icrs') if position is None else \
SkyCoord(position[0] * u.deg, position[1] * u.deg, frame='icrs')
self._offsets = (0., 0.) if offsets is None else offsets
self.pointing_offset = (20., 2.) if pointing_offset is None else pointing_offset
self.move_accuracy = (1, 1) if move_accuracy is None else move_accuracy
self.speed = speed # telescope speed in deg/sec
self.focus = focus
self.filters = ['clear', 'B', 'V', 'R'] if filters is None else filters
self.filter = filter
self.drift = (0.01, 0.0001) if drift is None else drift # arcsec/sec in RA/Dec
self.focal_length = focal_length
# private stuff
self._drift = (0., 0.)
self._dest_coords = None
# locks
self._pos_lock = threading.RLock()
# threads
self.add_thread_func(self._move_thread)
@property
def position(self):
return self._position
@property
def offsets(self):
return self._offsets
def _change_motion_status(self, status: MotionStatus):
"""Change the current motion status.
Args:
status: New motion status
"""
# call callback
if self.status_callback is not None and status != self.status:
self.status_callback(status)
# set it
self.status = status
@property
def real_pos(self):
# calculate offsets
dra = (self._offsets[0] * u.deg + self._drift[0] * u.arcsec) / np.cos(np.radians(self._position.dec.degree))
ddec = self._offsets[1] * u.deg + self._drift[1] * u.arcsec
# return position
with self._pos_lock:
return SkyCoord(ra=self._position.ra + dra,
dec=self._position.dec + ddec,
frame='icrs')
def move_ra_dec(self, coords):
"""Move telescope to given RA/Dec position.
Args:
coords: Destination coordinates.
"""
# change status
self._change_motion_status(MotionStatus.SLEWING)
# calculate random RA/Dec offsets
acc = self.move_accuracy / 3600.
ra = random.gauss(coords.ra.degree, acc / np.cos(np.radians(coords.dec.degree))) * u.deg
dec = random.gauss(coords.dec.degree, acc) * u.deg
# set coordinates
self._dest_coords = SkyCoord(ra=ra, dec=dec, frame='icrs')
def set_offsets(self, dra, ddec):
"""Move RA/Dec offsets.
Args:
dra: RA offset [deg]
ddec: Dec offset [deg]
"""
# calculate random RA/Dec offsets
acc = self.move_accuracy / 3600.
ra, dec = random.gauss(dra, acc), random.gauss(ddec, acc)
# set offsets
self._offsets = (ra, dec)
def _move_thread(self):
"""Move the telescope over time."""
# run until closed
while not self.closing.is_set():
# do we have destination coordinates?
if self._dest_coords is not None:
# calculate moving vector
vra = (self._dest_coords.ra.degree - self._position.ra.degree) * \
np.cos(np.radians(self._position.dec.degree))
vdec = self._dest_coords.dec.degree - self._position.dec.degree
# get direction
length = np.sqrt(vra**2 + vdec**2)
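                # length is the remaining angular distance in degrees; with the
                # ~1 s loop interval (see closing.wait below), speed (deg/sec)
                # is roughly the distance covered per iteration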
# do we reach target?
if length < self.speed:
# set it
with self._pos_lock:
# set position and reset destination
self._change_motion_status(MotionStatus.TRACKING)
self._position = self._dest_coords
self._dest_coords = None
# set some random drift around the pointing error
self._drift = (random.gauss(self.pointing_offset[0], self.pointing_offset[0] / 10.),
random.gauss(self.pointing_offset[1], self.pointing_offset[1] / 10.))
else:
# norm vector and get movement
dra = vra / length * self.speed / np.cos(np.radians(self._position.dec.degree)) * u.deg
ddec = vdec / length * self.speed * u.deg
# apply it
with self._pos_lock:
self._change_motion_status(MotionStatus.SLEWING)
self._position = SkyCoord(ra=self._position.ra + dra,
dec=self._position.dec + ddec,
frame='icrs')
else:
# no movement, just drift
# calculate constant drift
drift_ra = random.gauss(self.drift[0], self.drift[0] / 10.)
drift_dec = random.gauss(self.drift[1], self.drift[1] / 10.)
# and apply it
with self._pos_lock:
self._drift = (self._drift[0] + drift_ra, self._drift[1] + drift_dec)
# sleep a second
self.closing.wait(1)
__all__ = ['SimTelescope']
|
[
"numpy.radians",
"threading.RLock",
"random.gauss",
"astropy.coordinates.SkyCoord",
"pyobs.object.Object.__init__",
"numpy.sqrt"
] |
[((1494, 1532), 'pyobs.object.Object.__init__', 'Object.__init__', (['self', '*args'], {}), '(self, *args, **kwargs)\n', (1509, 1532), False, 'from pyobs.object import Object\n'), ((2538, 2555), 'threading.RLock', 'threading.RLock', ([], {}), '()\n', (2553, 2555), False, 'import threading\n'), ((4115, 4153), 'astropy.coordinates.SkyCoord', 'SkyCoord', ([], {'ra': 'ra', 'dec': 'dec', 'frame': '"""icrs"""'}), "(ra=ra, dec=dec, frame='icrs')\n", (4123, 4153), False, 'from astropy.coordinates import SkyCoord\n'), ((1694, 1742), 'astropy.coordinates.SkyCoord', 'SkyCoord', (['(0.0 * u.deg)', '(0.0 * u.deg)'], {'frame': '"""icrs"""'}), "(0.0 * u.deg, 0.0 * u.deg, frame='icrs')\n", (1702, 1742), False, 'from astropy.coordinates import SkyCoord\n'), ((1780, 1844), 'astropy.coordinates.SkyCoord', 'SkyCoord', (['(position[0] * u.deg)', '(position[1] * u.deg)'], {'frame': '"""icrs"""'}), "(position[0] * u.deg, position[1] * u.deg, frame='icrs')\n", (1788, 1844), False, 'from astropy.coordinates import SkyCoord\n'), ((3440, 3526), 'astropy.coordinates.SkyCoord', 'SkyCoord', ([], {'ra': '(self._position.ra + dra)', 'dec': '(self._position.dec + ddec)', 'frame': '"""icrs"""'}), "(ra=self._position.ra + dra, dec=self._position.dec + ddec, frame=\n 'icrs')\n", (3448, 3526), False, 'from astropy.coordinates import SkyCoord\n'), ((4015, 4051), 'random.gauss', 'random.gauss', (['coords.dec.degree', 'acc'], {}), '(coords.dec.degree, acc)\n', (4027, 4051), False, 'import random\n'), ((4422, 4444), 'random.gauss', 'random.gauss', (['dra', 'acc'], {}), '(dra, acc)\n', (4434, 4444), False, 'import random\n'), ((4446, 4469), 'random.gauss', 'random.gauss', (['ddec', 'acc'], {}), '(ddec, acc)\n', (4458, 4469), False, 'import random\n'), ((3258, 3295), 'numpy.radians', 'np.radians', (['self._position.dec.degree'], {}), '(self._position.dec.degree)\n', (3268, 3295), True, 'import numpy as np\n'), ((5097, 5126), 'numpy.sqrt', 'np.sqrt', (['(vra ** 2 + vdec ** 2)'], {}), '(vra ** 2 + vdec ** 2)\n', (5104, 5126), True, 'import numpy as np\n'), ((6552, 6601), 'random.gauss', 'random.gauss', (['self.drift[0]', '(self.drift[0] / 10.0)'], {}), '(self.drift[0], self.drift[0] / 10.0)\n', (6564, 6601), False, 'import random\n'), ((6629, 6678), 'random.gauss', 'random.gauss', (['self.drift[1]', '(self.drift[1] / 10.0)'], {}), '(self.drift[1], self.drift[1] / 10.0)\n', (6641, 6678), False, 'import random\n'), ((3961, 3990), 'numpy.radians', 'np.radians', (['coords.dec.degree'], {}), '(coords.dec.degree)\n', (3971, 3990), True, 'import numpy as np\n'), ((4920, 4957), 'numpy.radians', 'np.radians', (['self._position.dec.degree'], {}), '(self._position.dec.degree)\n', (4930, 4957), True, 'import numpy as np\n'), ((6239, 6325), 'astropy.coordinates.SkyCoord', 'SkyCoord', ([], {'ra': '(self._position.ra + dra)', 'dec': '(self._position.dec + ddec)', 'frame': '"""icrs"""'}), "(ra=self._position.ra + dra, dec=self._position.dec + ddec, frame=\n 'icrs')\n", (6247, 6325), False, 'from astropy.coordinates import SkyCoord\n'), ((5629, 5698), 'random.gauss', 'random.gauss', (['self.pointing_offset[0]', '(self.pointing_offset[0] / 10.0)'], {}), '(self.pointing_offset[0], self.pointing_offset[0] / 10.0)\n', (5641, 5698), False, 'import random\n'), ((5738, 5807), 'random.gauss', 'random.gauss', (['self.pointing_offset[1]', '(self.pointing_offset[1] / 10.0)'], {}), '(self.pointing_offset[1], self.pointing_offset[1] / 10.0)\n', (5750, 5807), False, 'import random\n'), ((5943, 5980), 'numpy.radians', 'np.radians', (['self._position.dec.degree'], {}), '(self._position.dec.degree)\n', (5953, 5980), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
import random
import scrapy
from scrapy import Request
from ip_proxies.spiders.base import BaseSpider
from ip_proxies.items import IpProxiesItem
from ip_proxies.settings import TEST_URLS, LOG_FILE
class JiangxianliSpider(BaseSpider):
name = 'jiangxianli'
# allowed_domains = ['jiangxianli.com']
start_urls = ['http://ip.jiangxianli.com/?page=1']
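    # Give this spider its own log file: rewrite "log/" to "log/jiangxianli__"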
custom_settings = {
'LOG_FILE': LOG_FILE.replace('log/', f'log/{name}__', 1),
}
def parse(self, response):
# print(response.url)
        # List of proxy IP info rows on the current page
ip_info_li = response.xpath('//table[@class="table table-hover table-bordered table-striped"]/tbody//tr')
        # Titles for the columns: IP, port, anonymity, net type
title_li = ['ip', 'port', 'anonymity', 'net_type']
for i in ip_info_li:
item = IpProxiesItem()
td_li = i.xpath('./td/text()').getall()
            # This site only has two anonymity categories: "高匿" (elite) and "透明" (transparent)
            # We don't need "透明" (transparent) proxies, so skip them
if td_li[3] in '透明':
continue
for k, v in zip(title_li, td_li[1:5]):
item[k] = v
            # td_li[5] -> e.g. 中国 广东 汕尾 (location)
            # td_li[6] -> e.g. 联通 (carrier)
            # only the two fields combined form the data we need
item['ip_location'] = ' '.join(td_li[5:7]) if len(td_li) == 12 else ''
if not item['net_type']:
item['net_type'] = 'HTTP'
            # Assemble a proxy from the above data in the form http://some_proxy_server:port
proxy = item['net_type'].lower() + '://' + item['ip'] + ':' + item['port']
            # dont_filter=True is required because proxy availability is always verified against the same few fixed pages
request = Request(url=random.choice(TEST_URLS), headers=self.headers,
meta={'proxy': proxy, 'item': item},
callback=self.verify_porxy, dont_filter=True)
yield request
        # The number of pages on this site is not fixed; it may be 2-3 pages or more.
        # Decide by checking whether the "next page" link can still be clicked.
if_next = response.xpath('//ul[@class="pagination"]/li')
if if_next:
if_next = if_next[-1]
if not if_next.xpath('./@class'):
next_url = if_next.xpath('./a/@href').get()
next_request = Request(url=next_url, callback=self.parse, dont_filter=True)
yield next_request
|
[
"random.choice",
"ip_proxies.settings.LOG_FILE.replace",
"ip_proxies.items.IpProxiesItem",
"scrapy.Request"
] |
[((429, 473), 'ip_proxies.settings.LOG_FILE.replace', 'LOG_FILE.replace', (['"""log/"""', 'f"""log/{name}__"""', '(1)'], {}), "('log/', f'log/{name}__', 1)\n", (445, 473), False, 'from ip_proxies.settings import TEST_URLS, LOG_FILE\n'), ((819, 834), 'ip_proxies.items.IpProxiesItem', 'IpProxiesItem', ([], {}), '()\n', (832, 834), False, 'from ip_proxies.items import IpProxiesItem\n'), ((2118, 2178), 'scrapy.Request', 'Request', ([], {'url': 'next_url', 'callback': 'self.parse', 'dont_filter': '(True)'}), '(url=next_url, callback=self.parse, dont_filter=True)\n', (2125, 2178), False, 'from scrapy import Request\n'), ((1584, 1608), 'random.choice', 'random.choice', (['TEST_URLS'], {}), '(TEST_URLS)\n', (1597, 1608), False, 'import random\n')]
|
# coding: utf-8
"""
Main commands available for flatisfy.
"""
from __future__ import absolute_import, print_function, unicode_literals
import collections
import logging
import os
import flatisfy.filters
from flatisfy import database
from flatisfy import email
from flatisfy.models import flat as flat_model
from flatisfy.models import postal_code as postal_code_model
from flatisfy.models import public_transport as public_transport_model
from flatisfy import fetch
from flatisfy import tools
from flatisfy.filters import metadata
from flatisfy.web import app as web_app
LOGGER = logging.getLogger(__name__)
def filter_flats_list(config, constraint_name, flats_list, fetch_details=True):
"""
Filter the available flats list. Then, filter it according to criteria.
:param config: A config dict.
:param constraint_name: The constraint name that the ``flats_list`` should
satisfy.
:param fetch_details: Whether additional details should be fetched between
the two passes.
:param flats_list: The initial list of flat objects to filter.
:return: A dict mapping flat status and list of flat objects.
"""
# Add the flatisfy metadata entry and prepare the flat objects
flats_list = metadata.init(flats_list, constraint_name)
# Get the associated constraint from config
try:
constraint = config["constraints"][constraint_name]
except KeyError:
LOGGER.error(
"Missing constraint %s. Skipping filtering for these posts.",
constraint_name
)
return {
"new": [],
"duplicate": [],
"ignored": []
}
first_pass_result = collections.defaultdict(list)
second_pass_result = collections.defaultdict(list)
third_pass_result = collections.defaultdict(list)
# Do a first pass with the available infos to try to remove as much
# unwanted postings as possible
if config["passes"] > 0:
first_pass_result = flatisfy.filters.first_pass(flats_list,
constraint,
config)
else:
first_pass_result["new"] = flats_list
# Load additional infos
if fetch_details:
for i, flat in enumerate(first_pass_result["new"]):
details = fetch.fetch_details(config, flat["id"])
first_pass_result["new"][i] = tools.merge_dicts(flat, details)
# Do a second pass to consolidate all the infos we found and make use of
# additional infos
if config["passes"] > 1:
second_pass_result = flatisfy.filters.second_pass(
first_pass_result["new"], constraint, config
)
else:
second_pass_result["new"] = first_pass_result["new"]
# Do a third pass to deduplicate better
if config["passes"] > 2:
third_pass_result = flatisfy.filters.third_pass(
second_pass_result["new"],
config
)
else:
third_pass_result["new"] = second_pass_result["new"]
return {
"new": third_pass_result["new"],
"duplicate": (
first_pass_result["duplicate"] +
second_pass_result["duplicate"] +
third_pass_result["duplicate"]
),
"ignored": (
first_pass_result["ignored"] +
second_pass_result["ignored"] +
third_pass_result["ignored"]
)
}
def filter_fetched_flats(config, fetched_flats, fetch_details=True):
"""
Filter the available flats list. Then, filter it according to criteria.
:param config: A config dict.
:param fetch_details: Whether additional details should be fetched between
the two passes.
:param fetched_flats: The initial dict mapping constraints to the list of
fetched flat objects to filter.
:return: A dict mapping constraints to a dict mapping flat status and list
of flat objects.
"""
for constraint_name, flats_list in fetched_flats.items():
fetched_flats[constraint_name] = filter_flats_list(
config,
constraint_name,
flats_list,
fetch_details
)
return fetched_flats
def import_and_filter(config, load_from_db=False):
"""
Fetch the available flats list. Then, filter it according to criteria.
Finally, store it in the database.
:param config: A config dict.
:param load_from_db: Whether to load flats from database or fetch them
using WebOOB.
:return: ``None``.
"""
# Fetch and filter flats list
if load_from_db:
fetched_flats = fetch.load_flats_from_db(config)
else:
fetched_flats = fetch.fetch_flats(config)
# Do not fetch additional details if we loaded data from the db.
flats_by_status = filter_fetched_flats(config, fetched_flats=fetched_flats,
fetch_details=(not load_from_db))
# Create database connection
get_session = database.init_db(config["database"], config["search_index"])
new_flats = []
LOGGER.info("Merging fetched flats in database...")
# Flatten the flats_by_status dict
flatten_flats_by_status = collections.defaultdict(list)
for flats in flats_by_status.values():
for status, flats_list in flats.items():
flatten_flats_by_status[status].extend(flats_list)
with get_session() as session:
# Set is_expired to true for all existing flats.
# This will be set back to false if we find them during importing.
for flat in session.query(flat_model.Flat).all():
            flat.is_expired = True
for status, flats_list in flatten_flats_by_status.items():
# Build SQLAlchemy Flat model objects for every available flat
flats_objects = {
flat_dict["id"]: flat_model.Flat.from_dict(flat_dict)
for flat_dict in flats_list
}
if flats_objects:
# If there are some flats, try to merge them with the ones in
# db
existing_flats_queries = session.query(flat_model.Flat).filter(
flat_model.Flat.id.in_(flats_objects.keys())
)
for each in existing_flats_queries.all():
# For each flat to merge, take care not to overwrite the
# status if the user defined it
flat_object = flats_objects[each.id]
if each.status in flat_model.AUTOMATED_STATUSES:
flat_object.status = getattr(
flat_model.FlatStatus, status
)
else:
flat_object.status = each.status
# Every flat we fetched isn't expired
flat_object.is_expired = False
# For each flat already in the db, merge it (UPDATE)
# instead of adding it
session.merge(flats_objects.pop(each.id))
# For any other flat, it is not already in the database, so we can
# just set the status field without worrying
for flat in flats_objects.values():
flat.status = getattr(flat_model.FlatStatus, status)
if flat.status == flat_model.FlatStatus.new:
new_flats.append(flat)
session.add_all(flats_objects.values())
if config["send_email"]:
email.send_notification(config, new_flats)
# Touch a file to indicate last update timestamp
ts_file = os.path.join(
config["data_directory"],
"timestamp"
)
with open(ts_file, 'w'):
os.utime(ts_file, None)
LOGGER.info("Done!")
def purge_db(config):
"""
Purge the database.
:param config: A config dict.
:return: ``None``
"""
get_session = database.init_db(config["database"], config["search_index"])
with get_session() as session:
# Delete every flat in the db
LOGGER.info("Purge all flats from the database.")
for flat in session.query(flat_model.Flat).all():
# Use (slower) deletion by object, to ensure whoosh index is
# updated
session.delete(flat)
LOGGER.info("Purge all postal codes from the database.")
session.query(postal_code_model.PostalCode).delete()
LOGGER.info("Purge all public transportations from the database.")
session.query(public_transport_model.PublicTransport).delete()
def serve(config):
"""
Serve the web app.
:param config: A config dict.
:return: ``None``, long-running process.
"""
app = web_app.get_app(config)
server = config.get("webserver", None)
if not server:
# Default webserver is quiet, as Bottle is used with Canister for
# standard logging
server = web_app.QuietWSGIRefServer
app.run(host=config["host"], port=config["port"], server=server)
|
[
"flatisfy.web.app.get_app",
"flatisfy.fetch.load_flats_from_db",
"flatisfy.filters.metadata.init",
"flatisfy.fetch.fetch_details",
"flatisfy.email.send_notification",
"collections.defaultdict",
"flatisfy.fetch.fetch_flats",
"os.utime",
"flatisfy.database.init_db",
"os.path.join",
"flatisfy.models.flat.Flat.from_dict",
"logging.getLogger",
"flatisfy.tools.merge_dicts"
] |
[((584, 611), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (601, 611), False, 'import logging\n'), ((1237, 1279), 'flatisfy.filters.metadata.init', 'metadata.init', (['flats_list', 'constraint_name'], {}), '(flats_list, constraint_name)\n', (1250, 1279), False, 'from flatisfy.filters import metadata\n'), ((1683, 1712), 'collections.defaultdict', 'collections.defaultdict', (['list'], {}), '(list)\n', (1706, 1712), False, 'import collections\n'), ((1738, 1767), 'collections.defaultdict', 'collections.defaultdict', (['list'], {}), '(list)\n', (1761, 1767), False, 'import collections\n'), ((1792, 1821), 'collections.defaultdict', 'collections.defaultdict', (['list'], {}), '(list)\n', (1815, 1821), False, 'import collections\n'), ((5014, 5074), 'flatisfy.database.init_db', 'database.init_db', (["config['database']", "config['search_index']"], {}), "(config['database'], config['search_index'])\n", (5030, 5074), False, 'from flatisfy import database\n'), ((5221, 5250), 'collections.defaultdict', 'collections.defaultdict', (['list'], {}), '(list)\n', (5244, 5250), False, 'import collections\n'), ((7653, 7704), 'os.path.join', 'os.path.join', (["config['data_directory']", '"""timestamp"""'], {}), "(config['data_directory'], 'timestamp')\n", (7665, 7704), False, 'import os\n'), ((7953, 8013), 'flatisfy.database.init_db', 'database.init_db', (["config['database']", "config['search_index']"], {}), "(config['database'], config['search_index'])\n", (7969, 8013), False, 'from flatisfy import database\n'), ((8754, 8777), 'flatisfy.web.app.get_app', 'web_app.get_app', (['config'], {}), '(config)\n', (8769, 8777), True, 'from flatisfy.web import app as web_app\n'), ((4644, 4676), 'flatisfy.fetch.load_flats_from_db', 'fetch.load_flats_from_db', (['config'], {}), '(config)\n', (4668, 4676), False, 'from flatisfy import fetch\n'), ((4711, 4736), 'flatisfy.fetch.fetch_flats', 'fetch.fetch_flats', (['config'], {}), '(config)\n', (4728, 4736), False, 'from flatisfy import fetch\n'), ((7764, 7787), 'os.utime', 'os.utime', (['ts_file', 'None'], {}), '(ts_file, None)\n', (7772, 7787), False, 'import os\n'), ((2348, 2387), 'flatisfy.fetch.fetch_details', 'fetch.fetch_details', (['config', "flat['id']"], {}), "(config, flat['id'])\n", (2367, 2387), False, 'from flatisfy import fetch\n'), ((2430, 2462), 'flatisfy.tools.merge_dicts', 'tools.merge_dicts', (['flat', 'details'], {}), '(flat, details)\n', (2447, 2462), False, 'from flatisfy import tools\n'), ((7542, 7584), 'flatisfy.email.send_notification', 'email.send_notification', (['config', 'new_flats'], {}), '(config, new_flats)\n', (7565, 7584), False, 'from flatisfy import email\n'), ((5874, 5910), 'flatisfy.models.flat.Flat.from_dict', 'flat_model.Flat.from_dict', (['flat_dict'], {}), '(flat_dict)\n', (5899, 5910), True, 'from flatisfy.models import flat as flat_model\n')]
|
import uvicorn
from fastapi import FastAPI, Depends
from sqlalchemy.orm import declarative_base, sessionmaker
from fastapi_quickcrud import CrudMethods
from fastapi_quickcrud import crud_router_builder
from fastapi_quickcrud import sqlalchemy_to_pydantic
from fastapi_quickcrud.misc.memory_sql import sync_memory_db
app = FastAPI()
Base = declarative_base()
metadata = Base.metadata
from sqlalchemy import CHAR, Column, ForeignKey, Integer, Table
from sqlalchemy.orm import relationship
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
metadata = Base.metadata
association_table = Table('association', Base.metadata,
Column('left_id', ForeignKey('left.id')),
Column('right_id', ForeignKey('right.id'))
)
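# Parent and Child are linked many-to-many through the association table above.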
class Parent(Base):
__tablename__ = 'left'
id = Column(Integer, primary_key=True)
children = relationship("Child",
secondary=association_table)
class Child(Base):
__tablename__ = 'right'
id = Column(Integer, primary_key=True)
name = Column(CHAR, nullable=True)
user_model_m2m = sqlalchemy_to_pydantic(db_model=association_table,
crud_methods=[
CrudMethods.FIND_MANY,
CrudMethods.UPSERT_ONE,
CrudMethods.UPDATE_MANY,
CrudMethods.DELETE_MANY,
CrudMethods.PATCH_MANY,
],
exclude_columns=[])
user_model_set = sqlalchemy_to_pydantic(db_model=Parent,
crud_methods=[
CrudMethods.FIND_MANY,
CrudMethods.FIND_ONE,
CrudMethods.CREATE_ONE,
CrudMethods.UPDATE_MANY,
CrudMethods.UPDATE_ONE,
CrudMethods.DELETE_ONE,
CrudMethods.DELETE_MANY,
CrudMethods.PATCH_MANY,
],
exclude_columns=[])
friend_model_set = sqlalchemy_to_pydantic(db_model=Child,
crud_methods=[
CrudMethods.FIND_MANY,
CrudMethods.UPSERT_MANY,
CrudMethods.UPDATE_MANY,
CrudMethods.DELETE_MANY,
CrudMethods.CREATE_ONE,
CrudMethods.PATCH_MANY,
],
exclude_columns=[])
crud_route_1 = crud_router_builder(crud_models=user_model_set,
db_model=Parent,
prefix="/Parent",
dependencies=[],
async_mode=True,
tags=["Parent"]
)
crud_route_3 = crud_router_builder(crud_models=user_model_m2m,
db_model=association_table,
prefix="/Parent2child",
dependencies=[],
async_mode=True,
tags=["m2m"]
)
crud_route_2 = crud_router_builder(crud_models=friend_model_set,
db_model=Child,
async_mode=True,
prefix="/Child",
dependencies=[],
tags=["Child"]
)
post_model = friend_model_set.POST[CrudMethods.CREATE_ONE]
sync_memory_db.create_memory_table(Child)
@app.post("/hello",
status_code=201,
tags=["Child"],
response_model=post_model.responseModel,
dependencies=[])
async def my_api(
body: post_model.requestBodyModel = Depends(post_model.requestBodyModel),
session=Depends(sync_memory_db.get_memory_db_session)
):
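    # Manually persist a Child row using the request-body model generated by
    # fastapi_quickcrud and the in-memory database session it provides.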
db_item = Child(**body.__dict__)
session.add(db_item)
session.commit()
session.refresh(db_item)
return db_item.__dict__
app.include_router(crud_route_1)
app.include_router(crud_route_2)
app.include_router(crud_route_3)
uvicorn.run(app, host="0.0.0.0", port=8000, debug=False)
|
[
"fastapi_quickcrud.sqlalchemy_to_pydantic",
"fastapi_quickcrud.crud_router_builder",
"sqlalchemy.ForeignKey",
"sqlalchemy.ext.declarative.declarative_base",
"sqlalchemy.orm.relationship",
"uvicorn.run",
"fastapi.FastAPI",
"sqlalchemy.Column",
"fastapi.Depends",
"fastapi_quickcrud.misc.memory_sql.sync_memory_db.create_memory_table"
] |
[((324, 333), 'fastapi.FastAPI', 'FastAPI', ([], {}), '()\n', (331, 333), False, 'from fastapi import FastAPI, Depends\n'), ((342, 360), 'sqlalchemy.ext.declarative.declarative_base', 'declarative_base', ([], {}), '()\n', (358, 360), False, 'from sqlalchemy.ext.declarative import declarative_base\n'), ((555, 573), 'sqlalchemy.ext.declarative.declarative_base', 'declarative_base', ([], {}), '()\n', (571, 573), False, 'from sqlalchemy.ext.declarative import declarative_base\n'), ((1156, 1371), 'fastapi_quickcrud.sqlalchemy_to_pydantic', 'sqlalchemy_to_pydantic', ([], {'db_model': 'association_table', 'crud_methods': '[CrudMethods.FIND_MANY, CrudMethods.UPSERT_ONE, CrudMethods.UPDATE_MANY,\n CrudMethods.DELETE_MANY, CrudMethods.PATCH_MANY]', 'exclude_columns': '[]'}), '(db_model=association_table, crud_methods=[\n CrudMethods.FIND_MANY, CrudMethods.UPSERT_ONE, CrudMethods.UPDATE_MANY,\n CrudMethods.DELETE_MANY, CrudMethods.PATCH_MANY], exclude_columns=[])\n', (1178, 1371), False, 'from fastapi_quickcrud import sqlalchemy_to_pydantic\n'), ((1725, 2002), 'fastapi_quickcrud.sqlalchemy_to_pydantic', 'sqlalchemy_to_pydantic', ([], {'db_model': 'Parent', 'crud_methods': '[CrudMethods.FIND_MANY, CrudMethods.FIND_ONE, CrudMethods.CREATE_ONE,\n CrudMethods.UPDATE_MANY, CrudMethods.UPDATE_ONE, CrudMethods.DELETE_ONE,\n CrudMethods.DELETE_MANY, CrudMethods.PATCH_MANY]', 'exclude_columns': '[]'}), '(db_model=Parent, crud_methods=[CrudMethods.FIND_MANY,\n CrudMethods.FIND_ONE, CrudMethods.CREATE_ONE, CrudMethods.UPDATE_MANY,\n CrudMethods.UPDATE_ONE, CrudMethods.DELETE_ONE, CrudMethods.DELETE_MANY,\n CrudMethods.PATCH_MANY], exclude_columns=[])\n', (1747, 2002), False, 'from fastapi_quickcrud import sqlalchemy_to_pydantic\n'), ((2487, 2719), 'fastapi_quickcrud.sqlalchemy_to_pydantic', 'sqlalchemy_to_pydantic', ([], {'db_model': 'Child', 'crud_methods': '[CrudMethods.FIND_MANY, CrudMethods.UPSERT_MANY, CrudMethods.UPDATE_MANY,\n CrudMethods.DELETE_MANY, CrudMethods.CREATE_ONE, CrudMethods.PATCH_MANY]', 'exclude_columns': '[]'}), '(db_model=Child, crud_methods=[CrudMethods.FIND_MANY,\n CrudMethods.UPSERT_MANY, CrudMethods.UPDATE_MANY, CrudMethods.\n DELETE_MANY, CrudMethods.CREATE_ONE, CrudMethods.PATCH_MANY],\n exclude_columns=[])\n', (2509, 2719), False, 'from fastapi_quickcrud import sqlalchemy_to_pydantic\n'), ((3129, 3267), 'fastapi_quickcrud.crud_router_builder', 'crud_router_builder', ([], {'crud_models': 'user_model_set', 'db_model': 'Parent', 'prefix': '"""/Parent"""', 'dependencies': '[]', 'async_mode': '(True)', 'tags': "['Parent']"}), "(crud_models=user_model_set, db_model=Parent, prefix=\n '/Parent', dependencies=[], async_mode=True, tags=['Parent'])\n", (3148, 3267), False, 'from fastapi_quickcrud import crud_router_builder\n'), ((3489, 3640), 'fastapi_quickcrud.crud_router_builder', 'crud_router_builder', ([], {'crud_models': 'user_model_m2m', 'db_model': 'association_table', 'prefix': '"""/Parent2child"""', 'dependencies': '[]', 'async_mode': '(True)', 'tags': "['m2m']"}), "(crud_models=user_model_m2m, db_model=association_table,\n prefix='/Parent2child', dependencies=[], async_mode=True, tags=['m2m'])\n", (3508, 3640), False, 'from fastapi_quickcrud import crud_router_builder\n'), ((3863, 3999), 'fastapi_quickcrud.crud_router_builder', 'crud_router_builder', ([], {'crud_models': 'friend_model_set', 'db_model': 'Child', 'async_mode': '(True)', 'prefix': '"""/Child"""', 'dependencies': '[]', 'tags': "['Child']"}), "(crud_models=friend_model_set, db_model=Child,\n async_mode=True, prefix='/Child', dependencies=[], tags=['Child'])\n", (3882, 3999), False, 'from fastapi_quickcrud import crud_router_builder\n'), ((4267, 4308), 'fastapi_quickcrud.misc.memory_sql.sync_memory_db.create_memory_table', 'sync_memory_db.create_memory_table', (['Child'], {}), '(Child)\n', (4301, 4308), False, 'from fastapi_quickcrud.misc.memory_sql import sync_memory_db\n'), ((4869, 4925), 'uvicorn.run', 'uvicorn.run', (['app'], {'host': '"""0.0.0.0"""', 'port': '(8000)', 'debug': '(False)'}), "(app, host='0.0.0.0', port=8000, debug=False)\n", (4880, 4925), False, 'import uvicorn\n'), ((878, 911), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)'}), '(Integer, primary_key=True)\n', (884, 911), False, 'from sqlalchemy import CHAR, Column, ForeignKey, Integer, Table\n'), ((927, 977), 'sqlalchemy.orm.relationship', 'relationship', (['"""Child"""'], {'secondary': 'association_table'}), "('Child', secondary=association_table)\n", (939, 977), False, 'from sqlalchemy.orm import relationship\n'), ((1064, 1097), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)'}), '(Integer, primary_key=True)\n', (1070, 1097), False, 'from sqlalchemy import CHAR, Column, ForeignKey, Integer, Table\n'), ((1109, 1136), 'sqlalchemy.Column', 'Column', (['CHAR'], {'nullable': '(True)'}), '(CHAR, nullable=True)\n', (1115, 1136), False, 'from sqlalchemy import CHAR, Column, ForeignKey, Integer, Table\n'), ((4525, 4561), 'fastapi.Depends', 'Depends', (['post_model.requestBodyModel'], {}), '(post_model.requestBodyModel)\n', (4532, 4561), False, 'from fastapi import FastAPI, Depends\n'), ((4579, 4624), 'fastapi.Depends', 'Depends', (['sync_memory_db.get_memory_db_session'], {}), '(sync_memory_db.get_memory_db_session)\n', (4586, 4624), False, 'from fastapi import FastAPI, Depends\n'), ((699, 720), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""left.id"""'], {}), "('left.id')\n", (709, 720), False, 'from sqlalchemy import CHAR, Column, ForeignKey, Integer, Table\n'), ((768, 790), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""right.id"""'], {}), "('right.id')\n", (778, 790), False, 'from sqlalchemy import CHAR, Column, ForeignKey, Integer, Table\n')]
|
##############################################################################
#
# Copyright (c) 2009 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
from relstorage.tests import TestCase
from relstorage.tests import MockCursor
from relstorage._util import consume
class RowBatcherTests(TestCase):
def getClass(self):
from relstorage.adapters.batch import RowBatcher
return RowBatcher
def test_delete_defer(self):
cursor = MockCursor()
batcher = self.getClass()(cursor)
batcher.delete_from("mytable", id=2)
self.assertEqual(cursor.executed, [])
self.assertEqual(batcher.rows_added, 1)
self.assertEqual(batcher.size_added, 0)
self.assertEqual(batcher.total_rows_inserted, 0)
self.assertEqual(batcher.total_rows_deleted, 0)
self.assertEqual(batcher.total_size_inserted, 0)
self.assertEqual(dict(batcher.deletes),
{('mytable', ('id',)): set([(2,)])})
def test_delete_multiple_column(self):
cursor = MockCursor()
batcher = self.getClass()(cursor)
batcher.delete_from("mytable", id=2, tid=10)
self.assertEqual(cursor.executed, [])
self.assertEqual(batcher.rows_added, 1)
self.assertEqual(batcher.size_added, 0)
self.assertEqual(dict(batcher.deletes),
{('mytable', ('id', 'tid')): set([(2, 10)])})
IN_ROWS_FLATTENED = False
delete_auto_flush = 'DELETE FROM mytable WHERE id IN (%s,%s)'
update_set_static_stmt = 'UPDATE pack_object SET foo=1 WHERE zoid IN (%s,%s)'
def _in(self, *params, **kw):
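        # Normalize the expected parameters of an IN clause: batchers with
        # IN_ROWS_FLATTENED (PostgreSQL's "= ANY (%s)") pass one array
        # parameter, the others pass one placeholder per value.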
do_sort = kw.pop("do_sort", True)
assert not kw
params = sorted(params) if do_sort else params
if self.IN_ROWS_FLATTENED:
l = list(params)
return (l,)
return tuple(params)
def test_delete_auto_flush(self):
cursor = MockCursor()
cursor.sort_sequence_params = True
batcher = self.getClass()(cursor, 2)
batcher.sorted_deletes = True
batcher.delete_from("mytable", id=2)
batcher.delete_from("mytable", id=1)
self.assertEqual(
cursor.executed,
[
(self.delete_auto_flush,
self._in(1, 2)
)
])
self.assertEqual(batcher.rows_added, 0)
self.assertEqual(batcher.size_added, 0)
self.assertEqual(batcher.deletes, {})
self.assertEqual(batcher.total_rows_inserted, 0)
self.assertEqual(batcher.total_rows_deleted, 2)
self.assertEqual(batcher.total_size_inserted, 0)
def test_update_set_static(self):
cursor = MockCursor()
batcher = self.getClass()(cursor, 2)
cnt = batcher.update_set_static(
'UPDATE pack_object SET foo=1',
zoid=iter((1, 2, 3, 4, 5, 6, 7))
)
self.assertEqual(cnt, 7)
self.assertEqual(
cursor.executed,
[
(self.update_set_static_stmt,
self._in(2, 1, do_sort=False)
),
(self.update_set_static_stmt,
self._in(4, 3, do_sort=False)
),
(self.update_set_static_stmt,
self._in(6, 5, do_sort=False)
),
(self.update_set_static_stmt.replace(',%s', ''),
self._in(7)
),
])
maxDiff = None
def test_insert_defer(self):
cursor = MockCursor()
batcher = self.getClass()(cursor)
batcher.insert_into(
"mytable (id, name)",
"%s, id || %s",
(1, 'a'),
rowkey=1,
size=3,
)
self.assertEqual(cursor.executed, [])
self.assertEqual(batcher.rows_added, 1)
self.assertEqual(batcher.size_added, 3)
self.assertEqual(batcher.inserts, {
('INSERT', 'mytable (id, name)', '%s, id || %s', ''): {1: (1, 'a')}
})
self.assertEqual(batcher.total_rows_inserted, 0)
self.assertEqual(batcher.total_rows_deleted, 0)
self.assertEqual(batcher.total_size_inserted, 0)
def test_insert_defer_multi_table(self):
cursor = MockCursor()
batcher = self.getClass()(cursor)
batcher.insert_into(
"mytable (id, name)",
"%s, id || %s",
(1, 'a'),
rowkey=1,
size=3,
)
batcher.insert_into(
"othertable (name)",
"?",
('a'),
rowkey=1,
size=1,
)
self.assertEqual(cursor.executed, [])
self.assertEqual(batcher.rows_added, 2)
self.assertEqual(batcher.size_added, 4)
self.assertEqual(dict(batcher.inserts), {
('INSERT', 'mytable (id, name)', '%s, id || %s', ''): {1: (1, 'a')},
('INSERT', 'othertable (name)', '?', ''): {1: ('a')},
})
self.assertEqual(batcher.total_rows_inserted, 0)
self.assertEqual(batcher.total_rows_deleted, 0)
self.assertEqual(batcher.total_size_inserted, 0)
def test_insert_replace(self):
cursor = MockCursor()
batcher = self.getClass()(cursor)
batcher.insert_into(
"mytable (id, name)",
"%s, id || %s",
(1, 'a'),
rowkey=1,
size=3,
command='REPLACE',
)
self.assertEqual(cursor.executed, [])
self.assertEqual(batcher.rows_added, 1)
self.assertEqual(batcher.size_added, 3)
self.assertEqual(batcher.inserts, {
('REPLACE', 'mytable (id, name)', '%s, id || %s', ''): {1: (1, 'a')}
})
def test_insert_duplicate(self):
# A second insert on the same rowkey replaces the first insert.
cursor = MockCursor()
batcher = self.getClass()(cursor)
batcher.insert_into(
"mytable (id, name)",
"%s, id || %s",
(1, 'a'),
rowkey=1,
size=3,
)
batcher.insert_into(
"mytable (id, name)",
"%s, id || %s",
(1, 'b'),
rowkey=1,
size=3,
)
self.assertEqual(cursor.executed, [])
self.assertEqual(batcher.rows_added, 2)
self.assertEqual(batcher.size_added, 6)
self.assertEqual(batcher.inserts, {
('INSERT', 'mytable (id, name)', '%s, id || %s', ''): {1: (1, 'b')}
})
def test_insert_auto_flush(self):
cursor = MockCursor()
batcher = self.getClass()(cursor)
batcher.size_limit = 10
batcher.insert_into(
"mytable (id, name)",
"%s, id || %s",
(1, 'a'),
rowkey=1,
size=5,
)
batcher.insert_into(
"mytable (id, name)",
"%s, id || %s",
(2, 'B'),
rowkey=2,
size=5,
)
self.assertEqual(
cursor.executed,
[(
'INSERT INTO mytable (id, name) VALUES\n'
'(%s, id || %s), '
'(%s, id || %s)\n',
(1, 'a', 2, 'B'))
])
self.assertEqual(batcher.rows_added, 0)
self.assertEqual(batcher.size_added, 0)
self.assertEqual(batcher.inserts, {})
self.assertEqual(batcher.total_rows_inserted, 2)
self.assertEqual(batcher.total_rows_deleted, 0)
self.assertEqual(batcher.total_size_inserted, 10)
def test_insert_auto_flush_multi_table(self):
cursor = MockCursor()
batcher = self.getClass()(cursor)
batcher.size_limit = 10
batcher.insert_into(
"mytable (id, name)",
"%s, id || %s",
(1, 'a'),
rowkey=1,
size=5,
)
batcher.insert_into(
"mytable (id, name)",
"%s, id || %s",
(2, 'B'),
rowkey=2,
size=5,
)
self.assertLength(cursor.executed, 1)
self.assertEqual(
cursor.executed[0][0],
'INSERT INTO mytable (id, name) VALUES\n'
'(%s, id || %s), '
'(%s, id || %s)\n')
self.assertEqual(
cursor.executed[0][1],
(1, 'a', 2, 'B')
)
self.assertEqual(batcher.rows_added, 0)
self.assertEqual(batcher.size_added, 0)
self.assertEqual(batcher.inserts, {})
self.assertEqual(batcher.total_rows_inserted, 2)
self.assertEqual(batcher.total_rows_deleted, 0)
self.assertEqual(batcher.total_size_inserted, 10)
flush_delete_one = 'DELETE FROM mytable WHERE id IN (?)'
def test_flush(self):
cursor = MockCursor()
batcher = self.getClass()(cursor, delete_placeholder="?")
# Make sure we preserve order in multi-column
batcher.sorted_deletes = True
batcher.delete_from("mytable", id=1)
batcher.insert_into(
"mytable (id, name)",
"%s, id || %s",
(1, 'a'),
rowkey=1,
size=5,
)
batcher.delete_from("mytable", id=1, key='abc')
batcher.delete_from("mytable", id=2, key='def')
batcher.flush()
self.assertEqual(cursor.executed, [
(self.flush_delete_one,
self._in(1)),
('DELETE FROM mytable WHERE (id=? AND key=?) OR (id=? AND key=?)',
(1, 'abc', 2, 'def')),
('INSERT INTO mytable (id, name) VALUES\n(%s, id || %s)\n',
(1, 'a')),
])
select_one = 'SELECT zoid,tid FROM object_state WHERE oids IN (%s)'
def test_select_one(self):
cursor = MockCursor()
batcher = self.getClass()(cursor)
consume(batcher.select_from(('zoid', 'tid'), 'object_state', oids=(1,)))
self.assertEqual(cursor.executed, [
(self.select_one,
self._in(1,))
])
select_multiple_one_batch = 'SELECT zoid,tid FROM object_state WHERE oids IN (%s,%s,%s,%s)'
def test_select_multiple_one_batch(self):
cursor = MockCursor()
cursor.sort_sequence_params = True
batcher = self.getClass()(cursor)
list(batcher.select_from(('zoid', 'tid'), 'object_state',
oids=(1, 2, 3, 4)))
self.assertEqual(cursor.executed, [
(self.select_multiple_one_batch,
self._in(1, 2, 3, 4))
])
select_multiple_many_batch = 'SELECT zoid,tid FROM object_state WHERE oids IN (%s,%s)'
def test_select_multiple_many_batch(self, batch_limit_attr='row_limit'):
cursor = MockCursor()
cursor.sort_sequence_params = True
cursor.many_results = [
[(1, 1)],
[(3, 1)],
[]
]
batcher = self.getClass()(cursor)
setattr(batcher, batch_limit_attr, 2)
rows = batcher.select_from(('zoid', 'tid'), 'object_state',
oids=iter((1, 2, 3, 4, 5)))
rows = list(rows)
self.assertEqual(cursor.executed, [
(self.select_multiple_many_batch,
self._in(1, 2)),
(self.select_multiple_many_batch,
self._in(3, 4)),
(self.select_one,
self._in(5)),
])
self.assertEqual(rows, [
(1, 1),
(3, 1)
])
def test_select_multiple_many_batch_bind_limit(self):
self.test_select_multiple_many_batch(batch_limit_attr='bind_limit')
def test_select_from_timeout(self):
from relstorage.tests import mock
from relstorage.adapters.interfaces import AggregateOperationTimeoutError
cursor = MockCursor()
cursor.sort_sequence_params = True
cursor.many_results = [
[(1, 1)],
[(2, 1)],
[(3, 1)],
[]
]
batcher = self.getClass()(cursor)
batcher.bind_limit = 1
batcher.perf_counter = mock.Mock()
# These will be the time values returned from perf_counter()
batcher.perf_counter.side_effect = (
12345, # Begin
12346, # First batch
12347, # Second batch
)
gener = batcher.select_from(('zoid', 'tid',), 'object_state',
timeout=2,
oids=[1, 2, 3, 4, 5])
rows = []
with self.assertRaises(AggregateOperationTimeoutError):
for row in gener:
rows.append(row)
# We ran exactly twice before the perf_counter exceeded the timeout.
self.assertEqual(rows, [
(1, 1),
(2, 1),
])
class OracleRowBatcherTests(TestCase):
def getClass(self):
from relstorage.adapters.oracle.batch import OracleRowBatcher
return OracleRowBatcher
def test_insert_one_row(self):
cursor = MockCursor()
batcher = self.getClass()(cursor, {})
batcher.insert_into(
"mytable (id, name)",
"%s, id || %s",
(1, 'a'),
rowkey=1,
size=3,
)
self.assertEqual(cursor.executed, [])
batcher.flush()
self.assertEqual(cursor.executed, [
('INSERT INTO mytable (id, name) VALUES (%s, id || %s)', (1, 'a')),
])
def test_insert_two_rows(self):
cursor = MockCursor()
batcher = self.getClass()(cursor, {})
batcher.insert_into(
"mytable (id, name)",
":id, :id || :name",
{'id': 1, 'name': 'a'},
rowkey=1,
size=3,
)
batcher.insert_into(
"mytable (id, name)",
":id, :id || :name",
{'id': 2, 'name': 'b'},
rowkey=2,
size=3,
)
self.assertEqual(cursor.executed, [])
batcher.flush()
self.assertEqual(
cursor.executed,
[(
'INSERT ALL\n'
'INTO mytable (id, name) VALUES (:id_0, :id_0 || :name_0)\n'
'INTO mytable (id, name) VALUES (:id_1, :id_1 || :name_1)\n'
'SELECT * FROM DUAL',
{'id_0': 1, 'id_1': 2, 'name_1': 'b', 'name_0': 'a'})
])
def test_insert_one_raw_row(self):
class MockRawType(object):
pass
cursor = MockCursor()
batcher = self.getClass()(cursor, {'rawdata': MockRawType})
batcher.insert_into(
"mytable (id, data)",
":id, :rawdata",
{'id': 1, 'rawdata': 'xyz'},
rowkey=1,
size=3,
)
batcher.flush()
self.assertEqual(cursor.executed, [
('INSERT INTO mytable (id, data) VALUES (:id, :rawdata)',
{'id': 1, 'rawdata': 'xyz'})
])
self.assertEqual(cursor.inputsizes, {'rawdata': MockRawType})
def test_insert_two_raw_rows(self):
class MockRawType(object):
pass
cursor = MockCursor()
batcher = self.getClass()(cursor, {'rawdata': MockRawType})
batcher.insert_into(
"mytable (id, data)",
":id, :rawdata",
{'id': 1, 'rawdata': 'xyz'},
rowkey=1,
size=3,
)
batcher.insert_into(
"mytable (id, data)",
":id, :rawdata",
{'id': 2, 'rawdata': 'abc'},
rowkey=2,
size=3,
)
batcher.flush()
self.assertEqual(
cursor.executed,
[(
'INSERT ALL\n'
'INTO mytable (id, data) VALUES (:id_0, :rawdata_0)\n'
'INTO mytable (id, data) VALUES (:id_1, :rawdata_1)\n'
'SELECT * FROM DUAL',
{'id_0': 1, 'id_1': 2, 'rawdata_0': 'xyz', 'rawdata_1': 'abc'})
])
self.assertEqual(cursor.inputsizes, {
'rawdata_0': MockRawType,
'rawdata_1': MockRawType,
})
class PostgreSQLRowBatcherTests(RowBatcherTests):
def getClass(self):
from relstorage.adapters.postgresql.batch import PostgreSQLRowBatcher
return PostgreSQLRowBatcher
IN_ROWS_FLATTENED = True
delete_auto_flush = 'DELETE FROM mytable WHERE id = ANY (%s)'
flush_delete_one = 'DELETE FROM mytable WHERE id = ANY (?)'
select_one = 'SELECT zoid,tid FROM object_state WHERE oids = ANY (%s)'
select_multiple_one_batch = 'SELECT zoid,tid FROM object_state WHERE oids = ANY (%s)'
select_multiple_many_batch = 'SELECT zoid,tid FROM object_state WHERE oids = ANY (%s)'
update_set_static_stmt = 'UPDATE pack_object SET foo=1 WHERE zoid = ANY (%s)'
|
[
"relstorage.tests.mock.Mock",
"relstorage.tests.MockCursor"
] |
[((946, 958), 'relstorage.tests.MockCursor', 'MockCursor', ([], {}), '()\n', (956, 958), False, 'from relstorage.tests import MockCursor\n'), ((1529, 1541), 'relstorage.tests.MockCursor', 'MockCursor', ([], {}), '()\n', (1539, 1541), False, 'from relstorage.tests import MockCursor\n'), ((2404, 2416), 'relstorage.tests.MockCursor', 'MockCursor', ([], {}), '()\n', (2414, 2416), False, 'from relstorage.tests import MockCursor\n'), ((3176, 3188), 'relstorage.tests.MockCursor', 'MockCursor', ([], {}), '()\n', (3186, 3188), False, 'from relstorage.tests import MockCursor\n'), ((4010, 4022), 'relstorage.tests.MockCursor', 'MockCursor', ([], {}), '()\n', (4020, 4022), False, 'from relstorage.tests import MockCursor\n'), ((4740, 4752), 'relstorage.tests.MockCursor', 'MockCursor', ([], {}), '()\n', (4750, 4752), False, 'from relstorage.tests import MockCursor\n'), ((5684, 5696), 'relstorage.tests.MockCursor', 'MockCursor', ([], {}), '()\n', (5694, 5696), False, 'from relstorage.tests import MockCursor\n'), ((6340, 6352), 'relstorage.tests.MockCursor', 'MockCursor', ([], {}), '()\n', (6350, 6352), False, 'from relstorage.tests import MockCursor\n'), ((7066, 7078), 'relstorage.tests.MockCursor', 'MockCursor', ([], {}), '()\n', (7076, 7078), False, 'from relstorage.tests import MockCursor\n'), ((8121, 8133), 'relstorage.tests.MockCursor', 'MockCursor', ([], {}), '()\n', (8131, 8133), False, 'from relstorage.tests import MockCursor\n'), ((9293, 9305), 'relstorage.tests.MockCursor', 'MockCursor', ([], {}), '()\n', (9303, 9305), False, 'from relstorage.tests import MockCursor\n'), ((10265, 10277), 'relstorage.tests.MockCursor', 'MockCursor', ([], {}), '()\n', (10275, 10277), False, 'from relstorage.tests import MockCursor\n'), ((10674, 10686), 'relstorage.tests.MockCursor', 'MockCursor', ([], {}), '()\n', (10684, 10686), False, 'from relstorage.tests import MockCursor\n'), ((11213, 11225), 'relstorage.tests.MockCursor', 'MockCursor', ([], {}), '()\n', (11223, 11225), False, 'from relstorage.tests import MockCursor\n'), ((12282, 12294), 'relstorage.tests.MockCursor', 'MockCursor', ([], {}), '()\n', (12292, 12294), False, 'from relstorage.tests import MockCursor\n'), ((12565, 12576), 'relstorage.tests.mock.Mock', 'mock.Mock', ([], {}), '()\n', (12574, 12576), False, 'from relstorage.tests import mock\n'), ((13501, 13513), 'relstorage.tests.MockCursor', 'MockCursor', ([], {}), '()\n', (13511, 13513), False, 'from relstorage.tests import MockCursor\n'), ((13992, 14004), 'relstorage.tests.MockCursor', 'MockCursor', ([], {}), '()\n', (14002, 14004), False, 'from relstorage.tests import MockCursor\n'), ((14984, 14996), 'relstorage.tests.MockCursor', 'MockCursor', ([], {}), '()\n', (14994, 14996), False, 'from relstorage.tests import MockCursor\n'), ((15625, 15637), 'relstorage.tests.MockCursor', 'MockCursor', ([], {}), '()\n', (15635, 15637), False, 'from relstorage.tests import MockCursor\n')]
|
r"""
Definition
----------
The scattering intensity $I(q)$ is calculated as
.. math::
I(q) = \begin{cases}
A q^{-m1} + \text{background} & q <= q_c \\
C q^{-m2} + \text{background} & q > q_c
\end{cases}
where $q_c$ = the location of the crossover from one slope to the other,
$A$ = the scaling coefficient that sets the overall intensity of the lower Q
power law region, $m1$ = power law exponent at low Q, and $m2$ = power law
exponent at high Q. The scaling of the second power law region (coefficient C)
is then automatically scaled to match the first by following formula:
.. math::
C = \frac{A q_c^{m2}}{q_c^{m1}}
.. note::
Be sure to enter the power law exponents as positive values!
For 2D data the scattering intensity is calculated in the same way as 1D,
where the $q$ vector is defined as
.. math::
q = \sqrt{q_x^2 + q_y^2}
References
----------
None.
**Author:** NIST IGOR/DANSE **on:** pre 2010
**Last Modified by:** <NAME> **on:** February 18, 2016
**Last Reviewed by:** <NAME> **on:** March 21, 2016
"""
from numpy import inf, power, empty, errstate
name = "two_power_law"
title = "This model calculates an empirical functional form for SAS data \
characterized by two power laws."
description = """
I(q) = coef_A*pow(qval,-1.0*power1) + background for q<=q_c
         = C*pow(qval,-1.0*power2) + background for q>q_c
    where C = coef_A*pow(q_c,-1.0*power1)/pow(q_c,-1.0*power2).
    coef_A = scaling coefficient
q_c = crossover location [1/A]
power_1 (=m1) = power law exponent at low Q
power_2 (=m2) = power law exponent at high Q
background = Incoherent background [1/cm]
"""
category = "shape-independent"
# pylint: disable=bad-whitespace, line-too-long
# ["name", "units", default, [lower, upper], "type", "description"],
parameters = [
["coefficent_1", "", 1.0, [-inf, inf], "", "coefficent A in low Q region"],
["crossover", "1/Ang", 0.04,[0, inf], "", "crossover location"],
["power_1", "", 1.0, [0, inf], "", "power law exponent at low Q"],
["power_2", "", 4.0, [0, inf], "", "power law exponent at high Q"],
]
# pylint: enable=bad-whitespace, line-too-long
def Iq(q,
coefficent_1=1.0,
crossover=0.04,
power_1=1.0,
power_2=4.0,
):
"""
:param q: Input q-value (float or [float, float])
    :param coefficent_1: Scaling coefficient at low Q
:param crossover: Crossover location
:param power_1: Exponent of power law function at low Q
:param power_2: Exponent of power law function at high Q
:return: Calculated intensity
"""
    result = empty(q.shape, 'd')
index = (q <= crossover)
with errstate(divide='ignore'):
coefficent_2 = coefficent_1 * power(crossover, power_2 - power_1)
result[index] = coefficent_1 * power(q[index], -power_1)
result[~index] = coefficent_2 * power(q[~index], -power_2)
return result
Iq.vectorized = True # Iq accepts an array of q values
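# Continuity sanity check (an illustrative sketch, not part of the original
# model file): with the default parameters the two branches should agree at
# the crossover, since C is derived from A there.
#
#     import numpy as np
#     low, high = Iq(np.array([0.04 - 1e-9, 0.04 + 1e-9]))
#     assert abs(low - high) / low < 1e-4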
demo = dict(scale=1, background=0.0,
coefficent_1=1.0,
crossover=0.04,
power_1=1.0,
power_2=4.0)
tests = [
# Accuracy tests based on content in test/utest_extra_models.py
[{'coefficent_1': 1.0,
'crossover': 0.04,
'power_1': 1.0,
'power_2': 4.0,
'background': 0.0,
}, 0.001, 1000],
[{'coefficent_1': 1.0,
'crossover': 0.04,
'power_1': 1.0,
'power_2': 4.0,
'background': 0.0,
}, 0.150141, 0.125945],
[{'coefficent_1': 1.0,
'crossover': 0.04,
'power_1': 1.0,
'power_2': 4.0,
'background': 0.0,
}, 0.442528, 0.00166884],
[{'coefficent_1': 1.0,
'crossover': 0.04,
'power_1': 1.0,
'power_2': 4.0,
'background': 0.0,
}, (0.442528, 0.00166884), 0.00166884],
]
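# Each case above is [parameter_dict, q, expected I(q)]; in the last entry the
# tuple is read as a (qx, qy) pair, exercising the 2D path where
# q = sqrt(qx^2 + qy^2) (per the usual sasmodels test convention).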
|
[
"numpy.empty",
"numpy.errstate",
"numpy.power"
] |
[((2794, 2813), 'numpy.empty', 'empty', (['q.shape', '"""d"""'], {}), "(q.shape, 'd')\n", (2799, 2813), False, 'from numpy import inf, power, empty, errstate\n'), ((2852, 2877), 'numpy.errstate', 'errstate', ([], {'divide': '"""ignore"""'}), "(divide='ignore')\n", (2860, 2877), False, 'from numpy import inf, power, empty, errstate\n'), ((2917, 2952), 'numpy.power', 'power', (['crossover', '(power_2 - power_1)'], {}), '(crossover, power_2 - power_1)\n', (2922, 2952), False, 'from numpy import inf, power, empty, errstate\n'), ((2992, 3017), 'numpy.power', 'power', (['q[index]', '(-power_1)'], {}), '(q[index], -power_1)\n', (2997, 3017), False, 'from numpy import inf, power, empty, errstate\n'), ((3058, 3084), 'numpy.power', 'power', (['q[~index]', '(-power_2)'], {}), '(q[~index], -power_2)\n', (3063, 3084), False, 'from numpy import inf, power, empty, errstate\n')]
|
#!/usr/bin/env python
#
# manage.py starts the application and runs other management tasks
import os
# from flask import Flask
# from flask_sqlalchemy import SQLAlchemy
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
from app import app, db
from flask_debugtoolbar import DebugToolbarExtension
app.config.from_object(os.environ['APP_SETTINGS'])
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
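# Typical migration workflow through the registered 'db' command (a sketch;
# these are the standard Flask-Migrate subcommands, not project-specific):
#
#     python manage.py db init      # create the migrations directory
#     python manage.py db migrate   # autogenerate a revision from model changes
#     python manage.py db upgrade   # apply pending revisions to the database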
# add flask debug toolbar
toolbar = DebugToolbarExtension(app)
if __name__ == '__main__':
manager.run()
|
[
"flask_script.Manager",
"flask_migrate.Migrate",
"flask_debugtoolbar.DebugToolbarExtension",
"app.app.config.from_object"
] |
[((292, 342), 'app.app.config.from_object', 'app.config.from_object', (["os.environ['APP_SETTINGS']"], {}), "(os.environ['APP_SETTINGS'])\n", (314, 342), False, 'from app import app, db\n'), ((354, 370), 'flask_migrate.Migrate', 'Migrate', (['app', 'db'], {}), '(app, db)\n', (361, 370), False, 'from flask_migrate import Migrate, MigrateCommand\n'), ((381, 393), 'flask_script.Manager', 'Manager', (['app'], {}), '(app)\n', (388, 393), False, 'from flask_script import Manager\n'), ((474, 500), 'flask_debugtoolbar.DebugToolbarExtension', 'DebugToolbarExtension', (['app'], {}), '(app)\n', (495, 500), False, 'from flask_debugtoolbar import DebugToolbarExtension\n')]
|
import ShuntingYard_RE
import ThompsonConstruct
def runTests():
# List of ["Regular Expression", ["Strings"...]]
# (Infix Regular Expressions)
tests = [
["(a.b|b*)", ["", "ab", "b", "bb", "a"]],
["a.(b.b)*.a", ["aa", "bb", "abba", "aba"]],
["1.(0.0)*.1", ["11", "100001", "11001"]]
]
print("\nTESTS:")
#For each test
for test in tests:
# Infix
infix = test[0]
print(f"infix: {infix}")
# Postfix
postfix = ShuntingYard_RE.toPostfix(infix)
print(f"postfix: {postfix}")
# NFA
nfa = ThompsonConstruct.toNFA(postfix)
print(f"thompson: {nfa}")
# For each string for this test:
for s in test[1]:
# Match?
match = nfa.match(s)
print(f"Match '{s}'? {match}")
# Newline
print()
if __name__ == "__main__":
runTests()
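# Direct use outside the test harness (a sketch relying only on the two module
# functions exercised above and the NFA's match method):
#
#     postfix = ShuntingYard_RE.toPostfix("a.(b.b)*.a")
#     nfa = ThompsonConstruct.toNFA(postfix)
#     print(nfa.match("abba"))  # expected: True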
|
[
"ThompsonConstruct.toNFA",
"ShuntingYard_RE.toPostfix"
] |
[((533, 565), 'ShuntingYard_RE.toPostfix', 'ShuntingYard_RE.toPostfix', (['infix'], {}), '(infix)\n', (558, 565), False, 'import ShuntingYard_RE\n'), ((632, 664), 'ThompsonConstruct.toNFA', 'ThompsonConstruct.toNFA', (['postfix'], {}), '(postfix)\n', (655, 664), False, 'import ThompsonConstruct\n')]
|
import os
import pickle
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
from googleapiclient.errors import HttpError
SCOPES = ['https://www.googleapis.com/auth/calendar.events',
'https://www.googleapis.com/auth/cloud-platform']
# https://developers.google.com/identity/protocols/googlescopes
def authorize(json_path='credentials_oauth2.json', token_path='token.pickle'):
creds = None
# The file token.pickle stores the user's access and refresh tokens, and is
# created automatically when the authorization flow completes for the first
# time.
if os.path.exists(token_path):
with open(token_path, 'rb') as token:
creds = pickle.load(token)
# If there are no (valid) credentials available, let the user log in.
if not creds or not creds.valid:
if creds and creds.expired and creds.refresh_token:
creds.refresh(Request())
else:
flow = InstalledAppFlow.from_client_secrets_file(
json_path, SCOPES)
creds = flow.run_local_server()
# Save the credentials for the next run
with open(token_path, 'wb') as token:
pickle.dump(creds, token)
return creds
authorize("./credentials/credentials.json", "./credentials/token.pickle")
|
[
"pickle.dump",
"google.auth.transport.requests.Request",
"os.path.exists",
"pickle.load",
"google_auth_oauthlib.flow.InstalledAppFlow.from_client_secrets_file"
] |
[((699, 725), 'os.path.exists', 'os.path.exists', (['token_path'], {}), '(token_path)\n', (713, 725), False, 'import os\n'), ((795, 813), 'pickle.load', 'pickle.load', (['token'], {}), '(token)\n', (806, 813), False, 'import pickle\n'), ((1061, 1121), 'google_auth_oauthlib.flow.InstalledAppFlow.from_client_secrets_file', 'InstalledAppFlow.from_client_secrets_file', (['json_path', 'SCOPES'], {}), '(json_path, SCOPES)\n', (1102, 1121), False, 'from google_auth_oauthlib.flow import InstalledAppFlow\n'), ((1294, 1319), 'pickle.dump', 'pickle.dump', (['creds', 'token'], {}), '(creds, token)\n', (1305, 1319), False, 'import pickle\n'), ((1015, 1024), 'google.auth.transport.requests.Request', 'Request', ([], {}), '()\n', (1022, 1024), False, 'from google.auth.transport.requests import Request\n')]
|