code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
import numpy as np
from scipy import stats
import pandas as pd
from sklearn.cross_decomposition import PLSRegression
def standardize_vector(v, center=True, scale=False):
    """Center and/or scale a numeric vector.

    Parameters
    ----------
    v : array-like (numpy array or pandas Series)
        Vector to standardize.
    center : bool
        If True, subtract the mean of ``v``.
    scale : bool
        If True, divide by the standard deviation of ``v``.  Skipped when
        the standard deviation is zero to avoid division by zero.

    Returns
    -------
    The standardized vector.
    """
    if center:
        v = v - np.mean(v)
    if scale:
        # Compute the std once (the original called np.std twice).
        std = np.std(v)
        # Guard against constant vectors: dividing by 0 would yield NaN/inf.
        if std != 0:
            v = (v + 0.0) / std
    # BUG FIX: the original fell off the end (returning None) whenever
    # scale=False; always return the (possibly centered) vector.
    return v
def standardize_vec(v, center='mean', scale='std'):
    """
    Standardizes a vector by centering and scaling it.

    Parameters
    ----------
    v : array-like
    center : falsy, 'mean', 'median', or a number
        Value subtracted from ``v`` (falsy -> 0.0).
    scale : falsy, 'max', 'std', 'mean', 'median', or a number
        Value ``v`` is divided by (falsy -> 1.0).

    This function will ignore scaling if the scale value is zero and will
    instead set the scale value to 1.

    Raises
    ------
    ValueError
        If ``center`` or ``scale`` is not one of the accepted options.
    """
    # choose the center value
    if not center:
        cent_val = 0.0
    elif center == 'mean':
        cent_val = np.mean(v)
    elif center == 'median':
        cent_val = np.median(v)
    elif type(center) in [float, int]:
        cent_val = center
    else:
        raise ValueError('improper center value')
    # choose the scale value
    if not scale:
        # BUG FIX: the original assigned to `scale` here, leaving
        # `scale_val` undefined and raising NameError below.
        scale_val = 1.0
    elif scale == 'max':
        scale_val = max(v)
    elif scale == 'std':
        scale_val = np.std(v)
    elif scale == 'mean':
        scale_val = np.mean(v)
    elif scale == 'median':
        scale_val = np.median(v)
    elif type(scale) in [float, int]:
        scale_val = scale
    else:
        raise ValueError('improper scale value')
    # don't scale if scale value is zero
    if scale_val == 0:
        scale_val = 1
    return (v - cent_val + 0.0) / scale_val
def get_PCA(X, scale=False):
    """
    Returns the PCA decomposition of data frame X.

    Rows of X are observations and columns are features.  Columns are
    mean-centered before the SVD; optionally each column is also scaled
    by its standard deviation.

    X = U D V^t

    Output
    ------
    U, D, V
    """
    if type(X) is np.ndarray:
        X = pd.DataFrame(X)
    # Center (and optionally scale) every column, then factor via SVD.
    centered = X.apply(
        lambda col: standardize_vector(col, center=True, scale=scale))
    return np.linalg.svd(centered, full_matrices=False)
def get_pls(X, Y, n_comp):
    """
    returns the PLS scores

    parameters
    ----------
    X: pandas data frame
    Y: list
    """
    # Standardize predictors and response before fitting.
    x_arr = np.array(
        X.apply(lambda col: standardize_vector(col, center=True, scale=True)))
    y_std = standardize_vector(Y, center=True, scale=True)
    # Fit partial least squares; return scores and loadings.
    model = PLSRegression(n_components=int(n_comp), scale=True)
    model.fit(x_arr, y_std)
    return np.array(model.x_scores_), model.x_loadings_
| [
"numpy.mean",
"numpy.median",
"pandas.DataFrame",
"numpy.array",
"numpy.std",
"numpy.linalg.svd"
] | [((1965, 2008), 'numpy.linalg.svd', 'np.linalg.svd', (['X_stand'], {'full_matrices': '(False)'}), '(X_stand, full_matrices=False)\n', (1978, 2008), True, 'import numpy as np\n'), ((1770, 1785), 'pandas.DataFrame', 'pd.DataFrame', (['X'], {}), '(X)\n', (1782, 1785), True, 'import pandas as pd\n'), ((2507, 2530), 'numpy.array', 'np.array', (['pls.x_scores_'], {}), '(pls.x_scores_)\n', (2515, 2530), True, 'import numpy as np\n'), ((204, 214), 'numpy.mean', 'np.mean', (['v'], {}), '(v)\n', (211, 214), True, 'import numpy as np\n'), ((241, 250), 'numpy.std', 'np.std', (['v'], {}), '(v)\n', (247, 250), True, 'import numpy as np\n'), ((687, 697), 'numpy.mean', 'np.mean', (['v'], {}), '(v)\n', (694, 697), True, 'import numpy as np\n'), ((323, 332), 'numpy.std', 'np.std', (['v'], {}), '(v)\n', (329, 332), True, 'import numpy as np\n'), ((746, 758), 'numpy.median', 'np.median', (['v'], {}), '(v)\n', (755, 758), True, 'import numpy as np\n'), ((1049, 1058), 'numpy.std', 'np.std', (['v'], {}), '(v)\n', (1055, 1058), True, 'import numpy as np\n'), ((1105, 1115), 'numpy.mean', 'np.mean', (['v'], {}), '(v)\n', (1112, 1115), True, 'import numpy as np\n'), ((1164, 1176), 'numpy.median', 'np.median', (['v'], {}), '(v)\n', (1173, 1176), True, 'import numpy as np\n')] |
"""Tests for AzWebAppHttp20Event plugin."""
import copy
import unittest
from cloudmarker.events import azwebapphttp20event
# Shared fixture: a minimal record that the plugin treats as an Azure
# web app config with HTTP 2.0 enabled.  Each test deep-copies and
# mutates one field of this template.
base_record = {
    'ext': {
        'record_type': 'web_app_config',
        'cloud_type': 'azure',
        'http20_enabled': True
    },
    'com': {
        'cloud_type': 'azure'
    }
}
class AzWebAppHttp20EventTest(unittest.TestCase):
    """Tests for AzWebAppHttp20Event plugin."""

    def _eval(self, record):
        """Run the plugin on *record* and return the generated events."""
        plugin = azwebapphttp20event.AzWebAppHttp20Event()
        return list(plugin.eval(record))

    def test_com_bucket_missing(self):
        record = copy.deepcopy(base_record)
        record['com'] = None
        self.assertEqual(self._eval(record), [])

    def test_cloud_type_non_azure(self):
        record = copy.deepcopy(base_record)
        record['com']['cloud_type'] = 'non_azure'
        self.assertEqual(self._eval(record), [])

    def test_ext_bucket_missing(self):
        record = copy.deepcopy(base_record)
        record['ext'] = None
        self.assertEqual(self._eval(record), [])

    def test_record_type_non_web_app_config(self):
        record = copy.deepcopy(base_record)
        record['ext']['record_type'] = 'non_web_app_config'
        self.assertEqual(self._eval(record), [])

    def test_http20_enabled(self):
        record = copy.deepcopy(base_record)
        record['ext']['http20_enabled'] = True
        self.assertEqual(self._eval(record), [])

    def test_http20_disabled(self):
        record = copy.deepcopy(base_record)
        record['ext']['http20_enabled'] = False
        events = self._eval(record)
        self.assertEqual(len(events), 1)
        self.assertEqual(events[0]['ext']['record_type'],
                         'web_app_http20_event')
        self.assertEqual(events[0]['com']['record_type'],
                         'web_app_http20_event')
| [
"cloudmarker.events.azwebapphttp20event.AzWebAppHttp20Event",
"copy.deepcopy"
] | [((478, 504), 'copy.deepcopy', 'copy.deepcopy', (['base_record'], {}), '(base_record)\n', (491, 504), False, 'import copy\n'), ((551, 592), 'cloudmarker.events.azwebapphttp20event.AzWebAppHttp20Event', 'azwebapphttp20event.AzWebAppHttp20Event', ([], {}), '()\n', (590, 592), False, 'from cloudmarker.events import azwebapphttp20event\n'), ((732, 758), 'copy.deepcopy', 'copy.deepcopy', (['base_record'], {}), '(base_record)\n', (745, 758), False, 'import copy\n'), ((826, 867), 'cloudmarker.events.azwebapphttp20event.AzWebAppHttp20Event', 'azwebapphttp20event.AzWebAppHttp20Event', ([], {}), '()\n', (865, 867), False, 'from cloudmarker.events import azwebapphttp20event\n'), ((1005, 1031), 'copy.deepcopy', 'copy.deepcopy', (['base_record'], {}), '(base_record)\n', (1018, 1031), False, 'import copy\n'), ((1078, 1119), 'cloudmarker.events.azwebapphttp20event.AzWebAppHttp20Event', 'azwebapphttp20event.AzWebAppHttp20Event', ([], {}), '()\n', (1117, 1119), False, 'from cloudmarker.events import azwebapphttp20event\n'), ((1269, 1295), 'copy.deepcopy', 'copy.deepcopy', (['base_record'], {}), '(base_record)\n', (1282, 1295), False, 'import copy\n'), ((1373, 1414), 'cloudmarker.events.azwebapphttp20event.AzWebAppHttp20Event', 'azwebapphttp20event.AzWebAppHttp20Event', ([], {}), '()\n', (1412, 1414), False, 'from cloudmarker.events import azwebapphttp20event\n'), ((1548, 1574), 'copy.deepcopy', 'copy.deepcopy', (['base_record'], {}), '(base_record)\n', (1561, 1574), False, 'import copy\n'), ((1639, 1680), 'cloudmarker.events.azwebapphttp20event.AzWebAppHttp20Event', 'azwebapphttp20event.AzWebAppHttp20Event', ([], {}), '()\n', (1678, 1680), False, 'from cloudmarker.events import azwebapphttp20event\n'), ((1815, 1841), 'copy.deepcopy', 'copy.deepcopy', (['base_record'], {}), '(base_record)\n', (1828, 1841), False, 'import copy\n'), ((1907, 1948), 'cloudmarker.events.azwebapphttp20event.AzWebAppHttp20Event', 'azwebapphttp20event.AzWebAppHttp20Event', ([], {}), '()\n', (1946, 
1948), False, 'from cloudmarker.events import azwebapphttp20event\n')] |
from flask_restful import Resource
from flask import request, Blueprint, jsonify
from models import db, Post, Comment
from datetime import datetime
commentBp = Blueprint('commentBp', __name__)
class UserComments():
    """Blueprint endpoints for creating, listing, deleting and voting on comments."""

    @commentBp.route("/addComment", methods=['POST'])
    def addComment():
        """Create a new comment from the JSON payload."""
        json_data = request.get_json(force=True)
        if not json_data:
            return jsonify({'message': 'No input data provided'}), 400
        comment = Comment(
            post_id=json_data['post_id'],
            username=json_data['username'],
            content=json_data['content']
        )
        db.session.add(comment)
        db.session.commit()
        # NOTE: removed dead `result = Comment.serialize(Comment)` which
        # called serialize on the class itself and was never used.
        return jsonify({"status": 'success', 'data': comment}), 201

    @commentBp.route("/getComment", methods=['GET'])
    def getComment():
        """Return all comments ordered by upvotes ('top') or recency."""
        json_data = request.get_json(force=True)
        comment_list = []
        if not json_data:
            return jsonify({'message': 'No input data provided'}), 400
        if json_data['query_by'] == 'top':
            comments = Comment.query.order_by(Comment.upvotes.desc()).all()
        else:  # json_data['query_by'] == 'latest'
            comments = Comment.query.order_by(Comment.timestamp.desc()).all()
        # BUG FIX: the original tested `comment_list == []`, which is always
        # true at this point, so the endpoint unconditionally returned 404.
        if not comments:
            return jsonify({'message': 'No comments found'}), 404
        for comment in comments:
            comment_list.append(comment.serialize())
        return jsonify({"status": str(comment_list)}), 200

    @commentBp.route("/deleteComment", methods=['DELETE'])
    def deleteComment():
        """Delete a comment if the requester is its author."""
        json_data = request.get_json(force=True)
        if not json_data:
            return jsonify({'message': 'No input data provided'}), 400
        comment = Comment.query.filter_by(id=json_data['comment_id']).first()
        if not comment:
            return jsonify({'message': 'no such comment found'}), 400
        if json_data['username'] == comment.username:
            db.session.delete(comment)
            db.session.commit()
            return jsonify({"status": 'success'}), 200
        return jsonify({"status": 'not authhorized'}), 401

    # BUG FIX: the route string contained a stray quote ("/upvote'").
    @commentBp.route("/upvote", methods=['PUT'])
    def upvote():
        """Increment or decrement a comment's upvote count."""
        json_data = request.get_json(force=True)
        if not json_data:
            return jsonify({'message': 'No input data provided'}), 400
        # BUG FIX: the variable was misspelled 'commment' while later lines
        # referenced the undefined names 'comment' and 'comments'.
        comment = Comment.query.filter_by(id=json_data['id']).one()
        if json_data['state'] == 'true':
            comment.upvotes += 1
        else:
            comment.upvotes -= 1
        result = comment.upvotes
        db.session.commit()
        return jsonify({"status": 'success', 'data': result}), 201
return jsonify({ "status" : 'success', 'data': result}), 201 | [
"models.Comment.timestamp.desc",
"models.Comment.serialize",
"flask.jsonify",
"models.Comment",
"models.db.session.add",
"flask.request.get_json",
"models.db.session.delete",
"models.Comment.query.filter_by",
"models.Comment.upvotes.desc",
"flask.Blueprint",
"models.db.session.commit"
] | [((161, 193), 'flask.Blueprint', 'Blueprint', (['"""commentBp"""', '__name__'], {}), "('commentBp', __name__)\n", (170, 193), False, 'from flask import request, Blueprint, jsonify\n'), ((318, 346), 'flask.request.get_json', 'request.get_json', ([], {'force': '(True)'}), '(force=True)\n', (334, 346), False, 'from flask import request, Blueprint, jsonify\n'), ((466, 569), 'models.Comment', 'Comment', ([], {'post_id': "json_data['post_id']", 'username': "json_data['username']", 'content': "json_data['content']"}), "(post_id=json_data['post_id'], username=json_data['username'],\n content=json_data['content'])\n", (473, 569), False, 'from models import db, Post, Comment\n'), ((627, 650), 'models.db.session.add', 'db.session.add', (['comment'], {}), '(comment)\n', (641, 650), False, 'from models import db, Post, Comment\n'), ((659, 678), 'models.db.session.commit', 'db.session.commit', ([], {}), '()\n', (676, 678), False, 'from models import db, Post, Comment\n'), ((697, 723), 'models.Comment.serialize', 'Comment.serialize', (['Comment'], {}), '(Comment)\n', (714, 723), False, 'from models import db, Post, Comment\n'), ((898, 926), 'flask.request.get_json', 'request.get_json', ([], {'force': '(True)'}), '(force=True)\n', (914, 926), False, 'from flask import request, Blueprint, jsonify\n'), ((1690, 1718), 'flask.request.get_json', 'request.get_json', ([], {'force': '(True)'}), '(force=True)\n', (1706, 1718), False, 'from flask import request, Blueprint, jsonify\n'), ((2333, 2361), 'flask.request.get_json', 'request.get_json', ([], {'force': '(True)'}), '(force=True)\n', (2349, 2361), False, 'from flask import request, Blueprint, jsonify\n'), ((2714, 2733), 'models.db.session.commit', 'db.session.commit', ([], {}), '()\n', (2731, 2733), False, 'from models import db, Post, Comment\n'), ((739, 786), 'flask.jsonify', 'jsonify', (["{'status': 'success', 'data': comment}"], {}), "({'status': 'success', 'data': comment})\n", (746, 786), False, 'from flask import request, 
Blueprint, jsonify\n'), ((2054, 2080), 'models.db.session.delete', 'db.session.delete', (['comment'], {}), '(comment)\n', (2071, 2080), False, 'from models import db, Post, Comment\n'), ((2093, 2112), 'models.db.session.commit', 'db.session.commit', ([], {}), '()\n', (2110, 2112), False, 'from models import db, Post, Comment\n'), ((2198, 2236), 'flask.jsonify', 'jsonify', (["{'status': 'not authhorized'}"], {}), "({'status': 'not authhorized'})\n", (2205, 2236), False, 'from flask import request, Blueprint, jsonify\n'), ((2758, 2804), 'flask.jsonify', 'jsonify', (["{'status': 'success', 'data': result}"], {}), "({'status': 'success', 'data': result})\n", (2765, 2804), False, 'from flask import request, Blueprint, jsonify\n'), ((395, 441), 'flask.jsonify', 'jsonify', (["{'message': 'No input data provided'}"], {}), "({'message': 'No input data provided'})\n", (402, 441), False, 'from flask import request, Blueprint, jsonify\n'), ((998, 1044), 'flask.jsonify', 'jsonify', (["{'message': 'No input data provided'}"], {}), "({'message': 'No input data provided'})\n", (1005, 1044), False, 'from flask import request, Blueprint, jsonify\n'), ((1369, 1410), 'flask.jsonify', 'jsonify', (["{'message': 'No comments found'}"], {}), "({'message': 'No comments found'})\n", (1376, 1410), False, 'from flask import request, Blueprint, jsonify\n'), ((1764, 1810), 'flask.jsonify', 'jsonify', (["{'message': 'No input data provided'}"], {}), "({'message': 'No input data provided'})\n", (1771, 1810), False, 'from flask import request, Blueprint, jsonify\n'), ((1834, 1885), 'models.Comment.query.filter_by', 'Comment.query.filter_by', ([], {'id': "json_data['comment_id']"}), "(id=json_data['comment_id'])\n", (1857, 1885), False, 'from models import db, Post, Comment\n'), ((1937, 1982), 'flask.jsonify', 'jsonify', (["{'message': 'no such comment found'}"], {}), "({'message': 'no such comment found'})\n", (1944, 1982), False, 'from flask import request, Blueprint, jsonify\n'), ((2132, 2162), 
'flask.jsonify', 'jsonify', (["{'status': 'success'}"], {}), "({'status': 'success'})\n", (2139, 2162), False, 'from flask import request, Blueprint, jsonify\n'), ((2410, 2456), 'flask.jsonify', 'jsonify', (["{'message': 'No input data provided'}"], {}), "({'message': 'No input data provided'})\n", (2417, 2456), False, 'from flask import request, Blueprint, jsonify\n'), ((2490, 2533), 'models.Comment.query.filter_by', 'Comment.query.filter_by', ([], {'id': "json_data['id']"}), "(id=json_data['id'])\n", (2513, 2533), False, 'from models import db, Post, Comment\n'), ((1146, 1168), 'models.Comment.upvotes.desc', 'Comment.upvotes.desc', ([], {}), '()\n', (1166, 1168), False, 'from models import db, Post, Comment\n'), ((1278, 1302), 'models.Comment.timestamp.desc', 'Comment.timestamp.desc', ([], {}), '()\n', (1300, 1302), False, 'from models import db, Post, Comment\n')] |
import os
import sys
import os.path as osp
from contextlib import contextmanager
############################################################
# Setup path
def add_path(path):
    """Prepend *path* to sys.path unless it is already present."""
    if path in sys.path:
        return
    sys.path.insert(0, path)
# Make the project's lib/ directory importable before the lib imports below.
curdir = osp.dirname(__file__)
lib_path = osp.join(curdir, '..', 'lib')
add_path(lib_path)
############################################################
# Import modules from the lib
from config import cfg
from utils.ops import may_create
from utils.proto import prototxt_from_template
@contextmanager
def workenv():
    """Temporarily chdir to the project root; restore the old cwd on exit."""
    previous = os.getcwd()
    os.chdir(osp.join(curdir, '..'))
    try:
        yield
    finally:
        # Always restore, even if the body raised.
        os.chdir(previous)
def setup(phase_key, dataset, expname, rsltname):
    '''Setup paths & general args after possible merge from config file.

    Args:
        phase_key: key into cfg for the current phase section.
        dataset: dataset name, 'TH14' or 'AN'.
        expname: experiment folder name under cfg.EXP_DIR/<dataset>.
        rsltname: result file name; may already end with '.pc'.
    '''
    # Save args to config
    cfg.DATASET = dataset
    cfg.EXP = expname
    cfg.NUM_CLASSES = {
        'TH14': 20,
        'AN': 100,
    }[cfg.DATASET]
    # AN.train == TH14.val; AN.val == TH14.test
    # if cfg.DATASET == 'AN':
    #     cfg[phase_key].STAGE = {
    #         'val': 'train',
    #         'test': 'val',
    #         'train': 'train',
    #         'val': 'val',
    #     }[cfg[phase_key].STAGE]
    # Setup <infix> first, resulting in
    # '' => ''; 'infix' => '.infix' so that we can uniformly insert it.
    ret_infix = cfg.INFIX if not cfg.INFIX.startswith('.') else cfg.INFIX[1:]
    ret_infix = '' if ret_infix == '' else '.{}'.format(ret_infix)
    cfg.INFIX = ret_infix
    # Setup <viz_folder> name
    norm_str = 'normed' if cfg.FEAT.NORM else 'unnormed'
    avt_str = {
        True: '{avt}',
        False: '{avt}{trh}'
    }[cfg.FEAT.THRESH is None].format(avt=cfg.FEAT.ACTIVATION,
                                      trh=cfg.FEAT.THRESH)
    cfg.VIZ.FOLDER_NAME = '{}_{}_{}_{}'.format(cfg[phase_key].STAGE, cfg.FEAT.MODE,
                                               norm_str, avt_str)
    if not cfg.VIZ.FIX_WIDTH:
        cfg.VIZ.FOLDER_NAME += '_fixwidth'
    # Then several paths: <proto>, <log>, <local_snapshots>, <viz>
    cfg.EXP_PATH = osp.join(cfg.EXP_DIR, cfg.DATASET, cfg.EXP)
    cfg.PROTO_PATH = osp.join(cfg.EXP_PATH, 'proto')
    cfg.LOG_PATH = osp.join(cfg.EXP_PATH, 'log')
    cfg.LOCAL_SNAPSHOT_PATH = osp.join(cfg.EXP_PATH, 'snapshot')
    # Example: exp/TH14/experiment100/val_mul_normed_relu10_fixwidth
    cfg.VIZ_PATH = osp.join(cfg.EXP_PATH, cfg.VIZ.FOLDER_NAME)
    cfg.RSLT_PATH = osp.join(cfg.EXP_PATH, 'rslt')
    path2check = [cfg.PROTO_PATH, cfg.LOG_PATH, cfg.LOCAL_SNAPSHOT_PATH,
                  cfg.VIZ_PATH, cfg.RSLT_PATH]
    # BUG FIX: `map(may_create, path2check)` is lazy in Python 3 and the
    # iterator was never consumed, so no directory was ever created.
    for path in path2check:
        may_create(path)
    cfg.SL_PATH = osp.join(cfg.PROTO_PATH,
                            'solver{}.prototxt'.format(cfg.INFIX))
    cfg.TR_PATH = osp.join(cfg.PROTO_PATH,
                            'train{}.prototxt'.format(cfg.INFIX))
    # Currently we share the prototxt between training and testing.
    cfg.TE_PATH = cfg.TR_PATH
    cfg.SNAPSHOT_PATH = osp.join(cfg.LOCAL_SNAPSHOT_PATH, {
        True: rsltname.replace('.pc', '.caffemodel'),
        False: '{}_iter{}.caffemodel'.format(rsltname, cfg.MAX_ITER)
    }[rsltname.endswith('.pc')])
    # Setup `videoids_lst` template.
    cfg.DSPEC.VID_LST = osp.join(cfg.DATA_DIR, cfg.DATASET, '{stage}_videoid.lst')
    # Specify training input.
    cfg[phase_key].DATA_PATH = osp.join(cfg.DATA_DIR, cfg.DATASET,
                                        cfg[phase_key].DATA_FILE)
    phase_ = phase_key.lower() + '.'
    # Processing rsltname in following logic in order:
    # (1) rsltname should start with '<phase>.';
    # (2) rslname with '.pc' should be directly used;
    # (3) otherwise it should be recorded with the iteration.
    if not rsltname.startswith(phase_):
        rsltname = phase_ + rsltname
    # Finally the result pickle file.
    cfg[phase_key].RSLT_PATH = osp.join(cfg.RSLT_PATH, {
        True: rsltname,
        False: '{}_iter{}.pc'.format(rsltname, cfg.MAX_ITER)
    }[rsltname.endswith('.pc')])
    # Generate prototxt from template
    prototxt_from_template()
| [
"sys.path.insert",
"os.path.join",
"os.getcwd",
"os.chdir",
"os.path.dirname",
"config.cfg.INFIX.startswith",
"utils.proto.prototxt_from_template"
] | [((252, 273), 'os.path.dirname', 'osp.dirname', (['__file__'], {}), '(__file__)\n', (263, 273), True, 'import os.path as osp\n'), ((286, 315), 'os.path.join', 'osp.join', (['curdir', '""".."""', '"""lib"""'], {}), "(curdir, '..', 'lib')\n", (294, 315), True, 'import os.path as osp\n'), ((577, 588), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (586, 588), False, 'import os\n'), ((2116, 2159), 'os.path.join', 'osp.join', (['cfg.EXP_DIR', 'cfg.DATASET', 'cfg.EXP'], {}), '(cfg.EXP_DIR, cfg.DATASET, cfg.EXP)\n', (2124, 2159), True, 'import os.path as osp\n'), ((2182, 2213), 'os.path.join', 'osp.join', (['cfg.EXP_PATH', '"""proto"""'], {}), "(cfg.EXP_PATH, 'proto')\n", (2190, 2213), True, 'import os.path as osp\n'), ((2233, 2262), 'os.path.join', 'osp.join', (['cfg.EXP_PATH', '"""log"""'], {}), "(cfg.EXP_PATH, 'log')\n", (2241, 2262), True, 'import os.path as osp\n'), ((2293, 2327), 'os.path.join', 'osp.join', (['cfg.EXP_PATH', '"""snapshot"""'], {}), "(cfg.EXP_PATH, 'snapshot')\n", (2301, 2327), True, 'import os.path as osp\n'), ((2416, 2459), 'os.path.join', 'osp.join', (['cfg.EXP_PATH', 'cfg.VIZ.FOLDER_NAME'], {}), '(cfg.EXP_PATH, cfg.VIZ.FOLDER_NAME)\n', (2424, 2459), True, 'import os.path as osp\n'), ((2480, 2510), 'os.path.join', 'osp.join', (['cfg.EXP_PATH', '"""rslt"""'], {}), "(cfg.EXP_PATH, 'rslt')\n", (2488, 2510), True, 'import os.path as osp\n'), ((3259, 3317), 'os.path.join', 'osp.join', (['cfg.DATA_DIR', 'cfg.DATASET', '"""{stage}_videoid.lst"""'], {}), "(cfg.DATA_DIR, cfg.DATASET, '{stage}_videoid.lst')\n", (3267, 3317), True, 'import os.path as osp\n'), ((3379, 3440), 'os.path.join', 'osp.join', (['cfg.DATA_DIR', 'cfg.DATASET', 'cfg[phase_key].DATA_FILE'], {}), '(cfg.DATA_DIR, cfg.DATASET, cfg[phase_key].DATA_FILE)\n', (3387, 3440), True, 'import os.path as osp\n'), ((4079, 4103), 'utils.proto.prototxt_from_template', 'prototxt_from_template', ([], {}), '()\n', (4101, 4103), False, 'from utils.proto import prototxt_from_template\n'), ((216, 240), 
'sys.path.insert', 'sys.path.insert', (['(0)', 'path'], {}), '(0, path)\n', (231, 240), False, 'import sys\n'), ((602, 624), 'os.path.join', 'osp.join', (['curdir', '""".."""'], {}), "(curdir, '..')\n", (610, 624), True, 'import os.path as osp\n'), ((670, 686), 'os.chdir', 'os.chdir', (['olddir'], {}), '(olddir)\n', (678, 686), False, 'import os\n'), ((1387, 1412), 'config.cfg.INFIX.startswith', 'cfg.INFIX.startswith', (['"""."""'], {}), "('.')\n", (1407, 1412), False, 'from config import cfg\n')] |
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
utility tools.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import distutils.util
import matplotlib.pyplot as plt
import numpy as np
from PIL import Image
import os
import time
# Root directory where generated images are written.
OUTPUT = './output/'
# ImageNet per-channel mean/std, reshaped to (3, 1, 1) for CHW broadcasting.
img_mean = np.array([0.485, 0.456, 0.406]).reshape((3, 1, 1))
img_std = np.array([0.229, 0.224, 0.225]).reshape((3, 1, 1))
class bcolors:
    """ANSI escape sequences for colored terminal output."""
    RED = "\033[1;31m"
    BLUE = "\033[1;34m"
    CYAN = "\033[1;36m"
    GREEN = "\033[1;32m"
    RESET = "\033[0;0m"
    REVERSE = "\033[;7m"
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'
    # BUG FIX: BOLD was defined twice ("\033[;1m" and then '\033[1m');
    # only the last assignment took effect, so keep that value once.
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
def print_arguments(args):
    """Print argparse's arguments.

    Usage:

    .. code-block:: python

        parser = argparse.ArgumentParser()
        parser.add_argument("name", default="Jonh", type=str, help="User name.")
        args = parser.parse_args()
        print_arguments(args)

    :param args: Input argparse.Namespace for printing.
    :type args: argparse.Namespace
    """
    print("----------- Configuration Arguments -----------")
    attrs = vars(args)
    # Keys are unique, so sorting by key matches sorting the items.
    for name in sorted(attrs):
        print("%s: %s" % (name, attrs[name]))
    print("------------------------------------------------")
def add_arguments(argname, type, default, help, argparser, **kwargs):
    """Add argparse's argument.

    Usage:

    .. code-block:: python

        parser = argparse.ArgumentParser()
        add_argument("name", str, "Jonh", "User name.", parser)
        args = parser.parse_args()

    :param argname: option name (registered as ``--argname``).
    :param type: value type; ``bool`` gets a permissive string-to-bool parser.
    :param default: default value, shown in the help text.
    :param help: help text.
    :param argparser: argparse.ArgumentParser to extend.
    """
    # BUG FIX: `distutils.util.strtobool` was removed in Python 3.12
    # (PEP 632); use an inline parser with identical semantics.
    type = _strtobool if type == bool else type
    argparser.add_argument(
        "--" + argname,
        default=default,
        type=type,
        help=help + ' Default: %(default)s.',
        **kwargs)


def _strtobool(value):
    """Convert a truthy/falsy string to 1/0 (same contract as distutils)."""
    value = value.lower()
    if value in ('y', 'yes', 't', 'true', 'on', '1'):
        return 1
    if value in ('n', 'no', 'f', 'false', 'off', '0'):
        return 0
    raise ValueError("invalid truth value %r" % (value,))
def check_output_directory(type):
    """
    create output directory

    Args:
        type: name of picture set for test
    """
    # Create OUTPUT first, then the per-dataset subdirectory.
    for directory in (OUTPUT, OUTPUT + "/" + type):
        if not os.path.exists(directory):
            os.mkdir(directory, 0o755)
def convert_net(img_example):
    """
    convert image array to original

    Args:
        img_example: array data of img
    """
    # Reshape to C,H,W and undo normalization: restore per-channel
    # std/mean, then rescale to the 0-255 byte range.
    chw = np.reshape(img_example.astype('float32'), (3, 224, 224))
    chw = (chw * img_std + img_mean) * 255
    chw = np.reshape(chw.astype(np.uint8), (3, 224, 224))
    # C,H,W -> H,W,C for image viewers.
    return chw.transpose((1, 2, 0))
def save_image(output_img, path):
    """
    save image from array that original or adversarial

    Args:
        output_img: array data of img
        path: directory and filename
    """
    Image.fromarray(output_img).save(path, 'png')
def generation_image(id, org_img, org_label, adv_img, adv_label, attack_method='FGSM'):
    """
    save image from array that original or adversarial
    imagenet data set

    Args:
        org_img: array data of test img
        adv_img: array data of adv img
        org_label: the inference label of test image
        adv_label: the adverarial label of adv image
        attack_method: the adverarial example generation method
    """
    DATA_TYPE = "imagenet"
    check_output_directory(DATA_TYPE)
    prefix = OUTPUT + DATA_TYPE
    org_output = convert_net(org_img)
    adv_output = convert_net(adv_img)
    diff_output = abs(adv_output - org_output)
    # Write the original, adversarial and difference images side by side.
    targets = [
        (org_output, prefix + "/%d_original-%d-by-%s.png" % (id, org_label, attack_method)),
        (adv_output, prefix + "/%d_adversary-%d-by-%s.png" % (id, adv_label, attack_method)),
        (diff_output, prefix + "/%d_diff-x-by-%s.png" % (id, attack_method)),
    ]
    for img, path in targets:
        save_image(img, path)
    print("--------------------------------------------------")
def show_images_diff(original_img, original_label, adversarial_img, adversarial_label):
    """
    show original image, adversarial image and their difference

    Args:
        original_img: original image, numpy
        original_label: original label, int
        adversarial_img: adversarial image
        adversarial_label: adversarial label
    """
    plt.figure()
    for pos, title, img in ((131, 'Original', original_img),
                            (132, 'Adversarial', adversarial_img)):
        plt.subplot(pos)
        plt.title(title)
        plt.imshow(img)
        plt.axis('off')

    plt.subplot(133)
    plt.title('Adversarial-Original')
    diff = adversarial_img - original_img
    l0 = np.where(diff != 0)[0].shape[0]
    l2 = np.linalg.norm(diff)
    print("l0={} l2={}".format(l0, l2))
    # Map values from (-1, 1) to (0, 1) for display.
    diff = diff / abs(diff).max() / 2.0 + 0.5
    plt.imshow(diff, cmap=plt.cm.gray)
    plt.axis('off')
    plt.tight_layout()
    stamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
    if not os.path.exists('output'):
        os.makedirs('output')
    plt.savefig("output/orig_adv_diff_{}_{}.png".format(adversarial_label, stamp))
    plt.show()
def show_images_diff_denoising(image_a, image_a_label, image_b, image_b_label, image_a_title='Input', image_b_title='output'):
    """
    show original image, adversarial image and their difference

    Args:
        image_a: original image, ndarray
        image_a_label: original label, int
        image_b: adversarial image, ndarray
        image_b_label: adversarial label
        image_a_title: the title of the image a
        image_b_title: the title of the image b
    Returns:
    """
    plt.figure()
    plt.subplot(131)
    plt.title(image_a_title)
    plt.imshow(image_a)
    plt.axis('off')

    plt.subplot(132)
    plt.title(image_b_title)
    plt.imshow(image_b)
    plt.axis('off')

    plt.subplot(133)
    plt.title(image_a_title + '-' + image_b_title)
    difference = image_a - image_b
    l0 = np.where(difference != 0)[0].shape[0]
    l2 = np.linalg.norm(difference)
    print("l0={} l2={}".format(l0, l2))
    # (-1,1) -> (0,1)
    difference = difference / abs(difference).max() / 2.0 + 0.5
    plt.imshow(difference, cmap=plt.cm.gray)
    plt.axis('off')
    plt.tight_layout()
    ts = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
    # BUG FIX: the original checked 'examples/image_cls/output' but the
    # figure is saved under 'output/'; when 'output' already existed the
    # mismatched guard let os.makedirs raise FileExistsError.
    if not os.path.exists('output'):
        os.makedirs('output')
    plt.savefig("output/{}_{}_diff_{}_{}_{}.png".format(image_a_title, image_b_title, image_a_label, image_b_label, ts))
    plt.show()
def _shorten_label(label):
    """Return the first comma-separated class name from *label*; if it is
    longer than 10 characters, keep only the part after its last space."""
    name = label.split(',')[0]
    if len(name) > 10:
        name = name.split(' ')[-1]
    return name


def show_input_adv_and_denoise(image_a, image_b, image_c, image_d,
                               image_a_label, image_b_label, image_c_label, image_d_label,
                               image_a_title='Input', image_b_title='Adversary',
                               image_c_title='Adv-Denoise', image_d_title='In-Denoise', method='Default'
                               ):
    """
    show original image, adversarial image, and their denoising results, respectively

    Args:
        image_a: original image, ndarray
        image_a_label: original label, str
        image_a_title: the title of the image a
        image_b: adversarial image, ndarray
        image_b_label: adversarial label
        image_b_title: the title of the image b
        image_c: denoising result of the adversarial image, ndarray
        image_c_label: the predicted class label after denoising of the adv-image
        image_c_title: the title of the image c
        image_d: denoising result of the original input image, ndarray
        image_d_label: the predicted class label after denoising of the input image
        image_d_title: the title of the image d
    Returns:
    """
    # The original repeated the label-shortening loop four times inline;
    # it is factored into _shorten_label above.
    a_label = _shorten_label(image_a_label)
    b_label = _shorten_label(image_b_label)
    c_label = _shorten_label(image_c_label)
    d_label = _shorten_label(image_d_label)
    # define the plot position (unused 'w' from the original removed)
    h = image_c.shape[1] if image_c.shape[1] > image_d.shape[1] else image_d.shape[1]
    x = 0        # initial horizontal position of the first line
    y = h + 10   # initial vertical position of the first line
    xos = 15     # offset to x of the second line
    yos = 10     # offset to y of the second line
    fig = plt.figure()
    title = 'Denoise method: ' + method
    fig.suptitle(title, fontsize=12, fontweight='bold', y=0.80)
    panels = [(141, image_a_title, image_a, a_label),
              (142, image_b_title, image_b, b_label),
              (143, image_c_title, image_c, c_label),
              (144, image_d_title, image_d, d_label)]
    for pos, panel_title, img, label in panels:
        plt.subplot(pos)
        plt.title(panel_title)
        plt.imshow(img)
        plt.text(x, y, 'Top1 label:')
        plt.text(x + xos, y + yos, label)
        plt.axis('off')
    plt.tight_layout()
    # BUG FIX: the original checked 'examples/image_cls/output' but saves
    # under 'output/'; check the directory actually used.
    if not os.path.exists('output'):
        os.makedirs('output')
    plt.savefig("output/{}_Denoising_Comparison.png".format(method))
    plt.show()
def get_best_weigthts_from_folder(folder, pdparams_file_starter):
    """Return the path of the best .pdparams file in *folder*, or None.

    Candidate files must start with *pdparams_file_starter* and end with
    '.pdparams' (both case-insensitive).  The accuracy token (the second
    dot-separated field of the filename) selects the best file.
    """
    pdparams_files = [filename for filename in os.listdir(folder)
                      if filename.lower().endswith('.pdparams')
                      and filename.lower().startswith(pdparams_file_starter.lower())]
    if not pdparams_files:
        return None

    # BUG FIX: the original compared accuracy tokens as strings, so e.g.
    # '9' ranked above '10'.  Compare numerically when the token parses,
    # falling back to the raw string for non-numeric tokens.
    def acc_key(filename):
        token = filename.split('.')[1]
        try:
            return (1, float(token), '')
        except ValueError:
            return (0, 0.0, token)

    accs = [acc_key(f) for f in pdparams_files]
    max_index = accs.index(max(accs))
    best_weight_path = os.path.join(folder, pdparams_files[max_index])
    print('Loaded: ', best_weight_path)
    return best_weight_path
| [
"numpy.array",
"numpy.linalg.norm",
"matplotlib.pyplot.imshow",
"os.path.exists",
"os.listdir",
"numpy.where",
"os.mkdir",
"matplotlib.pyplot.axis",
"matplotlib.pyplot.title",
"time.time",
"matplotlib.pyplot.show",
"matplotlib.pyplot.text",
"PIL.Image.fromarray",
"os.makedirs",
"time.str... | [((3471, 3498), 'PIL.Image.fromarray', 'Image.fromarray', (['output_img'], {}), '(output_img)\n', (3486, 3498), False, 'from PIL import Image\n'), ((5021, 5033), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (5031, 5033), True, 'import matplotlib.pyplot as plt\n'), ((5039, 5055), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(131)'], {}), '(131)\n', (5050, 5055), True, 'import matplotlib.pyplot as plt\n'), ((5060, 5081), 'matplotlib.pyplot.title', 'plt.title', (['"""Original"""'], {}), "('Original')\n", (5069, 5081), True, 'import matplotlib.pyplot as plt\n'), ((5086, 5110), 'matplotlib.pyplot.imshow', 'plt.imshow', (['original_img'], {}), '(original_img)\n', (5096, 5110), True, 'import matplotlib.pyplot as plt\n'), ((5115, 5130), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (5123, 5130), True, 'import matplotlib.pyplot as plt\n'), ((5136, 5152), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(132)'], {}), '(132)\n', (5147, 5152), True, 'import matplotlib.pyplot as plt\n'), ((5157, 5181), 'matplotlib.pyplot.title', 'plt.title', (['"""Adversarial"""'], {}), "('Adversarial')\n", (5166, 5181), True, 'import matplotlib.pyplot as plt\n'), ((5186, 5213), 'matplotlib.pyplot.imshow', 'plt.imshow', (['adversarial_img'], {}), '(adversarial_img)\n', (5196, 5213), True, 'import matplotlib.pyplot as plt\n'), ((5218, 5233), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (5226, 5233), True, 'import matplotlib.pyplot as plt\n'), ((5239, 5255), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(133)'], {}), '(133)\n', (5250, 5255), True, 'import matplotlib.pyplot as plt\n'), ((5260, 5293), 'matplotlib.pyplot.title', 'plt.title', (['"""Adversarial-Original"""'], {}), "('Adversarial-Original')\n", (5269, 5293), True, 'import matplotlib.pyplot as plt\n'), ((5399, 5425), 'numpy.linalg.norm', 'np.linalg.norm', (['difference'], {}), '(difference)\n', (5413, 5425), True, 'import numpy as np\n'), ((5557, 5597), 
'matplotlib.pyplot.imshow', 'plt.imshow', (['difference'], {'cmap': 'plt.cm.gray'}), '(difference, cmap=plt.cm.gray)\n', (5567, 5597), True, 'import matplotlib.pyplot as plt\n'), ((5602, 5617), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (5610, 5617), True, 'import matplotlib.pyplot as plt\n'), ((5622, 5640), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (5638, 5640), True, 'import matplotlib.pyplot as plt\n'), ((5687, 5725), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d %H:%M:%S"""', 'ts'], {}), "('%Y-%m-%d %H:%M:%S', ts)\n", (5700, 5725), False, 'import time\n'), ((5878, 5888), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5886, 5888), True, 'import matplotlib.pyplot as plt\n'), ((6391, 6403), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (6401, 6403), True, 'import matplotlib.pyplot as plt\n'), ((6409, 6425), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(131)'], {}), '(131)\n', (6420, 6425), True, 'import matplotlib.pyplot as plt\n'), ((6430, 6454), 'matplotlib.pyplot.title', 'plt.title', (['image_a_title'], {}), '(image_a_title)\n', (6439, 6454), True, 'import matplotlib.pyplot as plt\n'), ((6459, 6478), 'matplotlib.pyplot.imshow', 'plt.imshow', (['image_a'], {}), '(image_a)\n', (6469, 6478), True, 'import matplotlib.pyplot as plt\n'), ((6483, 6498), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (6491, 6498), True, 'import matplotlib.pyplot as plt\n'), ((6504, 6520), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(132)'], {}), '(132)\n', (6515, 6520), True, 'import matplotlib.pyplot as plt\n'), ((6525, 6549), 'matplotlib.pyplot.title', 'plt.title', (['image_b_title'], {}), '(image_b_title)\n', (6534, 6549), True, 'import matplotlib.pyplot as plt\n'), ((6554, 6573), 'matplotlib.pyplot.imshow', 'plt.imshow', (['image_b'], {}), '(image_b)\n', (6564, 6573), True, 'import matplotlib.pyplot as plt\n'), ((6578, 6593), 'matplotlib.pyplot.axis', 'plt.axis', 
(['"""off"""'], {}), "('off')\n", (6586, 6593), True, 'import matplotlib.pyplot as plt\n'), ((6599, 6615), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(133)'], {}), '(133)\n', (6610, 6615), True, 'import matplotlib.pyplot as plt\n'), ((6620, 6666), 'matplotlib.pyplot.title', 'plt.title', (["(image_a_title + '-' + image_b_title)"], {}), "(image_a_title + '-' + image_b_title)\n", (6629, 6666), True, 'import matplotlib.pyplot as plt\n'), ((6755, 6781), 'numpy.linalg.norm', 'np.linalg.norm', (['difference'], {}), '(difference)\n', (6769, 6781), True, 'import numpy as np\n'), ((6913, 6953), 'matplotlib.pyplot.imshow', 'plt.imshow', (['difference'], {'cmap': 'plt.cm.gray'}), '(difference, cmap=plt.cm.gray)\n', (6923, 6953), True, 'import matplotlib.pyplot as plt\n'), ((6958, 6973), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (6966, 6973), True, 'import matplotlib.pyplot as plt\n'), ((6978, 6996), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (6994, 6996), True, 'import matplotlib.pyplot as plt\n'), ((7043, 7081), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d %H:%M:%S"""', 'ts'], {}), "('%Y-%m-%d %H:%M:%S', ts)\n", (7056, 7081), False, 'import time\n'), ((7294, 7304), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (7302, 7304), True, 'import matplotlib.pyplot as plt\n'), ((10164, 10176), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (10174, 10176), True, 'import matplotlib.pyplot as plt\n'), ((10287, 10303), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(141)'], {}), '(141)\n', (10298, 10303), True, 'import matplotlib.pyplot as plt\n'), ((10308, 10332), 'matplotlib.pyplot.title', 'plt.title', (['image_a_title'], {}), '(image_a_title)\n', (10317, 10332), True, 'import matplotlib.pyplot as plt\n'), ((10337, 10356), 'matplotlib.pyplot.imshow', 'plt.imshow', (['image_a'], {}), '(image_a)\n', (10347, 10356), True, 'import matplotlib.pyplot as plt\n'), ((10361, 10390), 'matplotlib.pyplot.text', 
'plt.text', (['x', 'y', '"""Top1 label:"""'], {}), "(x, y, 'Top1 label:')\n", (10369, 10390), True, 'import matplotlib.pyplot as plt\n'), ((10395, 10430), 'matplotlib.pyplot.text', 'plt.text', (['(x + xos)', '(y + yos)', 'a_label'], {}), '(x + xos, y + yos, a_label)\n', (10403, 10430), True, 'import matplotlib.pyplot as plt\n'), ((10431, 10446), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (10439, 10446), True, 'import matplotlib.pyplot as plt\n'), ((10452, 10468), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(142)'], {}), '(142)\n', (10463, 10468), True, 'import matplotlib.pyplot as plt\n'), ((10473, 10497), 'matplotlib.pyplot.title', 'plt.title', (['image_b_title'], {}), '(image_b_title)\n', (10482, 10497), True, 'import matplotlib.pyplot as plt\n'), ((10502, 10521), 'matplotlib.pyplot.imshow', 'plt.imshow', (['image_b'], {}), '(image_b)\n', (10512, 10521), True, 'import matplotlib.pyplot as plt\n'), ((10526, 10555), 'matplotlib.pyplot.text', 'plt.text', (['x', 'y', '"""Top1 label:"""'], {}), "(x, y, 'Top1 label:')\n", (10534, 10555), True, 'import matplotlib.pyplot as plt\n'), ((10560, 10595), 'matplotlib.pyplot.text', 'plt.text', (['(x + xos)', '(y + yos)', 'b_label'], {}), '(x + xos, y + yos, b_label)\n', (10568, 10595), True, 'import matplotlib.pyplot as plt\n'), ((10596, 10611), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (10604, 10611), True, 'import matplotlib.pyplot as plt\n'), ((10617, 10633), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(143)'], {}), '(143)\n', (10628, 10633), True, 'import matplotlib.pyplot as plt\n'), ((10638, 10662), 'matplotlib.pyplot.title', 'plt.title', (['image_c_title'], {}), '(image_c_title)\n', (10647, 10662), True, 'import matplotlib.pyplot as plt\n'), ((10667, 10686), 'matplotlib.pyplot.imshow', 'plt.imshow', (['image_c'], {}), '(image_c)\n', (10677, 10686), True, 'import matplotlib.pyplot as plt\n'), ((10691, 10720), 'matplotlib.pyplot.text', 'plt.text', (['x', 'y', 
'"""Top1 label:"""'], {}), "(x, y, 'Top1 label:')\n", (10699, 10720), True, 'import matplotlib.pyplot as plt\n'), ((10725, 10760), 'matplotlib.pyplot.text', 'plt.text', (['(x + xos)', '(y + yos)', 'c_label'], {}), '(x + xos, y + yos, c_label)\n', (10733, 10760), True, 'import matplotlib.pyplot as plt\n'), ((10761, 10776), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (10769, 10776), True, 'import matplotlib.pyplot as plt\n'), ((10782, 10798), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(144)'], {}), '(144)\n', (10793, 10798), True, 'import matplotlib.pyplot as plt\n'), ((10803, 10827), 'matplotlib.pyplot.title', 'plt.title', (['image_d_title'], {}), '(image_d_title)\n', (10812, 10827), True, 'import matplotlib.pyplot as plt\n'), ((10832, 10851), 'matplotlib.pyplot.imshow', 'plt.imshow', (['image_d'], {}), '(image_d)\n', (10842, 10851), True, 'import matplotlib.pyplot as plt\n'), ((10856, 10885), 'matplotlib.pyplot.text', 'plt.text', (['x', 'y', '"""Top1 label:"""'], {}), "(x, y, 'Top1 label:')\n", (10864, 10885), True, 'import matplotlib.pyplot as plt\n'), ((10890, 10925), 'matplotlib.pyplot.text', 'plt.text', (['(x + xos)', '(y + yos)', 'd_label'], {}), '(x + xos, y + yos, d_label)\n', (10898, 10925), True, 'import matplotlib.pyplot as plt\n'), ((10926, 10941), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (10934, 10941), True, 'import matplotlib.pyplot as plt\n'), ((10947, 10965), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (10963, 10965), True, 'import matplotlib.pyplot as plt\n'), ((11126, 11136), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (11134, 11136), True, 'import matplotlib.pyplot as plt\n'), ((892, 923), 'numpy.array', 'np.array', (['[0.485, 0.456, 0.406]'], {}), '([0.485, 0.456, 0.406])\n', (900, 923), True, 'import numpy as np\n'), ((953, 984), 'numpy.array', 'np.array', (['[0.229, 0.224, 0.225]'], {}), '([0.229, 0.224, 0.225])\n', (961, 984), True, 'import 
numpy as np\n'), ((2636, 2658), 'os.path.exists', 'os.path.exists', (['OUTPUT'], {}), '(OUTPUT)\n', (2650, 2658), False, 'import os\n'), ((2668, 2689), 'os.mkdir', 'os.mkdir', (['OUTPUT', '(493)'], {}), '(OUTPUT, 493)\n', (2676, 2689), False, 'import os\n'), ((2703, 2738), 'os.path.exists', 'os.path.exists', (["(OUTPUT + '/' + type)"], {}), "(OUTPUT + '/' + type)\n", (2717, 2738), False, 'import os\n'), ((2748, 2782), 'os.mkdir', 'os.mkdir', (["(OUTPUT + '/' + type)", '(493)'], {}), "(OUTPUT + '/' + type, 493)\n", (2756, 2782), False, 'import os\n'), ((5665, 5676), 'time.time', 'time.time', ([], {}), '()\n', (5674, 5676), False, 'import time\n'), ((5738, 5762), 'os.path.exists', 'os.path.exists', (['"""output"""'], {}), "('output')\n", (5752, 5762), False, 'import os\n'), ((5772, 5793), 'os.makedirs', 'os.makedirs', (['"""output"""'], {}), "('output')\n", (5783, 5793), False, 'import os\n'), ((7021, 7032), 'time.time', 'time.time', ([], {}), '()\n', (7030, 7032), False, 'import time\n'), ((7094, 7137), 'os.path.exists', 'os.path.exists', (['"""examples/image_cls/output"""'], {}), "('examples/image_cls/output')\n", (7108, 7137), False, 'import os\n'), ((7147, 7168), 'os.makedirs', 'os.makedirs', (['"""output"""'], {}), "('output')\n", (7158, 7168), False, 'import os\n'), ((10978, 11021), 'os.path.exists', 'os.path.exists', (['"""examples/image_cls/output"""'], {}), "('examples/image_cls/output')\n", (10992, 11021), False, 'import os\n'), ((11031, 11052), 'os.makedirs', 'os.makedirs', (['"""output"""'], {}), "('output')\n", (11042, 11052), False, 'import os\n'), ((11608, 11655), 'os.path.join', 'os.path.join', (['folder', 'pdparams_files[max_index]'], {}), '(folder, pdparams_files[max_index])\n', (11620, 11655), False, 'import os\n'), ((11252, 11270), 'os.listdir', 'os.listdir', (['folder'], {}), '(folder)\n', (11262, 11270), False, 'import os\n'), ((5352, 5377), 'numpy.where', 'np.where', (['(difference != 0)'], {}), '(difference != 0)\n', (5360, 5377), True, 
'import numpy as np\n'), ((6708, 6733), 'numpy.where', 'np.where', (['(difference != 0)'], {}), '(difference != 0)\n', (6716, 6733), True, 'import numpy as np\n')] |
from flask import Blueprint, request
import json
import databaseutils as utils
tiposrefeicao = Blueprint("tiposrefeicao", __name__)
tiposrefeicaoColumns = ["cod_tiporefeicao", "designacao"]
@tiposrefeicao.route("/api/tiposrefeicao", methods=["GET"])
@tiposrefeicao.route("/api/tiposrefeicao/", methods=["GET"])
def get_tiposrefeicao():
    """Return every meal type as rows keyed by ``tiposrefeicaoColumns``."""
    # Plain string literal: the original used an f-string with no
    # placeholders (ruff F541).
    return utils.getAll(tiposrefeicaoColumns,
                        "SELECT * FROM selecttiposrefeicao();")
@tiposrefeicao.route("/api/tiposrefeicao/<cod_TipoRefeicao>", methods=["GET"])
@tiposrefeicao.route("/api/tiposrefeicao/<cod_TipoRefeicao>/", methods=["GET"])
def get_tiporefeicao(cod_TipoRefeicao):
    """Return the single meal type identified by *cod_TipoRefeicao*."""
    # SECURITY(review): cod_TipoRefeicao comes straight from the URL and is
    # interpolated into the SQL text -- injection risk; prefer a
    # parameterized query if databaseutils supports one.
    return utils.getOne(
        tiposrefeicaoColumns,
        f"SELECT * FROM selecttiporefeicao('{cod_TipoRefeicao}');")
@tiposrefeicao.route("/api/tiposrefeicao", methods=["POST"])
@tiposrefeicao.route("/api/tiposrefeicao/", methods=["POST"])
def post_tiporefeicao():
    """Create a meal type from the JSON request body."""
    # The whole request body is serialized and passed to the stored function
    # as one JSON string.
    # SECURITY(review): request JSON is interpolated into the SQL text --
    # injection risk; prefer a parameterized query if databaseutils supports one.
    return utils.postOne(
        tiposrefeicaoColumns,
        f"SELECT * FROM inserttiposrefeicao('{json.dumps(request.json)}');")
@tiposrefeicao.route("/api/tiposrefeicao/<cod_TipoRefeicao>", methods=["PUT"])
@tiposrefeicao.route("/api/tiposrefeicao/<cod_TipoRefeicao>/", methods=["PUT"])
def put_tiporefeicao(cod_TipoRefeicao):
    """Update the meal type *cod_TipoRefeicao* with the JSON request body."""
    # SECURITY(review): both the URL parameter and the request JSON are
    # interpolated into the SQL text -- injection risk; prefer a
    # parameterized call if databaseutils supports one.
    return utils.putOne(
        f"CALL updatetiposrefeicao('{cod_TipoRefeicao}', '{json.dumps(request.json)}');"
    )
@tiposrefeicao.route("/api/tiposrefeicao/<cod_TipoRefeicao>",
                     methods=["DELETE"])
@tiposrefeicao.route("/api/tiposrefeicao/<cod_TipoRefeicao>/",
                     methods=["DELETE"])
def delete_tiporefeicao(cod_TipoRefeicao):
    """Delete the meal type identified by *cod_TipoRefeicao*."""
    # SECURITY(review): URL parameter interpolated into the SQL text --
    # injection risk; prefer a parameterized call if databaseutils supports one.
    return utils.deleteOne(f"CALL deletetiposrefeicao('{cod_TipoRefeicao}');")
"databaseutils.deleteOne",
"databaseutils.getOne",
"json.dumps",
"flask.Blueprint",
"databaseutils.getAll"
] | [((96, 132), 'flask.Blueprint', 'Blueprint', (['"""tiposrefeicao"""', '__name__'], {}), "('tiposrefeicao', __name__)\n", (105, 132), False, 'from flask import Blueprint, request\n'), ((351, 426), 'databaseutils.getAll', 'utils.getAll', (['tiposrefeicaoColumns', 'f"""SELECT * FROM selecttiposrefeicao();"""'], {}), "(tiposrefeicaoColumns, f'SELECT * FROM selecttiposrefeicao();')\n", (363, 426), True, 'import databaseutils as utils\n'), ((663, 761), 'databaseutils.getOne', 'utils.getOne', (['tiposrefeicaoColumns', 'f"""SELECT * FROM selecttiporefeicao(\'{cod_TipoRefeicao}\');"""'], {}), '(tiposrefeicaoColumns,\n f"SELECT * FROM selecttiporefeicao(\'{cod_TipoRefeicao}\');")\n', (675, 761), True, 'import databaseutils as utils\n'), ((1642, 1709), 'databaseutils.deleteOne', 'utils.deleteOne', (['f"""CALL deletetiposrefeicao(\'{cod_TipoRefeicao}\');"""'], {}), '(f"CALL deletetiposrefeicao(\'{cod_TipoRefeicao}\');")\n', (1657, 1709), True, 'import databaseutils as utils\n'), ((1027, 1051), 'json.dumps', 'json.dumps', (['request.json'], {}), '(request.json)\n', (1037, 1051), False, 'import json\n'), ((1343, 1367), 'json.dumps', 'json.dumps', (['request.json'], {}), '(request.json)\n', (1353, 1367), False, 'import json\n')] |
#!/usr/bin/env /usr/local/opt/python@3.9/bin/python3.9
import sys
import psycopg2
from faker import Faker
from contextlib import closing
from random import randint
import time
if __name__ == "__main__":
    # Usage: script [total_records] [batch_size]
    record_total = 10
    commit_every = 1000
    argv = sys.argv
    if len(argv) > 1:
        record_total = int(argv[1])
    if len(argv) > 2:
        commit_every = int(argv[2])

    generator = Faker(["en_US"], use_weighting=False)
    connection = psycopg2.connect(dbname='hla', user='postgres',
                                  password='<PASSWORD>', host='localhost',
                                  port=5432)
    # Insert fake book rows, committing every `commit_every` records.
    with closing(connection) as conn, closing(conn.cursor()) as cursor:
        started = time.monotonic()
        for n in range(record_total):
            author = generator.first_name() + " " + generator.last_name()
            title = generator.text(max_nb_chars=20).replace(".", "")
            year = generator.date_between(start_date='-75y', end_date='today').year
            category_id = randint(1, 3)
            cursor.execute(
                "INSERT INTO books (id, category_id, author, title, year) "
                "VALUES (nextval('books_seq'), %s, %s, %s, %s)",
                (category_id, author, title, year))
            if n > 0 and n % commit_every == 0:
                print("commit at", n)
                conn.commit()
        conn.commit()
        elapsed = round(time.monotonic() - started, 2)
        print()
        print("Inserted ", record_total, " records in", elapsed, "sec")
"faker.Faker",
"time.monotonic",
"random.randint",
"psycopg2.connect"
] | [((384, 421), 'faker.Faker', 'Faker', (["['en_US']"], {'use_weighting': '(False)'}), "(['en_US'], use_weighting=False)\n", (389, 421), False, 'from faker import Faker\n'), ((440, 544), 'psycopg2.connect', 'psycopg2.connect', ([], {'dbname': '"""hla"""', 'user': '"""postgres"""', 'password': '"""<PASSWORD>"""', 'host': '"""localhost"""', 'port': '(5432)'}), "(dbname='hla', user='postgres', password='<PASSWORD>', host\n ='localhost', port=5432)\n", (456, 544), False, 'import psycopg2\n'), ((659, 675), 'time.monotonic', 'time.monotonic', ([], {}), '()\n', (673, 675), False, 'import time\n'), ((961, 974), 'random.randint', 'randint', (['(1)', '(3)'], {}), '(1, 3)\n', (968, 974), False, 'from random import randint\n'), ((1359, 1375), 'time.monotonic', 'time.monotonic', ([], {}), '()\n', (1373, 1375), False, 'import time\n')] |
import os
import sys
import matplotlib.pyplot as plt
import numpy as np
import cmasher as cmr
import astropy.units as u
import astropy.coordinates as coord
from astropy.io import ascii
from astropy.io import fits
from astropy.wcs import WCS
from functions import *
# Global matplotlib styling for all figures produced by this script.
plt.rcParams['mathtext.fontset'] = 'cm'
plt.rcParams['font.family'] = 'cmu serif'
SMALL_SIZE = 8
MEDIUM_SIZE = 8
BIGGER_SIZE = 12

plt.rc('font', size=SMALL_SIZE)          # controls default text sizes
plt.rc('axes', titlesize=MEDIUM_SIZE)    # fontsize of the axes title
plt.rc('axes', labelsize=MEDIUM_SIZE)    # fontsize of the x and y labels
plt.rc('xtick', labelsize=SMALL_SIZE)    # fontsize of the tick labels
plt.rc('ytick', labelsize=SMALL_SIZE)    # fontsize of the tick labels
plt.rc('legend', fontsize=SMALL_SIZE)    # legend fontsize

# Output directory for thesis figures (not used in main() below).
fig_directory='/Users/emma/OneDrive/PhD/thesis/Figures/'

# Diverging / one-sided colour maps cut from cmasher's twilight variants.
cmap_blue = cmr.get_sub_cmap('twilight_shifted', 0, 0.5)
cmap_red = cmr.get_sub_cmap('twilight', 0.5, 1)
cmap_redblue=cmr.get_sub_cmap('twilight_shifted', 0.1, 0.9)
cmap=plt.cm.twilight_shifted
def main():
    """Display the pilot Faraday sky cutout and report RM +/- error at each source."""
    rm_map, header = fitsopen('/Volumes/TARDIS/Work/askap/Faraday_cutout_pilot.fits')
    rm_err_map, _err_header = fitsopen('/Volumes/TARDIS/Work/askap/Faraday_error_pilot.fits')
    print(rm_map.shape)
    wcs = WCS(header)
    # Source list: one row per source, columns are RA (hourangle) and Dec (deg) strings.
    sources = np.loadtxt('source_coords.txt', dtype='str')

    plt.figure()
    ax = plt.subplot(projection=wcs)
    image = ax.imshow(rm_map, origin='lower', cmap=cmap_redblue, vmin=-50, vmax=50)
    plt.colorbar(image, fraction=0.046, pad=0.04)

    for idx in range(sources.shape[0]):
        # RA is given in hours; convert to degrees before building the SkyCoord.
        ra = coord.Angle(coord.Angle(sources[idx, 0], unit=u.hourangle), unit=u.degree)
        dec = coord.Angle(sources[idx, 1], unit=u.degree)
        world = coord.SkyCoord(ra=ra, dec=dec)
        px, py = wcs.world_to_pixel(world)
        # Round the pixel coordinates to index the RM and error maps.
        col = int(round(float(px)))
        row = int(round(float(py)))
        plt.scatter(px, py, marker='.', color='k')
        RM = rm_map[row, col]
        RMerr = rm_err_map[row, col]
        print(sources[idx, 0], sources[idx, 1])
        print('{} +/- {}'.format(RM, RMerr))
    plt.show()
# Script entry point.
if __name__ == "__main__":
    main()
"astropy.coordinates.Angle",
"matplotlib.pyplot.colorbar",
"astropy.coordinates.SkyCoord",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.scatter",
"numpy.loadtxt",
"cmasher.get_sub_cmap",
"matplotlib.pyplot.subplot",
"astropy.wcs.WCS",
"matplotlib.pyplot.rc",
"matplotlib.pyplot.show"
] | [((399, 430), 'matplotlib.pyplot.rc', 'plt.rc', (['"""font"""'], {'size': 'SMALL_SIZE'}), "('font', size=SMALL_SIZE)\n", (405, 430), True, 'import matplotlib.pyplot as plt\n'), ((470, 507), 'matplotlib.pyplot.rc', 'plt.rc', (['"""axes"""'], {'titlesize': 'MEDIUM_SIZE'}), "('axes', titlesize=MEDIUM_SIZE)\n", (476, 507), True, 'import matplotlib.pyplot as plt\n'), ((541, 578), 'matplotlib.pyplot.rc', 'plt.rc', (['"""axes"""'], {'labelsize': 'MEDIUM_SIZE'}), "('axes', labelsize=MEDIUM_SIZE)\n", (547, 578), True, 'import matplotlib.pyplot as plt\n'), ((615, 652), 'matplotlib.pyplot.rc', 'plt.rc', (['"""xtick"""'], {'labelsize': 'SMALL_SIZE'}), "('xtick', labelsize=SMALL_SIZE)\n", (621, 652), True, 'import matplotlib.pyplot as plt\n'), ((686, 723), 'matplotlib.pyplot.rc', 'plt.rc', (['"""ytick"""'], {'labelsize': 'SMALL_SIZE'}), "('ytick', labelsize=SMALL_SIZE)\n", (692, 723), True, 'import matplotlib.pyplot as plt\n'), ((757, 794), 'matplotlib.pyplot.rc', 'plt.rc', (['"""legend"""'], {'fontsize': 'SMALL_SIZE'}), "('legend', fontsize=SMALL_SIZE)\n", (763, 794), True, 'import matplotlib.pyplot as plt\n'), ((886, 930), 'cmasher.get_sub_cmap', 'cmr.get_sub_cmap', (['"""twilight_shifted"""', '(0)', '(0.5)'], {}), "('twilight_shifted', 0, 0.5)\n", (902, 930), True, 'import cmasher as cmr\n'), ((942, 978), 'cmasher.get_sub_cmap', 'cmr.get_sub_cmap', (['"""twilight"""', '(0.5)', '(1)'], {}), "('twilight', 0.5, 1)\n", (958, 978), True, 'import cmasher as cmr\n'), ((992, 1038), 'cmasher.get_sub_cmap', 'cmr.get_sub_cmap', (['"""twilight_shifted"""', '(0.1)', '(0.9)'], {}), "('twilight_shifted', 0.1, 0.9)\n", (1008, 1038), True, 'import cmasher as cmr\n'), ((1290, 1301), 'astropy.wcs.WCS', 'WCS', (['header'], {}), '(header)\n', (1293, 1301), False, 'from astropy.wcs import WCS\n'), ((1312, 1356), 'numpy.loadtxt', 'np.loadtxt', (['"""source_coords.txt"""'], {'dtype': '"""str"""'}), "('source_coords.txt', dtype='str')\n", (1322, 1356), True, 'import numpy as np\n'), ((1358, 
1370), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1368, 1370), True, 'import matplotlib.pyplot as plt\n'), ((1375, 1402), 'matplotlib.pyplot.subplot', 'plt.subplot', ([], {'projection': 'wcs'}), '(projection=wcs)\n', (1386, 1402), True, 'import matplotlib.pyplot as plt\n'), ((1486, 1527), 'matplotlib.pyplot.colorbar', 'plt.colorbar', (['c'], {'fraction': '(0.046)', 'pad': '(0.04)'}), '(c, fraction=0.046, pad=0.04)\n', (1498, 1527), True, 'import matplotlib.pyplot as plt\n'), ((2047, 2057), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2055, 2057), True, 'import matplotlib.pyplot as plt\n'), ((1573, 1617), 'astropy.coordinates.Angle', 'coord.Angle', (['sources[i, 0]'], {'unit': 'u.hourangle'}), '(sources[i, 0], unit=u.hourangle)\n', (1584, 1617), True, 'import astropy.coordinates as coord\n'), ((1623, 1656), 'astropy.coordinates.Angle', 'coord.Angle', (['ra_ha'], {'unit': 'u.degree'}), '(ra_ha, unit=u.degree)\n', (1634, 1656), True, 'import astropy.coordinates as coord\n'), ((1664, 1705), 'astropy.coordinates.Angle', 'coord.Angle', (['sources[i, 1]'], {'unit': 'u.degree'}), '(sources[i, 1], unit=u.degree)\n', (1675, 1705), True, 'import astropy.coordinates as coord\n'), ((1713, 1743), 'astropy.coordinates.SkyCoord', 'coord.SkyCoord', ([], {'ra': 'ra', 'dec': 'dec'}), '(ra=ra, dec=dec)\n', (1727, 1743), True, 'import astropy.coordinates as coord\n'), ((1857, 1919), 'matplotlib.pyplot.scatter', 'plt.scatter', (['pixcoords[0]', 'pixcoords[1]'], {'marker': '"""."""', 'color': '"""k"""'}), "(pixcoords[0], pixcoords[1], marker='.', color='k')\n", (1868, 1919), True, 'import matplotlib.pyplot as plt\n')] |
from flask import Blueprint, render_template
from flask_login import login_required, current_user
# Blueprint grouping the FGR article pages; all views require login.
articles = Blueprint(
    "articles",
    __name__,
)
@articles.route("/intro-fgr")
@login_required
def intro():
    """Render the FGR intro article for the logged-in user."""
    return render_template("article-intro-fgr.html", user=current_user)
@articles.route("/causes-fgr")
@login_required
def causes():
    """Render the FGR causes article for the logged-in user."""
    return render_template("article-causes-fgr.html", user=current_user)
@articles.route("/twinsrisk-fgr")
@login_required
def twinsrisk():
    """Render the FGR twins-risk article for the logged-in user."""
    return render_template("article-twinsrisk-fgr.html", user=current_user)
@articles.route("/symptoms-fgr")
@login_required
def symptoms():
    """Render the FGR symptoms article for the logged-in user."""
    return render_template("article-symptoms-fgr.html", user=current_user)
@articles.route("/diagnosis-fgr")
@login_required
def diagnosis():
    """Render the FGR diagnosis article for the logged-in user."""
    return render_template("article-diagnosis-fgr.html", user=current_user)
@articles.route("/preventions-fgr")
@login_required
def preventions():
    """Render the FGR preventions article for the logged-in user."""
    return render_template("article-preventions-fgr.html", user=current_user)
| [
"flask.render_template",
"flask.Blueprint"
] | [((110, 141), 'flask.Blueprint', 'Blueprint', (['"""articles"""', '__name__'], {}), "('articles', __name__)\n", (119, 141), False, 'from flask import Blueprint, render_template\n'), ((225, 285), 'flask.render_template', 'render_template', (['"""article-intro-fgr.html"""'], {'user': 'current_user'}), "('article-intro-fgr.html', user=current_user)\n", (240, 285), False, 'from flask import Blueprint, render_template\n'), ((360, 421), 'flask.render_template', 'render_template', (['"""article-causes-fgr.html"""'], {'user': 'current_user'}), "('article-causes-fgr.html', user=current_user)\n", (375, 421), False, 'from flask import Blueprint, render_template\n'), ((502, 566), 'flask.render_template', 'render_template', (['"""article-twinsrisk-fgr.html"""'], {'user': 'current_user'}), "('article-twinsrisk-fgr.html', user=current_user)\n", (517, 566), False, 'from flask import Blueprint, render_template\n'), ((645, 708), 'flask.render_template', 'render_template', (['"""article-symptoms-fgr.html"""'], {'user': 'current_user'}), "('article-symptoms-fgr.html', user=current_user)\n", (660, 708), False, 'from flask import Blueprint, render_template\n'), ((789, 853), 'flask.render_template', 'render_template', (['"""article-diagnosis-fgr.html"""'], {'user': 'current_user'}), "('article-diagnosis-fgr.html', user=current_user)\n", (804, 853), False, 'from flask import Blueprint, render_template\n'), ((938, 1004), 'flask.render_template', 'render_template', (['"""article-preventions-fgr.html"""'], {'user': 'current_user'}), "('article-preventions-fgr.html', user=current_user)\n", (953, 1004), False, 'from flask import Blueprint, render_template\n')] |
from flask import Flask, render_template, request, redirect, url_for
import tensorflow as tf
from keras.models import load_model
from keras.backend import set_session
from src.utils import image_preprocessing
from src.utils import overall_class_label
from src.utils import infinite_scraper
# sessions and default graphs are needed to make tensorflow work properly
# NOTE(review): `global` at module level is a no-op; kept for byte-compat.
global sess
global graph
sess = tf.Session()
graph = tf.get_default_graph()
set_session(sess)

# One pretrained binary classifier per brand attribute, loaded once at startup.
num_attributes = 4
model = [[] for i in range(num_attributes)]
model[0] = load_model('./model/augmented/glamarous_model.h5')
model[1] = load_model('./model/augmented/rugged_model.h5')
model[2] = load_model('./model/augmented/fun_model.h5')
model[3] = load_model('./model/augmented/healthy_model.h5')

app = Flask(__name__)
def data_collection(official, unofficial):
    """Scrape image URLs from the official page and the hashtag (unofficial) page.

    Returns a pair ``(official_images, unofficial_images)`` of URL lists.
    """
    # Approximate number of images per page: the scraper returns roughly
    # this many plus the last batch.
    LIMIT_IMAGE_COUNT = 36
    # Personal Instagram credentials, required for API access.
    # NOTE: Instagram blocks API access when too many images are retrieved
    # within a short amount of time.
    user_name = '<EMAIL>'
    password = '<PASSWORD>'
    return (infinite_scraper.official(user_name, password, LIMIT_IMAGE_COUNT, official),
            infinite_scraper.unofficial(user_name, password, LIMIT_IMAGE_COUNT, unofficial))
def data_preprocessing(official_images, unofficial_images):
    """Reformat both image collections into model-ready input (X_test).

    The images stay in Python objects, so nothing is downloaded to disk.
    Returns a pair ``(preprocessed_official, preprocessed_unofficial)``.
    """
    return (image_preprocessing.preprocessing(official_images),
            image_preprocessing.preprocessing(unofficial_images))
def make_prediction(preprocessed_data):
    """Feed preprocessed images through the four pretrained attribute models.

    Returns ``(prediction, total)`` where *prediction* is the list of
    predicted label names and *total* its length.
    """
    X_test = preprocessed_data
    # TensorFlow 1.x bookkeeping: re-bind the module-level graph/session
    # before using the loaded models.
    with graph.as_default():
        set_session(sess)
    scores = [model[idx].predict(X_test) for idx in range(num_attributes)]
    encoded = overall_class_label.give_ovr_class_label_output(scores).tolist()
    # Map encoded class indices back to their original label names.
    code2label = {0: 'glamorous', 1: 'rugged', 2: 'fun', 3: 'healthy'}
    prediction = [code2label.get(code) for code in encoded]
    return prediction, len(prediction)
# the homepage the user sees when starting the application
@app.route("/", methods=["POST", "GET"])
def index():
    """Render the home page; on form submit, forward the data to /predict/."""
    if request.method != "POST":
        return render_template("index.html")
    # Capture the submitted account names and redirect to the predict page.
    official = request.form["official"]
    unofficial = request.form["unofficial"]
    return redirect(url_for("predict", official=official, unofficial=unofficial))
# the page the user gets redirected to after hitting the 'Predict' button on the homepage
# Note: the entire pipeline takes a couple of minutes since we feed every picture in each of the four models
@app.route("/predict/", methods=["POST", "GET"])
def predict():
    """Scrape, preprocess and classify both pages, then render the comparison table."""
    from collections import Counter  # stdlib; local import keeps module top unchanged

    official = request.args.get('official')
    unofficial = request.args.get('unofficial')
    official_images, unofficial_images = data_collection(official, unofficial)
    preprocessed_official, preprocessed_unofficial = data_preprocessing(
        official_images, unofficial_images)
    prediction_official, total_official = make_prediction(preprocessed_official)
    prediction_unofficial, total_unofficial = make_prediction(preprocessed_unofficial)

    # Tally all predicted labels in one pass instead of four list.count() scans per page.
    counts_official = Counter(prediction_official)
    counts_unofficial = Counter(prediction_unofficial)

    def _percent(count, total):
        # Integer percentage for the relative table.
        # NOTE(review): as in the original code, this raises ZeroDivisionError
        # if the scraper returned no images at all.
        return round(count / total * 100)

    fun_official = counts_official['fun']
    glamorous_official = counts_official['glamorous']
    healthy_official = counts_official['healthy']
    rugged_official = counts_official['rugged']
    fun_unofficial = counts_unofficial['fun']
    glamorous_unofficial = counts_unofficial['glamorous']
    healthy_unofficial = counts_unofficial['healthy']
    rugged_unofficial = counts_unofficial['rugged']

    return render_template(
        "predict.html",
        fo=fun_official, fu=fun_unofficial,
        fo_rel=_percent(fun_official, total_official),
        fu_rel=_percent(fun_unofficial, total_unofficial),
        go=glamorous_official, gu=glamorous_unofficial,
        go_rel=_percent(glamorous_official, total_official),
        gu_rel=_percent(glamorous_unofficial, total_unofficial),
        ho=healthy_official, hu=healthy_unofficial,
        ho_rel=_percent(healthy_official, total_official),
        hu_rel=_percent(healthy_unofficial, total_unofficial),
        ro=rugged_official, ru=rugged_unofficial,
        ro_rel=_percent(rugged_official, total_official),
        ru_rel=_percent(rugged_unofficial, total_unofficial),
        to=total_official, tu=total_unofficial,
        unofficial=unofficial)
if __name__ == "__main__":
    # Development server only; use a proper WSGI server in production.
    app.run(host="127.0.0.1", port=8080, debug=True)
"flask.render_template",
"flask.request.args.get",
"src.utils.overall_class_label.give_ovr_class_label_output",
"src.utils.infinite_scraper.unofficial",
"keras.models.load_model",
"flask.Flask",
"tensorflow.Session",
"src.utils.infinite_scraper.official",
"keras.backend.set_session",
"flask.url_fo... | [((397, 409), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (407, 409), True, 'import tensorflow as tf\n'), ((418, 440), 'tensorflow.get_default_graph', 'tf.get_default_graph', ([], {}), '()\n', (438, 440), True, 'import tensorflow as tf\n'), ((441, 458), 'keras.backend.set_session', 'set_session', (['sess'], {}), '(sess)\n', (452, 458), False, 'from keras.backend import set_session\n'), ((533, 583), 'keras.models.load_model', 'load_model', (['"""./model/augmented/glamarous_model.h5"""'], {}), "('./model/augmented/glamarous_model.h5')\n", (543, 583), False, 'from keras.models import load_model\n'), ((595, 642), 'keras.models.load_model', 'load_model', (['"""./model/augmented/rugged_model.h5"""'], {}), "('./model/augmented/rugged_model.h5')\n", (605, 642), False, 'from keras.models import load_model\n'), ((654, 698), 'keras.models.load_model', 'load_model', (['"""./model/augmented/fun_model.h5"""'], {}), "('./model/augmented/fun_model.h5')\n", (664, 698), False, 'from keras.models import load_model\n'), ((710, 758), 'keras.models.load_model', 'load_model', (['"""./model/augmented/healthy_model.h5"""'], {}), "('./model/augmented/healthy_model.h5')\n", (720, 758), False, 'from keras.models import load_model\n'), ((767, 782), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (772, 782), False, 'from flask import Flask, render_template, request, redirect, url_for\n'), ((1403, 1478), 'src.utils.infinite_scraper.official', 'infinite_scraper.official', (['user_name', 'password', 'LIMIT_IMAGE_COUNT', 'official'], {}), '(user_name, password, LIMIT_IMAGE_COUNT, official)\n', (1428, 1478), False, 'from src.utils import infinite_scraper\n'), ((1503, 1582), 'src.utils.infinite_scraper.unofficial', 'infinite_scraper.unofficial', (['user_name', 'password', 'LIMIT_IMAGE_COUNT', 'unofficial'], {}), '(user_name, password, LIMIT_IMAGE_COUNT, unofficial)\n', (1530, 1582), False, 'from src.utils import infinite_scraper\n'), ((1929, 1979), 
'src.utils.image_preprocessing.preprocessing', 'image_preprocessing.preprocessing', (['official_images'], {}), '(official_images)\n', (1962, 1979), False, 'from src.utils import image_preprocessing\n'), ((2015, 2067), 'src.utils.image_preprocessing.preprocessing', 'image_preprocessing.preprocessing', (['unofficial_images'], {}), '(unofficial_images)\n', (2048, 2067), False, 'from src.utils import image_preprocessing\n'), ((3809, 3837), 'flask.request.args.get', 'request.args.get', (['"""official"""'], {}), "('official')\n", (3825, 3837), False, 'from flask import Flask, render_template, request, redirect, url_for\n'), ((3855, 3885), 'flask.request.args.get', 'request.args.get', (['"""unofficial"""'], {}), "('unofficial')\n", (3871, 3885), False, 'from flask import Flask, render_template, request, redirect, url_for\n'), ((5475, 6005), 'flask.render_template', 'render_template', (['"""predict.html"""'], {'fo': 'fun_official', 'fu': 'fun_unofficial', 'fo_rel': 'fun_official_rel', 'fu_rel': 'fun_unofficial_rel', 'go': 'glamorous_official', 'gu': 'glamorous_unofficial', 'go_rel': 'glamorous_official_rel', 'gu_rel': 'glamorous_unofficial_rel', 'ho': 'healthy_official', 'hu': 'healthy_unofficial', 'ho_rel': 'healthy_official_rel', 'hu_rel': 'healthy_unofficial_rel', 'ro': 'rugged_official', 'ru': 'rugged_unofficial', 'ro_rel': 'rugged_official_rel', 'ru_rel': 'rugged_unofficial_rel', 'to': 'total_official', 'tu': 'total_unofficial', 'unofficial': 'unofficial'}), "('predict.html', fo=fun_official, fu=fun_unofficial, fo_rel=\n fun_official_rel, fu_rel=fun_unofficial_rel, go=glamorous_official, gu=\n glamorous_unofficial, go_rel=glamorous_official_rel, gu_rel=\n glamorous_unofficial_rel, ho=healthy_official, hu=healthy_unofficial,\n ho_rel=healthy_official_rel, hu_rel=healthy_unofficial_rel, ro=\n rugged_official, ru=rugged_unofficial, ro_rel=rugged_official_rel,\n ru_rel=rugged_unofficial_rel, to=total_official, tu=total_unofficial,\n unofficial=unofficial)\n", (5490, 
6005), False, 'from flask import Flask, render_template, request, redirect, url_for\n'), ((2450, 2467), 'keras.backend.set_session', 'set_session', (['sess'], {}), '(sess)\n', (2461, 2467), False, 'from keras.backend import set_session\n'), ((2633, 2688), 'src.utils.overall_class_label.give_ovr_class_label_output', 'overall_class_label.give_ovr_class_label_output', (['y_pred'], {}), '(y_pred)\n', (2680, 2688), False, 'from src.utils import overall_class_label\n'), ((3499, 3528), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (3514, 3528), False, 'from flask import Flask, render_template, request, redirect, url_for\n'), ((3412, 3472), 'flask.url_for', 'url_for', (['"""predict"""'], {'official': 'official', 'unofficial': 'unofficial'}), "('predict', official=official, unofficial=unofficial)\n", (3419, 3472), False, 'from flask import Flask, render_template, request, redirect, url_for\n')] |
# Read the lengths of the opposite and adjacent legs of a right triangle,
# then compute and display the length of the hypotenuse.
from math import hypot

catoposto = float(input('Cateto oposto: '))        # opposite leg
catadjacente = float(input('Cateto adjacente: '))  # adjacent leg
# Print a labelled, two-decimal result (the original printed the bare float),
# matching the reference solution kept below.
hipotenusa = hypot(catoposto, catadjacente)
print('A hipotenusa mede {:.2f}'.format(hipotenusa))
'''
Reference solution from the lesson:
co = float(input('Comprimento do cateto oposto: '))
ca = float(input('Comprimento do cateto adjacente: '))
hi = hypot(co, ca)
print('A hipotenusa mede {:.2f}'.format(hi))
'''
| [
"math.hypot"
] | [((283, 313), 'math.hypot', 'hypot', (['catoposto', 'catadjacente'], {}), '(catoposto, catadjacente)\n', (288, 313), False, 'from math import hypot\n')] |
import os
CRAIGSLIST_SITE = 'newjersey'
CRAIGSLIST_CATEGORY = 'apa'
MIN_FT2 = 900
MIN_PRICE = 1500
MAX_PRICE = 3000
MAX_TRANSIT_DISTANCE = 0.75
BOXES = {
"Hoboken": [40.734966101, -74.0439891815, 40.7529789172, -74.0192699432],
"The Heights": [40.7332100782, -74.0573787689, 40.7615609255, -74.0378093719],
"Downtown": [40.7111582926, -74.05626297, 40.7357465407, -74.0299129486],
"Journal Square": [40.7131100727, -74.0830850601, 40.7402664072, -74.0509414673]
}
NEIGHBORHOODS = ["hoboken", "journal square", "heights", "newport", "grove",
"downtown", "paulus hook", "powerhouse", "exchange place",
"waterfront", "jersey city"]
STATIONS = {
"9th St light rail": [40.748874, -74.038552],
"2nd St light rail": [40.741594, -74.042730],
"Newport light rail": [40.726828, -74.036256],
"Harsimus Cove light rail": [40.722663, -74.037288],
"Harborside light rail": [40.719514, -74.034019],
"Exchange Pl light rail": [40.715993, -74.034118],
"Essex St light rail": [40.712847, -74.036114],
"Marin Blvd light rail": [40.714425, -74.043264],
"Jersey Ave light rail": [40.715001, -74.048428],
"Exchange Pl PATH": [40.716738, -74.032397],
"Grove St PATH": [40.719609, -74.042642],
"Newport PATH": [40.726998, -74.033818],
"Journal Square PATH": [40.733014, -74.062882],
"Hoboken PATH": [40.734937, -74.027545]
}
SLACK_TOKEN = os.getenv('SLACK_TOKEN', '')
SLACK_CHANNEL = '#cribs'
| [
"os.getenv"
] | [((1429, 1457), 'os.getenv', 'os.getenv', (['"""SLACK_TOKEN"""', '""""""'], {}), "('SLACK_TOKEN', '')\n", (1438, 1457), False, 'import os\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import pickle
import numpy as np
from scipy.io import wavfile
import python_speech_features as fextract
audio_filename = sys.argv[1]
features_filename = sys.argv[2]
rate, sig = wavfile.read(audio_filename)
fbank_feat = fextract.logfbank(sig,samplerate=rate)
with open(features_filename, 'wb') as stream:
pickle.dump(fbank_feat, stream)
| [
"pickle.dump",
"scipy.io.wavfile.read",
"python_speech_features.logfbank"
] | [((242, 270), 'scipy.io.wavfile.read', 'wavfile.read', (['audio_filename'], {}), '(audio_filename)\n', (254, 270), False, 'from scipy.io import wavfile\n'), ((286, 325), 'python_speech_features.logfbank', 'fextract.logfbank', (['sig'], {'samplerate': 'rate'}), '(sig, samplerate=rate)\n', (303, 325), True, 'import python_speech_features as fextract\n'), ((377, 408), 'pickle.dump', 'pickle.dump', (['fbank_feat', 'stream'], {}), '(fbank_feat, stream)\n', (388, 408), False, 'import pickle\n')] |
import numpy as np
import pymc as pm
import networkx as nx
from matplotlib import pyplot as plt
alpha = 0.5
beta = 0.1
L= 9.0
G0 = nx.Graph()
for i in range(1, 10):
for j in range(i + 1, 11):
G0.add_edge(i, j)
#G0.add_path(range(1, 11))
#G0.add_path(range(1, 11))
#G0.remove_edge(2, 3)
#G0.remove_edge(3, 4)
#G0.add_edge(2, 4)
#G0.add_edge(3, 7)
#G0.add_edge(8, 10)
# nx.draw(G0, with_labels=True, font_weight='bold')
# plt.show()
@pm.stochastic(dtype=nx.Graph)
def cwg(value = G0, alpha = alpha, beta = beta, L = L):
tmp = 0
for i in range(1, len(value)):
for j in range(i + 1, len(value)+1):
if value.has_edge(i, j):
tmp += np.log(alpha) - ((j - i) / (beta * L))
else:
tmp += np.log(1 - alpha * np.exp((i - j) / (beta * L)))
return tmp
class CWGMetropolis(pm.Metropolis):
""" A PyMC Step Method that walks on connected Waxman Graphs by
choosing two distinct nodes at random and considering the
possible link between them. If the link is already in the
graph, it consider it for deletion, and if the link is not in
the graph, it consider it for inclusion, keeping it with the
appropriate Metropolis probability (no Hastings factor necessary,
because the chain is reversible, right?)
"""
def __init__(self, stochastic):
# Initialize superclass
pm.Metropolis.__init__(self, stochastic, scale=1., verbose=0, tally=False)
def propose(self):
""" Add an edge or remove an edge"""
G = self.stochastic.value
G.u_new = np.random.choice(G.nodes()); G.v_new = np.random.choice(G.nodes())
while G.u_new == G.v_new:
G.v_new = np.random.choice(G.nodes())
if G.has_edge(G.u_new, G.v_new):
G.remove_edge(G.u_new, G.v_new)
if not nx.is_connected(G):
G.add_edge(G.u_new, G.v_new)
else:
G.add_edge(G.u_new, G.v_new)
self.stochastic.value = G
def reject(self):
""" Restore the graph"""
G = self.stochastic.value
if G.has_edge(G.u_new, G.v_new):
G.remove_edge(G.u_new, G.v_new)
else:
G.add_edge(G.u_new, G.v_new)
self.rejected += 1
self.stochastic.value = G
@pm.deterministic
def average_degree(G = cwg):
# return np.sum([t[1] for t in list(G.degree())]) / len(G)
return np.sum(list(G.degree().values())) / len(G)
mcmc = pm.MCMC([cwg, average_degree])
mcmc.use_step_method(CWGMetropolis, cwg)
mcmc.sample(100000)
avgd_samples = mcmc.trace("average_degree")[:]
plt.hist(avgd_samples[90000:])
plt.show()
nx.draw(cwg.value, with_labels=True, font_weight='bold')
plt.show()
mcmc.sample(100)
nx.draw(cwg.value, with_labels=True, font_weight='bold')
plt.show()
mcmc.sample(100)
nx.draw(cwg.value, with_labels=True, font_weight='bold')
plt.show()
| [
"matplotlib.pyplot.hist",
"networkx.is_connected",
"numpy.log",
"networkx.Graph",
"numpy.exp",
"pymc.Metropolis.__init__",
"pymc.MCMC",
"pymc.stochastic",
"networkx.draw",
"matplotlib.pyplot.show"
] | [((142, 152), 'networkx.Graph', 'nx.Graph', ([], {}), '()\n', (150, 152), True, 'import networkx as nx\n'), ((478, 507), 'pymc.stochastic', 'pm.stochastic', ([], {'dtype': 'nx.Graph'}), '(dtype=nx.Graph)\n', (491, 507), True, 'import pymc as pm\n'), ((2567, 2597), 'pymc.MCMC', 'pm.MCMC', (['[cwg, average_degree]'], {}), '([cwg, average_degree])\n', (2574, 2597), True, 'import pymc as pm\n'), ((2708, 2738), 'matplotlib.pyplot.hist', 'plt.hist', (['avgd_samples[90000:]'], {}), '(avgd_samples[90000:])\n', (2716, 2738), True, 'from matplotlib import pyplot as plt\n'), ((2739, 2749), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2747, 2749), True, 'from matplotlib import pyplot as plt\n'), ((2751, 2807), 'networkx.draw', 'nx.draw', (['cwg.value'], {'with_labels': '(True)', 'font_weight': '"""bold"""'}), "(cwg.value, with_labels=True, font_weight='bold')\n", (2758, 2807), True, 'import networkx as nx\n'), ((2809, 2819), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2817, 2819), True, 'from matplotlib import pyplot as plt\n'), ((2840, 2896), 'networkx.draw', 'nx.draw', (['cwg.value'], {'with_labels': '(True)', 'font_weight': '"""bold"""'}), "(cwg.value, with_labels=True, font_weight='bold')\n", (2847, 2896), True, 'import networkx as nx\n'), ((2898, 2908), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2906, 2908), True, 'from matplotlib import pyplot as plt\n'), ((2929, 2985), 'networkx.draw', 'nx.draw', (['cwg.value'], {'with_labels': '(True)', 'font_weight': '"""bold"""'}), "(cwg.value, with_labels=True, font_weight='bold')\n", (2936, 2985), True, 'import networkx as nx\n'), ((2987, 2997), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2995, 2997), True, 'from matplotlib import pyplot as plt\n'), ((1470, 1545), 'pymc.Metropolis.__init__', 'pm.Metropolis.__init__', (['self', 'stochastic'], {'scale': '(1.0)', 'verbose': '(0)', 'tally': '(False)'}), '(self, stochastic, scale=1.0, verbose=0, tally=False)\n', (1492, 1545), True, 
'import pymc as pm\n'), ((1935, 1953), 'networkx.is_connected', 'nx.is_connected', (['G'], {}), '(G)\n', (1950, 1953), True, 'import networkx as nx\n'), ((722, 735), 'numpy.log', 'np.log', (['alpha'], {}), '(alpha)\n', (728, 735), True, 'import numpy as np\n'), ((823, 851), 'numpy.exp', 'np.exp', (['((i - j) / (beta * L))'], {}), '((i - j) / (beta * L))\n', (829, 851), True, 'import numpy as np\n')] |
from django.conf.urls.defaults import patterns, include, url
from django.contrib import admin
from board.feeds import EventFeed
from board.views import IndexView, ServiceView
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', IndexView.as_view(), name='index'),
url(r'^services/(?P<slug>[-\w]+)$', ServiceView.as_view(), name='service'),
url(r'^feed$', EventFeed(), name='feed'),
url(r'^admin/', include(admin.site.urls)),
)
| [
"board.views.IndexView.as_view",
"django.conf.urls.defaults.include",
"board.views.ServiceView.as_view",
"board.feeds.EventFeed",
"django.contrib.admin.autodiscover"
] | [((176, 196), 'django.contrib.admin.autodiscover', 'admin.autodiscover', ([], {}), '()\n', (194, 196), False, 'from django.contrib import admin\n'), ((240, 259), 'board.views.IndexView.as_view', 'IndexView.as_view', ([], {}), '()\n', (257, 259), False, 'from board.views import IndexView, ServiceView\n'), ((316, 337), 'board.views.ServiceView.as_view', 'ServiceView.as_view', ([], {}), '()\n', (335, 337), False, 'from board.views import IndexView, ServiceView\n'), ((375, 386), 'board.feeds.EventFeed', 'EventFeed', ([], {}), '()\n', (384, 386), False, 'from board.feeds import EventFeed\n'), ((423, 447), 'django.conf.urls.defaults.include', 'include', (['admin.site.urls'], {}), '(admin.site.urls)\n', (430, 447), False, 'from django.conf.urls.defaults import patterns, include, url\n')] |
# region [Imports]
# * Standard Library Imports ---------------------------------------------------------------------------->
import os
import asyncio
from io import BytesIO
from pathlib import Path
from datetime import datetime
from tempfile import TemporaryDirectory
from textwrap import dedent
# * Third Party Imports --------------------------------------------------------------------------------->
import discord
from PIL import Image, ImageEnhance
from pytz import timezone
from discord.ext import commands, flags
# * Gid Imports ----------------------------------------------------------------------------------------->
import gidlogger as glog
# * Local Imports --------------------------------------------------------------------------------------->
from antipetros_discordbot.utility.misc import make_config_name
from antipetros_discordbot.utility.enums import WatermarkPosition
from antipetros_discordbot.utility.checks import allowed_channel_and_allowed_role_2, command_enabled_checker, allowed_requester
from antipetros_discordbot.utility.embed_helpers import make_basic_embed
from antipetros_discordbot.utility.gidtools_functions import loadjson, pathmaker
from antipetros_discordbot.init_userdata.user_data_setup import ParaStorageKeeper
from antipetros_discordbot.utility.poor_mans_abc import attribute_checker
from antipetros_discordbot.utility.enums import CogState
from antipetros_discordbot.utility.replacements.command_replacement import auto_meta_info_command
# endregion[Imports]
# region [TODO]
# TODO: create regions for this file
# TODO: Document and Docstrings
# endregion [TODO]
# region [Logging]
log = glog.aux_logger(__name__)
glog.import_notification(log, __name__)
# endregion[Logging]
# region [Constants]
APPDATA = ParaStorageKeeper.get_appdata()
BASE_CONFIG = ParaStorageKeeper.get_config('base_config')
COGS_CONFIG = ParaStorageKeeper.get_config('cogs_config')
THIS_FILE_DIR = os.path.abspath(os.path.dirname(__file__)) # location of this file, does not work if app gets compiled to exe with pyinstaller
COG_NAME = "ImageManipulationCog"
CONFIG_NAME = make_config_name(COG_NAME)
get_command_enabled = command_enabled_checker(CONFIG_NAME)
# endregion [Constants]
class ImageManipulatorCog(commands.Cog, command_attrs={'hidden': False, "name": COG_NAME}):
"""
Soon
"""
# region [ClassAttributes]
config_name = CONFIG_NAME
allowed_stamp_formats = set(loadjson(APPDATA["image_file_extensions.json"]))
stamp_positions = {'top': WatermarkPosition.Top, 'bottom': WatermarkPosition.Bottom, 'left': WatermarkPosition.Left, 'right': WatermarkPosition.Right, 'center': WatermarkPosition.Center}
docattrs = {'show_in_readme': True,
'is_ready': (CogState.WORKING | CogState.OPEN_TODOS | CogState.UNTESTED | CogState.FEATURE_MISSING | CogState.NEEDS_REFRACTORING | CogState.DOCUMENTATION_MISSING,
"2021-02-06 05:09:20",
"f166431cb83ae36c91d70d7d09020e274a7ebea84d5a0c724819a3ecd2230b9eca0b3e14c2d473563d005671b7a2bf9d87f5449544eb9b57bcab615035b0f83d")}
required_config_data = dedent(""" avatar_stamp = ASLOGO1
avatar_stamp_fraction = 0.2
stamps_margin = 5
stamp_fraction = 0.3""")
# endregion[ClassAttributes]
# region [Init]
def __init__(self, bot):
self.bot = bot
self.support = self.bot.support
self.stamp_location = APPDATA['stamps']
self.stamps = {}
self.stamp_pos_functions = {WatermarkPosition.Right | WatermarkPosition.Bottom: self._to_bottom_right,
WatermarkPosition.Right | WatermarkPosition.Top: self._to_top_right,
WatermarkPosition.Right | WatermarkPosition.Center: self._to_center_right,
WatermarkPosition.Left | WatermarkPosition.Bottom: self._to_bottom_left,
WatermarkPosition.Left | WatermarkPosition.Top: self._to_top_left,
WatermarkPosition.Left | WatermarkPosition.Center: self._to_center_left,
WatermarkPosition.Center | WatermarkPosition.Center: self._to_center_center,
WatermarkPosition.Center | WatermarkPosition.Bottom: self._to_bottom_center,
WatermarkPosition.Center | WatermarkPosition.Top: self._to_top_center}
# self.base_map_image = Image.open(r"D:\Dropbox\hobby\Modding\Ressources\Arma_Ressources\maps\tanoa_v3_2000_w_outposts.png")
# self.outpost_overlay = {'city': Image.open(r"D:\Dropbox\hobby\Modding\Ressources\Arma_Ressources\maps\tanoa_v2_2000_city_marker.png"),
# 'volcano': Image.open(r"D:\Dropbox\hobby\Modding\Ressources\Arma_Ressources\maps\tanoa_v2_2000_volcano_marker.png"),
# 'airport': Image.open(r"D:\Dropbox\hobby\Modding\Ressources\Arma_Ressources\maps\tanoa_v2_2000_airport_marker.png")}
self.old_map_message = None
self._get_stamps()
self.allowed_channels = allowed_requester(self, 'channels')
self.allowed_roles = allowed_requester(self, 'roles')
self.allowed_dm_ids = allowed_requester(self, 'dm_ids')
glog.class_init_notification(log, self)
# endregion[Init]
# region [Setup]
async def on_ready_setup(self):
self._get_stamps()
log.debug('setup for cog "%s" finished', str(self))
async def update(self, typus):
return
log.debug('cog "%s" was updated', str(self))
# endregion[Setup]
# region [Properties]
@property
def target_stamp_fraction(self):
return COGS_CONFIG.getfloat(CONFIG_NAME, 'stamp_fraction')
@property
def stamp_margin(self):
return COGS_CONFIG.getint(CONFIG_NAME, 'stamps_margin')
@property
def avatar_stamp_fraction(self):
return COGS_CONFIG.getfloat(CONFIG_NAME, 'avatar_stamp_fraction')
@property
def avatar_stamp(self):
return self._get_stamp_image(COGS_CONFIG.get(CONFIG_NAME, 'avatar_stamp').upper(), 1)
# endregion[Properties]
def _get_stamps(self):
self.stamps = {}
for file in os.scandir(self.stamp_location):
if os.path.isfile(file.path) is True and os.path.splitext(file.name)[1] in self.allowed_stamp_formats:
name = file.name.split('.')[0].replace(' ', '_').strip().upper()
self.stamps[name] = file.path
def _get_stamp_image(self, stamp_name, stamp_opacity):
image = Image.open(self.stamps.get(stamp_name))
alpha = image.split()[3]
alpha = ImageEnhance.Brightness(alpha).enhance(stamp_opacity)
image.putalpha(alpha)
return image.copy()
@staticmethod
def _stamp_resize(input_image, stamp_image, factor):
input_image_width, input_image_height = input_image.size
input_image_width_fractioned = input_image_width * factor
input_image_height_fractioned = input_image_height * factor
transform_factor_width = input_image_width_fractioned / stamp_image.size[0]
transform_factor_height = input_image_height_fractioned / stamp_image.size[1]
transform_factor = (transform_factor_width + transform_factor_height) / 2
return stamp_image.resize((round(stamp_image.size[0] * transform_factor), round(stamp_image.size[1] * transform_factor)), resample=Image.LANCZOS)
def _to_bottom_right(self, input_image, stamp_image, factor):
log.debug('pasting image to bottom_right')
input_image_width, input_image_height = input_image.size
_resized_stamp = self._stamp_resize(input_image, stamp_image, factor)
input_image.paste(_resized_stamp,
(input_image_width - _resized_stamp.size[0] - self.stamp_margin, input_image_height - _resized_stamp.size[1] - self.stamp_margin),
_resized_stamp)
return input_image
def _to_top_right(self, input_image, stamp_image, factor):
input_image_width, input_image_height = input_image.size
_resized_stamp = self._stamp_resize(input_image, stamp_image, factor)
input_image.paste(_resized_stamp,
(input_image_width - _resized_stamp.size[0] - self.stamp_margin, 0 + self.stamp_margin),
_resized_stamp)
return input_image
def _to_center_right(self, input_image, stamp_image, factor):
input_image_width, input_image_height = input_image.size
_resized_stamp = self._stamp_resize(input_image, stamp_image, factor)
input_image.paste(_resized_stamp,
(input_image_width - _resized_stamp.size[0] - self.stamp_margin, round((input_image_height / 2) - (_resized_stamp.size[1] / 2))),
_resized_stamp)
return input_image
def _to_bottom_left(self, input_image, stamp_image, factor):
input_image_width, input_image_height = input_image.size
_resized_stamp = self._stamp_resize(input_image, stamp_image, factor)
input_image.paste(_resized_stamp,
(0 + self.stamp_margin, input_image_height - _resized_stamp.size[1] - self.stamp_margin),
_resized_stamp)
return input_image
def _to_top_left(self, input_image, stamp_image, factor):
_resized_stamp = self._stamp_resize(input_image, stamp_image, factor)
input_image.paste(_resized_stamp,
(0 + self.stamp_margin, 0 + self.stamp_margin),
_resized_stamp)
return input_image
def _to_center_left(self, input_image, stamp_image, factor):
input_image_width, input_image_height = input_image.size
_resized_stamp = self._stamp_resize(input_image, stamp_image, factor)
input_image.paste(_resized_stamp,
(0 + self.stamp_margin, round((input_image_height / 2) - (_resized_stamp.size[1] / 2))),
_resized_stamp)
return input_image
def _to_center_center(self, input_image, stamp_image, factor):
input_image_width, input_image_height = input_image.size
_resized_stamp = self._stamp_resize(input_image, stamp_image, factor)
input_image.paste(_resized_stamp,
(round((input_image_width / 2) - (_resized_stamp.size[0] / 2)), round((input_image_height / 2) - (_resized_stamp.size[1] / 2))),
_resized_stamp)
return input_image
def _to_top_center(self, input_image, stamp_image, factor):
input_image_width, input_image_height = input_image.size
_resized_stamp = self._stamp_resize(input_image, stamp_image, factor)
input_image.paste(_resized_stamp,
(round((input_image_width / 2) - (_resized_stamp.size[0] / 2)), 0 + self.stamp_margin),
_resized_stamp)
return input_image
def _to_bottom_center(self, input_image, stamp_image, factor):
input_image_width, input_image_height = input_image.size
_resized_stamp = self._stamp_resize(input_image, stamp_image, factor)
input_image.paste(_resized_stamp,
(round((input_image_width / 2) - (_resized_stamp.size[0] / 2)), input_image_height - _resized_stamp.size[1] - self.stamp_margin),
_resized_stamp)
return input_image
async def _send_image(self, ctx, image, name, message_title, message_text=None, image_format=None, delete_after=None):
image_format = 'png' if image_format is None else image_format
with BytesIO() as image_binary:
image.save(image_binary, image_format.upper(), optimize=True)
image_binary.seek(0)
file = discord.File(fp=image_binary, filename=name.replace('_', '') + '.' + image_format)
embed = discord.Embed(title=message_title, description=message_text, color=self.support.cyan.discord_color, timestamp=datetime.now(tz=timezone("Europe/Berlin")), type='image')
embed.set_author(name='AntiPetros', icon_url="https://www.der-buntspecht-shop.de/wp-content/uploads/Baumwollstoff-Camouflage-olivegruen-2.jpg")
embed.set_image(url=f"attachment://{name.replace('_','')}.{image_format}")
await ctx.send(embed=embed, file=file, delete_after=delete_after)
@flags.add_flag("--stamp-image", "-si", type=str, default='ASLOGO1')
@flags.add_flag("--first-pos", '-fp', type=str, default="bottom")
@flags.add_flag("--second-pos", '-sp', type=str, default="right")
@flags.add_flag("--stamp-opacity", '-so', type=float, default=1.0)
@flags.add_flag('--factor', '-f', type=float, default=None)
@auto_meta_info_command(enabled=get_command_enabled("stamp_image"), cls=flags.FlagCommand)
@allowed_channel_and_allowed_role_2(in_dm_allowed=False)
@commands.max_concurrency(1, per=commands.BucketType.guild, wait=True)
async def stamp_image(self, ctx, **flags):
"""
Stamps an image with a small image from the available stamps.
Usefull for watermarking images.
Get all available stamps with '@AntiPetros available_stamps'
"""
async with ctx.channel.typing():
if len(ctx.message.attachments) == 0:
# TODO: make as embed
await ctx.send('! **there is NO image to antistasify** !')
return
if flags.get('stamp_image') not in self.stamps:
# TODO: make as embed
await ctx.send("! **There is NO stamp with that name** !")
return
first_pos = self.stamp_positions.get(flags.get("first_pos").casefold(), None)
second_pos = self.stamp_positions.get(flags.get("second_pos").casefold(), None)
if any(_pos is None for _pos in [first_pos, second_pos]) or first_pos | second_pos not in self.stamp_pos_functions:
# TODO: make as embed
await ctx.send("! **Those are NOT valid position combinations** !")
return
for _file in ctx.message.attachments:
# TODO: maybe make extra attribute for input format, check what is possible and working. else make a generic format list
if any(_file.filename.endswith(allowed_ext) for allowed_ext in self.allowed_stamp_formats):
_stamp = self._get_stamp_image(flags.get('stamp_image'), flags.get('stamp_opacity'))
_stamp = _stamp.copy()
with TemporaryDirectory(prefix='temp') as temp_dir:
temp_file = Path(pathmaker(temp_dir, 'temp_file.png'))
log.debug("Tempfile '%s' created", temp_file)
await _file.save(temp_file)
in_image = await self.bot.execute_in_thread(Image.open, temp_file)
in_image = await self.bot.execute_in_thread(in_image.copy)
factor = self.target_stamp_fraction if flags.get('factor') is None else flags.get('factor')
pos_function = self.stamp_pos_functions.get(first_pos | second_pos)
in_image = await self.bot.execute_in_thread(pos_function, in_image, _stamp, factor)
name = 'antistasified_' + os.path.splitext(_file.filename)[0]
await ctx.message.delete()
# TODO: make as embed
await self._send_image(ctx, in_image, name, f"__**{name}**__")
@auto_meta_info_command(enabled=get_command_enabled("available_stamps"))
@allowed_channel_and_allowed_role_2(in_dm_allowed=False)
@commands.cooldown(1, 120, commands.BucketType.channel)
async def available_stamps(self, ctx):
"""
Posts all available stamps.
"""
await ctx.message.delete()
await ctx.send(embed=await make_basic_embed(title="__**Currently available Stamps are:**__", footer="These messages will be deleted in 120 seconds", symbol='photo'), delete_after=120)
for name, image_path in self.stamps.items():
thumb_image = Image.open(image_path)
thumb_image.thumbnail((128, 128))
with BytesIO() as image_binary:
await asyncio.sleep(0)
thumb_image.save(image_binary, 'PNG', optimize=True)
image_binary.seek(0)
_file = discord.File(image_binary, filename=name + '.png')
embed = discord.Embed(title="Available Stamp")
embed.add_field(name='Stamp Name:', value=name)
embed.set_image(url=f"attachment://{name}.png")
await ctx.send(embed=embed, file=_file, delete_after=120)
@auto_meta_info_command(enabled=get_command_enabled("member_avatar"))
@allowed_channel_and_allowed_role_2(in_dm_allowed=False)
@commands.cooldown(1, 300, commands.BucketType.member)
async def member_avatar(self, ctx):
"""
Stamps the avatar of a Member with the Antistasi Crest.
Returns the new stamped avatar as a .PNG image that the Member can save and replace his orginal avatar with.
"""
avatar_image = await self.get_avatar_from_user(ctx.author)
stamp = self.avatar_stamp
modified_avatar = await self.bot.execute_in_thread(self._to_bottom_right, avatar_image, stamp, self.avatar_stamp_fraction)
name = f"{ctx.author.name}_Member_avatar"
await self._send_image(ctx, modified_avatar, name, "**Your New Avatar**") # change completion line to "Pledge your allegiance to the Antistasi Rebellion!"?
async def get_avatar_from_user(self, user):
avatar = user.avatar_url
temp_dir = TemporaryDirectory()
temp_file = pathmaker(temp_dir.name, 'user_avatar.png')
log.debug("Tempfile '%s' created", temp_file)
await avatar.save(temp_file)
avatar_image = Image.open(temp_file)
avatar_image = avatar_image.copy()
avatar_image = avatar_image.convert('RGB')
temp_dir.cleanup()
return avatar_image
def map_image_handling(self, base_image, marker_name, color, bytes_out):
log.debug("creating changed map, changed_location: '%s', changed_color: '%s'", marker_name, color)
marker_image = self.outpost_overlay.get(marker_name)
marker_alpha = marker_image.getchannel('A')
marker_image = Image.new('RGBA', marker_image.size, color=color)
marker_image.putalpha(marker_alpha)
base_image.paste(marker_image, mask=marker_alpha)
base_image.save(bytes_out, 'PNG', optimize=True)
bytes_out.seek(0)
return base_image, bytes_out
# @commands.command(aliases=get_aliases("map_changed"), enabled=get_command_enabled("map_changed"))
# @allowed_channel_and_allowed_role_2(in_dm_allowed=False)
# @commands.max_concurrency(1, per=commands.BucketType.guild, wait=False)
# async def map_changed(self, ctx, marker, color):
# """
# Proof of concept for future real time server map.
# """
# log.info("command was initiated by '%s'", ctx.author.name)
# with BytesIO() as image_binary:
# self.base_map_image, image_binary = await self.bot.execute_in_thread(self.map_image_handling, self.base_map_image, marker, color, image_binary)
# if self.old_map_message is not None:
# await self.old_map_message.delete()
# delete_time = None
# embed = discord.Embed(title='Current Server Map State', color=self.support.green.discord_color, timestamp=datetime.now(tz=timezone("Europe/Berlin")), type="image")
# embed.set_author(name='Antistasi Community Server 1', icon_url="https://s3.amazonaws.com/files.enjin.com/1218665/site_logo/NEW%20LOGO%20BANNER.png", url="https://a3antistasi.enjin.com/")
# embed.set_image(url="attachment://map.png")
# self.old_map_message = await ctx.send(embed=embed, file=discord.File(fp=image_binary, filename="map.png"), delete_after=delete_time)
# log.debug("finished 'map_changed' command")
# region [SpecialMethods]
def __repr__(self):
return f"{self.__class__.__name__}({self.bot.__class__.__name__})"
def __str__(self):
return self.qualified_name
def cog_unload(self):
log.debug("Cog '%s' UNLOADED!", str(self))
# endregion[SpecialMethods]
def setup(bot):
"""
Mandatory function to add the Cog to the bot.
"""
bot.add_cog(attribute_checker(ImageManipulatorCog(bot)))
| [
"PIL.Image.new",
"antipetros_discordbot.utility.checks.allowed_channel_and_allowed_role_2",
"gidlogger.class_init_notification",
"io.BytesIO",
"antipetros_discordbot.utility.misc.make_config_name",
"gidlogger.aux_logger",
"antipetros_discordbot.utility.embed_helpers.make_basic_embed",
"textwrap.dedent... | [((1642, 1667), 'gidlogger.aux_logger', 'glog.aux_logger', (['__name__'], {}), '(__name__)\n', (1657, 1667), True, 'import gidlogger as glog\n'), ((1668, 1707), 'gidlogger.import_notification', 'glog.import_notification', (['log', '__name__'], {}), '(log, __name__)\n', (1692, 1707), True, 'import gidlogger as glog\n'), ((1762, 1793), 'antipetros_discordbot.init_userdata.user_data_setup.ParaStorageKeeper.get_appdata', 'ParaStorageKeeper.get_appdata', ([], {}), '()\n', (1791, 1793), False, 'from antipetros_discordbot.init_userdata.user_data_setup import ParaStorageKeeper\n'), ((1808, 1851), 'antipetros_discordbot.init_userdata.user_data_setup.ParaStorageKeeper.get_config', 'ParaStorageKeeper.get_config', (['"""base_config"""'], {}), "('base_config')\n", (1836, 1851), False, 'from antipetros_discordbot.init_userdata.user_data_setup import ParaStorageKeeper\n'), ((1866, 1909), 'antipetros_discordbot.init_userdata.user_data_setup.ParaStorageKeeper.get_config', 'ParaStorageKeeper.get_config', (['"""cogs_config"""'], {}), "('cogs_config')\n", (1894, 1909), False, 'from antipetros_discordbot.init_userdata.user_data_setup import ParaStorageKeeper\n'), ((2102, 2128), 'antipetros_discordbot.utility.misc.make_config_name', 'make_config_name', (['COG_NAME'], {}), '(COG_NAME)\n', (2118, 2128), False, 'from antipetros_discordbot.utility.misc import make_config_name\n'), ((2151, 2187), 'antipetros_discordbot.utility.checks.command_enabled_checker', 'command_enabled_checker', (['CONFIG_NAME'], {}), '(CONFIG_NAME)\n', (2174, 2187), False, 'from antipetros_discordbot.utility.checks import allowed_channel_and_allowed_role_2, command_enabled_checker, allowed_requester\n'), ((1942, 1967), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1957, 1967), False, 'import os\n'), ((3124, 3335), 'textwrap.dedent', 'dedent', (['""" avatar_stamp = ASLOGO1\n avatar_stamp_fraction = 0.2\n stamps_margin = 5\n stamp_fraction = 0.3"""'], {}), '(\n """ 
avatar_stamp = ASLOGO1\n avatar_stamp_fraction = 0.2\n stamps_margin = 5\n stamp_fraction = 0.3"""\n )\n', (3130, 3335), False, 'from textwrap import dedent\n'), ((12520, 12587), 'discord.ext.flags.add_flag', 'flags.add_flag', (['"""--stamp-image"""', '"""-si"""'], {'type': 'str', 'default': '"""ASLOGO1"""'}), "('--stamp-image', '-si', type=str, default='ASLOGO1')\n", (12534, 12587), False, 'from discord.ext import commands, flags\n'), ((12593, 12657), 'discord.ext.flags.add_flag', 'flags.add_flag', (['"""--first-pos"""', '"""-fp"""'], {'type': 'str', 'default': '"""bottom"""'}), "('--first-pos', '-fp', type=str, default='bottom')\n", (12607, 12657), False, 'from discord.ext import commands, flags\n'), ((12663, 12727), 'discord.ext.flags.add_flag', 'flags.add_flag', (['"""--second-pos"""', '"""-sp"""'], {'type': 'str', 'default': '"""right"""'}), "('--second-pos', '-sp', type=str, default='right')\n", (12677, 12727), False, 'from discord.ext import commands, flags\n'), ((12733, 12798), 'discord.ext.flags.add_flag', 'flags.add_flag', (['"""--stamp-opacity"""', '"""-so"""'], {'type': 'float', 'default': '(1.0)'}), "('--stamp-opacity', '-so', type=float, default=1.0)\n", (12747, 12798), False, 'from discord.ext import commands, flags\n'), ((12804, 12862), 'discord.ext.flags.add_flag', 'flags.add_flag', (['"""--factor"""', '"""-f"""'], {'type': 'float', 'default': 'None'}), "('--factor', '-f', type=float, default=None)\n", (12818, 12862), False, 'from discord.ext import commands, flags\n'), ((12963, 13018), 'antipetros_discordbot.utility.checks.allowed_channel_and_allowed_role_2', 'allowed_channel_and_allowed_role_2', ([], {'in_dm_allowed': '(False)'}), '(in_dm_allowed=False)\n', (12997, 13018), False, 'from antipetros_discordbot.utility.checks import allowed_channel_and_allowed_role_2, command_enabled_checker, allowed_requester\n'), ((13024, 13093), 'discord.ext.commands.max_concurrency', 'commands.max_concurrency', (['(1)'], {'per': 'commands.BucketType.guild', 
'wait': '(True)'}), '(1, per=commands.BucketType.guild, wait=True)\n', (13048, 13093), False, 'from discord.ext import commands, flags\n'), ((15760, 15815), 'antipetros_discordbot.utility.checks.allowed_channel_and_allowed_role_2', 'allowed_channel_and_allowed_role_2', ([], {'in_dm_allowed': '(False)'}), '(in_dm_allowed=False)\n', (15794, 15815), False, 'from antipetros_discordbot.utility.checks import allowed_channel_and_allowed_role_2, command_enabled_checker, allowed_requester\n'), ((15821, 15875), 'discord.ext.commands.cooldown', 'commands.cooldown', (['(1)', '(120)', 'commands.BucketType.channel'], {}), '(1, 120, commands.BucketType.channel)\n', (15838, 15875), False, 'from discord.ext import commands, flags\n'), ((16965, 17020), 'antipetros_discordbot.utility.checks.allowed_channel_and_allowed_role_2', 'allowed_channel_and_allowed_role_2', ([], {'in_dm_allowed': '(False)'}), '(in_dm_allowed=False)\n', (16999, 17020), False, 'from antipetros_discordbot.utility.checks import allowed_channel_and_allowed_role_2, command_enabled_checker, allowed_requester\n'), ((17026, 17079), 'discord.ext.commands.cooldown', 'commands.cooldown', (['(1)', '(300)', 'commands.BucketType.member'], {}), '(1, 300, commands.BucketType.member)\n', (17043, 17079), False, 'from discord.ext import commands, flags\n'), ((2424, 2471), 'antipetros_discordbot.utility.gidtools_functions.loadjson', 'loadjson', (["APPDATA['image_file_extensions.json']"], {}), "(APPDATA['image_file_extensions.json'])\n", (2432, 2471), False, 'from antipetros_discordbot.utility.gidtools_functions import loadjson, pathmaker\n'), ((5194, 5229), 'antipetros_discordbot.utility.checks.allowed_requester', 'allowed_requester', (['self', '"""channels"""'], {}), "(self, 'channels')\n", (5211, 5229), False, 'from antipetros_discordbot.utility.checks import allowed_channel_and_allowed_role_2, command_enabled_checker, allowed_requester\n'), ((5259, 5291), 'antipetros_discordbot.utility.checks.allowed_requester', 
'allowed_requester', (['self', '"""roles"""'], {}), "(self, 'roles')\n", (5276, 5291), False, 'from antipetros_discordbot.utility.checks import allowed_channel_and_allowed_role_2, command_enabled_checker, allowed_requester\n'), ((5322, 5355), 'antipetros_discordbot.utility.checks.allowed_requester', 'allowed_requester', (['self', '"""dm_ids"""'], {}), "(self, 'dm_ids')\n", (5339, 5355), False, 'from antipetros_discordbot.utility.checks import allowed_channel_and_allowed_role_2, command_enabled_checker, allowed_requester\n'), ((5364, 5403), 'gidlogger.class_init_notification', 'glog.class_init_notification', (['log', 'self'], {}), '(log, self)\n', (5392, 5403), True, 'import gidlogger as glog\n'), ((6303, 6334), 'os.scandir', 'os.scandir', (['self.stamp_location'], {}), '(self.stamp_location)\n', (6313, 6334), False, 'import os\n'), ((17876, 17896), 'tempfile.TemporaryDirectory', 'TemporaryDirectory', ([], {}), '()\n', (17894, 17896), False, 'from tempfile import TemporaryDirectory\n'), ((17917, 17960), 'antipetros_discordbot.utility.gidtools_functions.pathmaker', 'pathmaker', (['temp_dir.name', '"""user_avatar.png"""'], {}), "(temp_dir.name, 'user_avatar.png')\n", (17926, 17960), False, 'from antipetros_discordbot.utility.gidtools_functions import loadjson, pathmaker\n'), ((18075, 18096), 'PIL.Image.open', 'Image.open', (['temp_file'], {}), '(temp_file)\n', (18085, 18096), False, 'from PIL import Image, ImageEnhance\n'), ((18567, 18616), 'PIL.Image.new', 'Image.new', (['"""RGBA"""', 'marker_image.size'], {'color': 'color'}), "('RGBA', marker_image.size, color=color)\n", (18576, 18616), False, 'from PIL import Image, ImageEnhance\n'), ((11769, 11778), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (11776, 11778), False, 'from io import BytesIO\n'), ((16287, 16309), 'PIL.Image.open', 'Image.open', (['image_path'], {}), '(image_path)\n', (16297, 16309), False, 'from PIL import Image, ImageEnhance\n'), ((6743, 6773), 'PIL.ImageEnhance.Brightness', 
'ImageEnhance.Brightness', (['alpha'], {}), '(alpha)\n', (6766, 6773), False, 'from PIL import Image, ImageEnhance\n'), ((13591, 13615), 'discord.ext.flags.get', 'flags.get', (['"""stamp_image"""'], {}), "('stamp_image')\n", (13600, 13615), False, 'from discord.ext import commands, flags\n'), ((16373, 16382), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (16380, 16382), False, 'from io import BytesIO\n'), ((16569, 16619), 'discord.File', 'discord.File', (['image_binary'], {'filename': "(name + '.png')"}), "(image_binary, filename=name + '.png')\n", (16581, 16619), False, 'import discord\n'), ((16644, 16682), 'discord.Embed', 'discord.Embed', ([], {'title': '"""Available Stamp"""'}), "(title='Available Stamp')\n", (16657, 16682), False, 'import discord\n'), ((6351, 6376), 'os.path.isfile', 'os.path.isfile', (['file.path'], {}), '(file.path)\n', (6365, 6376), False, 'import os\n'), ((16422, 16438), 'asyncio.sleep', 'asyncio.sleep', (['(0)'], {}), '(0)\n', (16435, 16438), False, 'import asyncio\n'), ((6389, 6416), 'os.path.splitext', 'os.path.splitext', (['file.name'], {}), '(file.name)\n', (6405, 6416), False, 'import os\n'), ((13821, 13843), 'discord.ext.flags.get', 'flags.get', (['"""first_pos"""'], {}), "('first_pos')\n", (13830, 13843), False, 'from discord.ext import commands, flags\n'), ((13912, 13935), 'discord.ext.flags.get', 'flags.get', (['"""second_pos"""'], {}), "('second_pos')\n", (13921, 13935), False, 'from discord.ext import commands, flags\n'), ((14574, 14598), 'discord.ext.flags.get', 'flags.get', (['"""stamp_image"""'], {}), "('stamp_image')\n", (14583, 14598), False, 'from discord.ext import commands, flags\n'), ((14600, 14626), 'discord.ext.flags.get', 'flags.get', (['"""stamp_opacity"""'], {}), "('stamp_opacity')\n", (14609, 14626), False, 'from discord.ext import commands, flags\n'), ((14696, 14729), 'tempfile.TemporaryDirectory', 'TemporaryDirectory', ([], {'prefix': '"""temp"""'}), "(prefix='temp')\n", (14714, 14729), False, 'from tempfile 
import TemporaryDirectory\n'), ((15210, 15229), 'discord.ext.flags.get', 'flags.get', (['"""factor"""'], {}), "('factor')\n", (15219, 15229), False, 'from discord.ext import commands, flags\n'), ((16050, 16192), 'antipetros_discordbot.utility.embed_helpers.make_basic_embed', 'make_basic_embed', ([], {'title': '"""__**Currently available Stamps are:**__"""', 'footer': '"""These messages will be deleted in 120 seconds"""', 'symbol': '"""photo"""'}), "(title='__**Currently available Stamps are:**__', footer=\n 'These messages will be deleted in 120 seconds', symbol='photo')\n", (16066, 16192), False, 'from antipetros_discordbot.utility.embed_helpers import make_basic_embed\n'), ((12151, 12176), 'pytz.timezone', 'timezone', (['"""Europe/Berlin"""'], {}), "('Europe/Berlin')\n", (12159, 12176), False, 'from pytz import timezone\n'), ((14784, 14820), 'antipetros_discordbot.utility.gidtools_functions.pathmaker', 'pathmaker', (['temp_dir', '"""temp_file.png"""'], {}), "(temp_dir, 'temp_file.png')\n", (14793, 14820), False, 'from antipetros_discordbot.utility.gidtools_functions import loadjson, pathmaker\n'), ((15177, 15196), 'discord.ext.flags.get', 'flags.get', (['"""factor"""'], {}), "('factor')\n", (15186, 15196), False, 'from discord.ext import commands, flags\n'), ((15469, 15501), 'os.path.splitext', 'os.path.splitext', (['_file.filename'], {}), '(_file.filename)\n', (15485, 15501), False, 'import os\n')] |
import requests
from bs4 import BeautifulSoup as soup
import pandas as pd
# PJ chatlog archive
url_main = 'http://perverted-justice.com/?archive=byName'
# get list of chat URLs
req_main = requests.get(url_main)
main_soup = soup(req_main.text, "html.parser")
# list to store URLs
url_link = []
for link in main_soup.find_all('a'):
url_link.append(str(link.get('href')))
# filter list to only those containing chatlogs
url_link = list(set(['http://perverted-justice.com'+i+'&nocomm=true' for i in url_link if i.startswith('./?archive=')]))
# export chatlog list
urlDF = pd.DataFrame(data=url_link)
urlDF.to_csv('C:/Users/<NAME>/Documents/PhD Research/chat_logs/notes/chatlog_url.csv')
| [
"bs4.BeautifulSoup",
"pandas.DataFrame",
"requests.get"
] | [((190, 212), 'requests.get', 'requests.get', (['url_main'], {}), '(url_main)\n', (202, 212), False, 'import requests\n'), ((225, 259), 'bs4.BeautifulSoup', 'soup', (['req_main.text', '"""html.parser"""'], {}), "(req_main.text, 'html.parser')\n", (229, 259), True, 'from bs4 import BeautifulSoup as soup\n'), ((574, 601), 'pandas.DataFrame', 'pd.DataFrame', ([], {'data': 'url_link'}), '(data=url_link)\n', (586, 601), True, 'import pandas as pd\n')] |
"""
This module contains the basic logging setup for the project.
"""
import datetime
import logging
import os
def setup_logging(module):
"""Set up the logging."""
log_file = os.path.join('/astro/3/mutchler/mt/logs/', module,
module + '_' + datetime.datetime.now().strftime('%Y-%m-%d-%H-%M') + '.log')
logging.basicConfig(filename = log_file,
format = '%(asctime)s %(levelname)s: %(message)s',
datefmt = '%m/%d/%Y %H:%M:%S %p',
level = logging.INFO) | [
"logging.basicConfig",
"datetime.datetime.now"
] | [((325, 474), 'logging.basicConfig', 'logging.basicConfig', ([], {'filename': 'log_file', 'format': '"""%(asctime)s %(levelname)s: %(message)s"""', 'datefmt': '"""%m/%d/%Y %H:%M:%S %p"""', 'level': 'logging.INFO'}), "(filename=log_file, format=\n '%(asctime)s %(levelname)s: %(message)s', datefmt=\n '%m/%d/%Y %H:%M:%S %p', level=logging.INFO)\n", (344, 474), False, 'import logging\n'), ((260, 283), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (281, 283), False, 'import datetime\n')] |
from django.shortcuts import render, HttpResponse
from django.views.decorators.csrf import csrf_exempt
import os
import base64
# from .celery_test import test
# from clone_script import clone
def index(request):
return render(request, 'clone/index.html')
def results(request):
return render(request, 'clone/results.html')
def login(request):
return render(request, 'clone/login.html')
@csrf_exempt
def upload(request):
if request.method == 'POST':
data = request.POST["base64_img"]
format, imgstr = data.split(';base64,')
ext = '.'+format.split('/')[-1]
# print(ext)
directory = "STEPS/0"
filename = directory + ".jpg"
with open(filename, "wb") as fh:
fh.write(base64.b64decode(imgstr))
# clone.delay()
return HttpResponse("success")
return HttpResponse("fail") | [
"django.shortcuts.render",
"base64.b64decode",
"django.shortcuts.HttpResponse"
] | [((222, 257), 'django.shortcuts.render', 'render', (['request', '"""clone/index.html"""'], {}), "(request, 'clone/index.html')\n", (228, 257), False, 'from django.shortcuts import render, HttpResponse\n'), ((289, 326), 'django.shortcuts.render', 'render', (['request', '"""clone/results.html"""'], {}), "(request, 'clone/results.html')\n", (295, 326), False, 'from django.shortcuts import render, HttpResponse\n'), ((356, 391), 'django.shortcuts.render', 'render', (['request', '"""clone/login.html"""'], {}), "(request, 'clone/login.html')\n", (362, 391), False, 'from django.shortcuts import render, HttpResponse\n'), ((775, 795), 'django.shortcuts.HttpResponse', 'HttpResponse', (['"""fail"""'], {}), "('fail')\n", (787, 795), False, 'from django.shortcuts import render, HttpResponse\n'), ((743, 766), 'django.shortcuts.HttpResponse', 'HttpResponse', (['"""success"""'], {}), "('success')\n", (755, 766), False, 'from django.shortcuts import render, HttpResponse\n'), ((690, 714), 'base64.b64decode', 'base64.b64decode', (['imgstr'], {}), '(imgstr)\n', (706, 714), False, 'import base64\n')] |
from __future__ import (
annotations,
)
import logging
import warnings
from pathlib import (
Path,
)
from typing import (
TYPE_CHECKING,
Optional,
Type,
TypeVar,
Union,
)
from .object import (
Object,
)
if TYPE_CHECKING:
from .config import (
Config,
)
logger = logging.getLogger(__name__)
S = TypeVar("S", bound="SetupMixin")
class SetupMixin(Object):
"""Setup Mixin class."""
def __init__(self, *args, already_setup: bool = False, **kwargs):
super().__init__(**kwargs)
self._already_setup = already_setup
@property
def already_setup(self) -> bool:
"""Already Setup getter.
:return: A boolean value.
"""
return self._already_setup
@property
def already_destroyed(self) -> bool:
"""Already Destroy getter.
:return: A boolean value.
"""
return not self._already_setup
@classmethod
def from_config(cls: Type[S], config: Optional[Union[Config, Path]] = None, **kwargs) -> S:
"""Build a new instance from config.
:param config: Config instance. If `None` is provided, default config is chosen.
:param kwargs: Additional named arguments.
:return: A instance of the called class.
"""
if isinstance(config, Path):
from .config import (
Config,
)
config = Config(config)
if config is None:
from .config import (
Config,
)
from .injections import (
Inject,
)
config = Inject.resolve(Config)
logger.info(f"Building a {cls.__name__!r} instance from config...")
return cls._from_config(config=config, **kwargs)
@classmethod
def _from_config(cls: Type[S], config: Config, **kwargs) -> S:
return cls(**kwargs)
async def __aenter__(self: S) -> S:
await self.setup()
return self
async def setup(self) -> None:
"""Setup miscellaneous repository things.
:return: This method does not return anything.
"""
if not self._already_setup:
logger.debug(f"Setting up a {type(self).__name__!r} instance...")
await self._setup()
self._already_setup = True
async def _setup(self) -> None:
return
async def __aexit__(self, exc_type, exc_value, exc_traceback):
await self.destroy()
async def destroy(self) -> None:
"""Destroy miscellaneous repository things.
:return: This method does not return anything.
"""
if self._already_setup:
logger.debug(f"Destroying a {type(self).__name__!r} instance...")
await self._destroy()
self._already_setup = False
async def _destroy(self) -> None:
"""Destroy miscellaneous repository things."""
def __del__(self):
if not getattr(self, "already_destroyed", True):
warnings.warn(
f"A not destroyed {type(self).__name__!r} instance is trying to be deleted...", ResourceWarning
)
class MinosSetup(SetupMixin):
"""Minos Setup class."""
def __init__(self, *args, **kwargs):
warnings.warn(f"{MinosSetup!r} has been deprecated. Use {SetupMixin} instead.", DeprecationWarning)
super().__init__(*args, **kwargs)
| [
"logging.getLogger",
"warnings.warn",
"typing.TypeVar"
] | [((314, 341), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (331, 341), False, 'import logging\n'), ((347, 379), 'typing.TypeVar', 'TypeVar', (['"""S"""'], {'bound': '"""SetupMixin"""'}), "('S', bound='SetupMixin')\n", (354, 379), False, 'from typing import TYPE_CHECKING, Optional, Type, TypeVar, Union\n'), ((3264, 3367), 'warnings.warn', 'warnings.warn', (['f"""{MinosSetup!r} has been deprecated. Use {SetupMixin} instead."""', 'DeprecationWarning'], {}), "(f'{MinosSetup!r} has been deprecated. Use {SetupMixin} instead.',\n DeprecationWarning)\n", (3277, 3367), False, 'import warnings\n')] |
from optparse import OptionParser
import yaml
import cwrap_parser
import nn_parse
import native_parse
import preprocess_declarations
import function_wrapper
import dispatch_macros
import copy_wrapper
from code_template import CodeTemplate
parser = OptionParser()
parser.add_option('-s', '--source-path', help='path to source director for tensorlib',
action='store', default='.')
parser.add_option('-o', '--output-dependencies',
help='only output a list of dependencies', action='store')
parser.add_option('-n', '--no-cuda', action='store_true')
options, files = parser.parse_args()
if options.output_dependencies is not None:
output_dependencies_file = open(options.output_dependencies, 'w')
TEMPLATE_PATH = options.source_path + "/templates"
GENERATOR_DERIVED = CodeTemplate.from_file(
TEMPLATE_PATH + "/GeneratorDerived.h")
STORAGE_DERIVED_CPP = CodeTemplate.from_file(
TEMPLATE_PATH + "/StorageDerived.cpp")
STORAGE_DERIVED_H = CodeTemplate.from_file(TEMPLATE_PATH + "/StorageDerived.h")
TYPE_DERIVED_CPP = CodeTemplate.from_file(TEMPLATE_PATH + "/TypeDerived.cpp")
TYPE_DERIVED_H = CodeTemplate.from_file(TEMPLATE_PATH + "/TypeDerived.h")
TYPE_H = CodeTemplate.from_file(TEMPLATE_PATH + "/Type.h")
TYPE_CPP = CodeTemplate.from_file(TEMPLATE_PATH + "/Type.cpp")
TENSOR_DERIVED_CPP = CodeTemplate.from_file(
TEMPLATE_PATH + "/TensorDerived.cpp")
TENSOR_SPARSE_CPP = CodeTemplate.from_file(
TEMPLATE_PATH + "/TensorSparse.cpp")
TENSOR_DENSE_CPP = CodeTemplate.from_file(
TEMPLATE_PATH + "/TensorDense.cpp")
TENSOR_DERIVED_H = CodeTemplate.from_file(TEMPLATE_PATH + "/TensorDerived.h")
TENSOR_H = CodeTemplate.from_file(TEMPLATE_PATH + "/Tensor.h")
TENSOR_METHODS_H = CodeTemplate.from_file(TEMPLATE_PATH + "/TensorMethods.h")
FUNCTIONS_H = CodeTemplate.from_file(TEMPLATE_PATH + "/Functions.h")
NATIVE_FUNCTIONS_PATH = options.source_path + "/NativeFunctions.h"
generators = {
'CPUGenerator.h': {
'name': 'CPU',
'th_generator': 'THGenerator * generator;',
'header': 'TH/TH.h',
},
'CUDAGenerator.h': {
'name': 'CUDA',
'th_generator': '',
'header': 'THC/THC.h'
},
}
backends = ['CPU']
if not options.no_cuda:
backends.append('CUDA')
densities = ['Dense', 'Sparse']
scalar_types = [
('Byte', 'uint8_t', 'Long', 'uint8_t'),
('Char', 'int8_t', 'Long', 'int8_t'),
('Double', 'double', 'Double', 'double'),
('Float', 'float', 'Double', 'float'),
('Int', 'int', 'Long', 'int32_t'),
('Long', 'int64_t', 'Long', 'int64_t'),
('Short', 'int16_t', 'Long', 'int16_t'),
('Half', 'Half', 'Double', 'THHalf'),
]
# shared environment for non-derived base classes Type.h Tensor.h Storage.h
top_env = {
'type_registrations': [],
'type_headers': [],
'type_method_declarations': [],
'type_method_definitions': [],
'type_method_inline_definitions': [],
'tensor_method_declarations': [],
'tensor_method_definitions': [],
'function_declarations': [],
'function_definitions': [],
'type_ids': [],
}
def write(filename, s):
filename = "ATen/" + filename
if options.output_dependencies is not None:
output_dependencies_file.write(filename + ";")
return
with open(filename, "w") as f:
f.write(s)
def format_yaml(data):
if options.output_dependencies:
# yaml formatting is slow so don't do it if we will ditch it.
return ""
noalias_dumper = yaml.dumper.SafeDumper
noalias_dumper.ignore_aliases = lambda self, data: True
return yaml.dump(data, default_flow_style=False, Dumper=noalias_dumper)
def generate_storage_type_and_tensor(backend, density, scalar_type, declarations):
scalar_name, c_type, accreal, th_scalar_type = scalar_type
env = {}
density_tag = 'Sparse' if density == 'Sparse' else ''
th_density_tag = 'S' if density == 'Sparse' else ''
env['Density'] = density
env['ScalarName'] = scalar_name
env['ScalarType'] = c_type
env['THScalarType'] = th_scalar_type
env['AccScalarName'] = accreal
env['Storage'] = "{}{}Storage".format(backend, scalar_name)
env['Type'] = "{}{}{}Type".format(density_tag, backend, scalar_name)
env['Tensor'] = "{}{}{}Tensor".format(density_tag, backend, scalar_name)
env['SparseTensor'] = "Sparse{}{}Tensor".format(backend, scalar_name)
env['Backend'] = density_tag + backend
# used for generating switch logic for external functions
tag = density_tag + backend + scalar_name
env['TypeID'] = 'TypeID::' + tag
top_env['type_ids'].append(tag + ',')
if backend == 'CUDA':
env['th_headers'] = ['#include <THC/THC.h>',
'#include <THCUNN/THCUNN.h>',
'#undef THNN_',
'#undef THCIndexTensor_']
# if density == 'Sparse':
env['th_headers'] += ['#include <THCS/THCS.h>',
'#undef THCIndexTensor_']
sname = '' if scalar_name == "Float" else scalar_name
env['THType'] = 'Cuda{}'.format(sname)
env['THStorage'] = 'THCuda{}Storage'.format(sname)
if density == 'Dense':
env['THTensor'] = 'THCuda{}Tensor'.format(sname)
else:
env['THTensor'] = 'THCS{}Tensor'.format(scalar_name)
env['THIndexTensor'] = 'THCudaLongTensor'
env['state'] = ['context->thc_state']
env['isCUDA'] = 'true'
env['storage_device'] = 'return storage->device;'
env['Generator'] = 'CUDAGenerator'
else:
env['th_headers'] = ['#include <TH/TH.h>',
'#include <THNN/THNN.h>',
'#undef THNN_']
# if density == 'Sparse':
env['th_headers'].append('#include <THS/THS.h>')
env['THType'] = scalar_name
env['THStorage'] = "TH{}Storage".format(scalar_name)
env['THTensor'] = 'TH{}{}Tensor'.format(th_density_tag, scalar_name)
env['THIndexTensor'] = 'THLongTensor'
env['state'] = []
env['isCUDA'] = 'false'
env['storage_device'] = 'throw std::runtime_error("CPU storage has no device");'
env['Generator'] = 'CPUGenerator'
env['AS_REAL'] = env['ScalarType']
if scalar_name == "Half":
env['SparseTensor'] = 'Tensor'
if backend == "CUDA":
env['to_th_type'] = 'HalfFix<__half,Half>'
env['to_at_type'] = 'HalfFix<Half,__half>'
env['AS_REAL'] = 'convert<half,double>'
env['THScalarType'] = 'half'
else:
env['to_th_type'] = 'HalfFix<THHalf,Half>'
env['to_at_type'] = 'HalfFix<Half,THHalf>'
elif scalar_name == 'Long':
env['to_th_type'] = 'long'
env['to_at_type'] = 'int64_t'
else:
env['to_th_type'] = ''
env['to_at_type'] = ''
declarations, definitions = function_wrapper.create_derived(
env, declarations)
env['type_derived_method_declarations'] = declarations
env['type_derived_method_definitions'] = definitions
if density != 'Sparse':
# there are no special storage types for Sparse, they are composed
# of Dense tensors
write(env['Storage'] + ".cpp", STORAGE_DERIVED_CPP.substitute(env))
write(env['Storage'] + ".h", STORAGE_DERIVED_H.substitute(env))
env['TensorDenseOrSparse'] = TENSOR_DENSE_CPP.substitute(env)
env['THTensor_nDimension'] = 'tensor->nDimension'
else:
env['TensorDenseOrSparse'] = TENSOR_SPARSE_CPP.substitute(env)
env['THTensor_nDimension'] = 'tensor->nDimensionI + tensor->nDimensionV'
write(env['Type'] + ".cpp", TYPE_DERIVED_CPP.substitute(env))
write(env['Type'] + ".h", TYPE_DERIVED_H.substitute(env))
write(env['Tensor'] + ".cpp", TENSOR_DERIVED_CPP.substitute(env))
write(env['Tensor'] + ".h", TENSOR_DERIVED_H.substitute(env))
type_register = (('context->type_registry[static_cast<int>(Backend::{})]' +
'[static_cast<int>(ScalarType::{})].reset(new {}(context));')
.format(env['Backend'], scalar_name, env['Type']))
top_env['type_registrations'].append(type_register)
top_env['type_headers'].append(
'#include "ATen/{}.h"'.format(env['Type']))
return env
cwrap_files = [f for f in files if f.endswith('.cwrap')]
nn_files = [f for f in files if f.endswith('.yaml') or f.endswith('.h')]
declarations = [d
for file in cwrap_files
for d in cwrap_parser.parse(file)]
print(nn_files)
declarations += nn_parse.run(nn_files)
declarations += native_parse.parse(NATIVE_FUNCTIONS_PATH)
declarations = preprocess_declarations.run(declarations)
for fname, env in generators.items():
write(fname, GENERATOR_DERIVED.substitute(env))
# note: this will fill in top_env['type/tensor_method_declarations/definitions']
# and modify the declarations to include any information that will all_backends
# be used by function_wrapper.create_derived
output_declarations = function_wrapper.create_generic(top_env, declarations)
write("Declarations.yaml", format_yaml(output_declarations))
# populated by generate_storage_type_and_tensor
all_types = []
for backend in backends:
for density in densities:
for scalar_type in scalar_types:
if density == 'Sparse' and scalar_type[0] == 'Half':
# THS does not do half type yet.
continue
all_types.append(generate_storage_type_and_tensor(
backend, density, scalar_type, declarations))
write('Type.h', TYPE_H.substitute(top_env))
write('Type.cpp', TYPE_CPP.substitute(top_env))
write('Tensor.h', TENSOR_H.substitute(top_env))
write('TensorMethods.h', TENSOR_METHODS_H.substitute(top_env))
write('Functions.h', FUNCTIONS_H.substitute(top_env))
write('Dispatch.h', dispatch_macros.create(all_types))
write('Copy.cpp', copy_wrapper.create(all_types))
if options.output_dependencies is not None:
output_dependencies_file.close()
| [
"cwrap_parser.parse",
"nn_parse.run",
"preprocess_declarations.run",
"yaml.dump",
"function_wrapper.create_generic",
"code_template.CodeTemplate.from_file",
"optparse.OptionParser",
"function_wrapper.create_derived",
"dispatch_macros.create",
"native_parse.parse",
"copy_wrapper.create"
] | [((252, 266), 'optparse.OptionParser', 'OptionParser', ([], {}), '()\n', (264, 266), False, 'from optparse import OptionParser\n'), ((809, 870), 'code_template.CodeTemplate.from_file', 'CodeTemplate.from_file', (["(TEMPLATE_PATH + '/GeneratorDerived.h')"], {}), "(TEMPLATE_PATH + '/GeneratorDerived.h')\n", (831, 870), False, 'from code_template import CodeTemplate\n'), ((898, 959), 'code_template.CodeTemplate.from_file', 'CodeTemplate.from_file', (["(TEMPLATE_PATH + '/StorageDerived.cpp')"], {}), "(TEMPLATE_PATH + '/StorageDerived.cpp')\n", (920, 959), False, 'from code_template import CodeTemplate\n'), ((985, 1044), 'code_template.CodeTemplate.from_file', 'CodeTemplate.from_file', (["(TEMPLATE_PATH + '/StorageDerived.h')"], {}), "(TEMPLATE_PATH + '/StorageDerived.h')\n", (1007, 1044), False, 'from code_template import CodeTemplate\n'), ((1065, 1123), 'code_template.CodeTemplate.from_file', 'CodeTemplate.from_file', (["(TEMPLATE_PATH + '/TypeDerived.cpp')"], {}), "(TEMPLATE_PATH + '/TypeDerived.cpp')\n", (1087, 1123), False, 'from code_template import CodeTemplate\n'), ((1141, 1197), 'code_template.CodeTemplate.from_file', 'CodeTemplate.from_file', (["(TEMPLATE_PATH + '/TypeDerived.h')"], {}), "(TEMPLATE_PATH + '/TypeDerived.h')\n", (1163, 1197), False, 'from code_template import CodeTemplate\n'), ((1207, 1256), 'code_template.CodeTemplate.from_file', 'CodeTemplate.from_file', (["(TEMPLATE_PATH + '/Type.h')"], {}), "(TEMPLATE_PATH + '/Type.h')\n", (1229, 1256), False, 'from code_template import CodeTemplate\n'), ((1268, 1319), 'code_template.CodeTemplate.from_file', 'CodeTemplate.from_file', (["(TEMPLATE_PATH + '/Type.cpp')"], {}), "(TEMPLATE_PATH + '/Type.cpp')\n", (1290, 1319), False, 'from code_template import CodeTemplate\n'), ((1342, 1402), 'code_template.CodeTemplate.from_file', 'CodeTemplate.from_file', (["(TEMPLATE_PATH + '/TensorDerived.cpp')"], {}), "(TEMPLATE_PATH + '/TensorDerived.cpp')\n", (1364, 1402), False, 'from code_template import 
CodeTemplate\n'), ((1428, 1487), 'code_template.CodeTemplate.from_file', 'CodeTemplate.from_file', (["(TEMPLATE_PATH + '/TensorSparse.cpp')"], {}), "(TEMPLATE_PATH + '/TensorSparse.cpp')\n", (1450, 1487), False, 'from code_template import CodeTemplate\n'), ((1512, 1570), 'code_template.CodeTemplate.from_file', 'CodeTemplate.from_file', (["(TEMPLATE_PATH + '/TensorDense.cpp')"], {}), "(TEMPLATE_PATH + '/TensorDense.cpp')\n", (1534, 1570), False, 'from code_template import CodeTemplate\n'), ((1596, 1654), 'code_template.CodeTemplate.from_file', 'CodeTemplate.from_file', (["(TEMPLATE_PATH + '/TensorDerived.h')"], {}), "(TEMPLATE_PATH + '/TensorDerived.h')\n", (1618, 1654), False, 'from code_template import CodeTemplate\n'), ((1666, 1717), 'code_template.CodeTemplate.from_file', 'CodeTemplate.from_file', (["(TEMPLATE_PATH + '/Tensor.h')"], {}), "(TEMPLATE_PATH + '/Tensor.h')\n", (1688, 1717), False, 'from code_template import CodeTemplate\n'), ((1737, 1795), 'code_template.CodeTemplate.from_file', 'CodeTemplate.from_file', (["(TEMPLATE_PATH + '/TensorMethods.h')"], {}), "(TEMPLATE_PATH + '/TensorMethods.h')\n", (1759, 1795), False, 'from code_template import CodeTemplate\n'), ((1811, 1865), 'code_template.CodeTemplate.from_file', 'CodeTemplate.from_file', (["(TEMPLATE_PATH + '/Functions.h')"], {}), "(TEMPLATE_PATH + '/Functions.h')\n", (1833, 1865), False, 'from code_template import CodeTemplate\n'), ((8587, 8609), 'nn_parse.run', 'nn_parse.run', (['nn_files'], {}), '(nn_files)\n', (8599, 8609), False, 'import nn_parse\n'), ((8626, 8667), 'native_parse.parse', 'native_parse.parse', (['NATIVE_FUNCTIONS_PATH'], {}), '(NATIVE_FUNCTIONS_PATH)\n', (8644, 8667), False, 'import native_parse\n'), ((8683, 8724), 'preprocess_declarations.run', 'preprocess_declarations.run', (['declarations'], {}), '(declarations)\n', (8710, 8724), False, 'import preprocess_declarations\n'), ((9045, 9099), 'function_wrapper.create_generic', 'function_wrapper.create_generic', (['top_env', 
'declarations'], {}), '(top_env, declarations)\n', (9076, 9099), False, 'import function_wrapper\n'), ((3585, 3649), 'yaml.dump', 'yaml.dump', (['data'], {'default_flow_style': '(False)', 'Dumper': 'noalias_dumper'}), '(data, default_flow_style=False, Dumper=noalias_dumper)\n', (3594, 3649), False, 'import yaml\n'), ((6905, 6955), 'function_wrapper.create_derived', 'function_wrapper.create_derived', (['env', 'declarations'], {}), '(env, declarations)\n', (6936, 6955), False, 'import function_wrapper\n'), ((9865, 9898), 'dispatch_macros.create', 'dispatch_macros.create', (['all_types'], {}), '(all_types)\n', (9887, 9898), False, 'import dispatch_macros\n'), ((9918, 9948), 'copy_wrapper.create', 'copy_wrapper.create', (['all_types'], {}), '(all_types)\n', (9937, 9948), False, 'import copy_wrapper\n'), ((8529, 8553), 'cwrap_parser.parse', 'cwrap_parser.parse', (['file'], {}), '(file)\n', (8547, 8553), False, 'import cwrap_parser\n')] |
from logging import getLogger
from typing import Dict, List, Optional
from tmtrader.entity.order import FilledBasicOrder
from tmtrader.entity.position import ClosedPosition, Position, Positions, \
PositionsRef
from tmtrader.exchange_for_backtest.usecase.order_to_share import from_order
logger = getLogger(__name__)
PositionsDict = Dict[int, Positions]
ClosedPositions = List[ClosedPosition]
class PositionManager:
def __init__(self):
self.__positions_dic: PositionsDict = dict()
def current_positions_of(self, product_id: int) -> Optional[PositionsRef]:
if product_id in self.__positions_dic:
return self.__positions_dic[product_id].to_ref()
else:
return None
def current_positions(self) -> Dict[int, PositionsRef]:
return {k: v.to_ref() for k, v in self.__positions_dic.items() if
v.len}
def update_position(self,
order: FilledBasicOrder) -> ClosedPositions:
logger.debug(f'Got filled order at PositionManager: {order}')
pid = order.product_id
if pid in self.__positions_dic:
logger.debug(f'position size before update: {self.__positions_dic[pid].len}')
else:
logger.debug(f'position size before update: 0')
new_shares = from_order(order)
positions = None
if pid in self.__positions_dic:
positions = self.__positions_dic.pop(pid)
closed_pos = []
if positions and positions.is_long:
if order.is_buy:
self.__add_positions(pid, positions, new_shares)
else:
closed_pos = self.__close_and_may_open(pid, positions,
new_shares)
elif positions:
if order.is_buy:
closed_pos = self.__close_and_may_open(pid, positions,
new_shares)
else:
self.__add_positions(pid, positions, new_shares)
else:
self.__positions_dic[pid] = Positions(pid, new_shares,
order.is_buy)
if pid in self.__positions_dic:
logger.debug(
f'position size after update: {self.__positions_dic[pid].len}')
else:
logger.debug('position size after update: 0')
return closed_pos
def __add_positions(self, pid: int, positions: Positions,
new_shares: List[Position]):
positions.add_positions(new_shares)
self.__positions_dic[pid] = positions
def __close_and_may_open(self, pid: int, positions: Positions,
new_shares: List[Position]) -> ClosedPositions:
closed = positions.close_positions(new_shares)
if closed.remaining_contracts:
self.__positions_dic[pid] = Positions(pid,
closed.remaining_contracts,
is_long=not
positions.is_long)
else:
if positions.len:
self.__positions_dic[pid] = positions
return closed.closed
| [
"logging.getLogger",
"tmtrader.exchange_for_backtest.usecase.order_to_share.from_order",
"tmtrader.entity.position.Positions"
] | [((302, 321), 'logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (311, 321), False, 'from logging import getLogger\n'), ((1311, 1328), 'tmtrader.exchange_for_backtest.usecase.order_to_share.from_order', 'from_order', (['order'], {}), '(order)\n', (1321, 1328), False, 'from tmtrader.exchange_for_backtest.usecase.order_to_share import from_order\n'), ((2918, 2991), 'tmtrader.entity.position.Positions', 'Positions', (['pid', 'closed.remaining_contracts'], {'is_long': '(not positions.is_long)'}), '(pid, closed.remaining_contracts, is_long=not positions.is_long)\n', (2927, 2991), False, 'from tmtrader.entity.position import ClosedPosition, Position, Positions, PositionsRef\n'), ((2096, 2136), 'tmtrader.entity.position.Positions', 'Positions', (['pid', 'new_shares', 'order.is_buy'], {}), '(pid, new_shares, order.is_buy)\n', (2105, 2136), False, 'from tmtrader.entity.position import ClosedPosition, Position, Positions, PositionsRef\n')] |
import grpc
from concurrent import futures
import time
import sys
sys.path.insert(0, 'service/')
from service_spec import fake_news_pb2
from service_spec import fake_news_pb2_grpc
import json
import test
class fake_news_classificationServicer(fake_news_pb2_grpc.fake_news_classificationServicer):
def classify(self, request, context):
response = fake_news_pb2.OutputMessage()
response.result = test.predict(request.value)
return response
server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
fake_news_pb2_grpc.add_fake_news_classificationServicer_to_server(fake_news_classificationServicer(), server)
print('Starting server. Listening on port 7011.')
server.add_insecure_port('0.0.0.0:7011')
server.start()
try:
while True:
time.sleep(86400)
except KeyboardInterrupt:
server.stop(0)
| [
"service_spec.fake_news_pb2.OutputMessage",
"sys.path.insert",
"concurrent.futures.ThreadPoolExecutor",
"time.sleep",
"test.predict"
] | [((68, 98), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""service/"""'], {}), "(0, 'service/')\n", (83, 98), False, 'import sys\n'), ((496, 538), 'concurrent.futures.ThreadPoolExecutor', 'futures.ThreadPoolExecutor', ([], {'max_workers': '(10)'}), '(max_workers=10)\n', (522, 538), False, 'from concurrent import futures\n'), ((365, 394), 'service_spec.fake_news_pb2.OutputMessage', 'fake_news_pb2.OutputMessage', ([], {}), '()\n', (392, 394), False, 'from service_spec import fake_news_pb2\n'), ((421, 448), 'test.predict', 'test.predict', (['request.value'], {}), '(request.value)\n', (433, 448), False, 'import test\n'), ((787, 804), 'time.sleep', 'time.sleep', (['(86400)'], {}), '(86400)\n', (797, 804), False, 'import time\n')] |
from io import BytesIO
from PIL import Image
from django.contrib.auth.models import AbstractUser
from django.core.files.uploadedfile import InMemoryUploadedFile
from django.core.mail import send_mail
from django.db import models
from resizeimage import resizeimage
from rest_framework.authtoken.models import Token
from main.settings import USER_SETTINGS
from django.db.models import ObjectDoesNotExist
def generate_avatar_path(obj, filename):
    """Build the per-user upload path for an avatar image.

    The path is keyed on the user's primary key, so every user gets a
    dedicated directory: ``images/avatars/<id>/<filename>``.
    """
    return 'images/avatars/{}/{}'.format(obj.id, filename)
class CustomUser(AbstractUser):
    """Extends the base Django user model with a display name and avatar."""
    displayed = models.CharField(
        max_length=40,
        unique=True,
        verbose_name='отоброжаемое имя'
    )
    avatar = models.ImageField(
        default='images/avatars/default_avatar.png',
        upload_to=generate_avatar_path,
        blank=True,
        null=True,
        verbose_name='аватар',
    )
    # Avatar URL supplied by a third-party auth provider, if any.
    foreign_avatar_url = models.URLField(
        null=True,
        blank=True,
        verbose_name='аватар из стороних источников'
    )
    def save(self, *args, **kwargs):
        """Profile-save hook: process a changed avatar, or notify the admins
        about a newly registered user (ObjectDoesNotExist means the user is
        not in the database yet)."""
        try:
            self.avatar_update_handler()
        except ObjectDoesNotExist:
            # First save of this user: send a registration notification.
            send_mail('Новый пользователь!', 'Зарегистрирован новый пользователь: ' + self.displayed,
                      '<EMAIL>', ['<EMAIL>'], fail_silently=True)
        super(CustomUser, self).save(*args, **kwargs)
    def avatar_update_handler(self):
        """Re-process the avatar image when the uploaded file changed."""
        user = CustomUser.objects.get(id=self.id)
        if user.avatar != self.avatar:
            self.get_avatar_ext()
            self.generate_avatar_name()
            self.resize_avatar()
        # self.delete_current_avatar()
    def get_avatar_ext(self):
        """Parse the avatar image extension into ``self.user_avatar_ext``.

        Falls back to the default avatar (and re-raises ObjectDoesNotExist)
        when there is no avatar file at all."""
        try:
            user_avatar_ext = self.avatar.name.split('.')[-1]
            # PIL expects 'jpeg', not 'jpg', as a format name.
            if user_avatar_ext.upper() == 'JPG':
                user_avatar_ext = 'jpeg'
            self.user_avatar_ext = user_avatar_ext
        except AttributeError:
            self.avatar = 'images/avatars/default_avatar.png'
            raise ObjectDoesNotExist
    def resize_avatar(self):
        """Compress the avatar image. Target sizes come from project settings."""
        user_avatar = Image.open(self.avatar)
        avatar_settings = USER_SETTINGS['USER_AVATAR_SETTINGS']
        new_user_avatar = resizeimage.resize_cover(
            user_avatar,
            [avatar_settings['COMPRESSED_WIDTH'], avatar_settings['COMPRESSED_HEIGHT']]
        )
        # Re-encode in memory and replace the uploaded file object.
        new_user_avatar_io = BytesIO()
        new_user_avatar.save(new_user_avatar_io, format=self.user_avatar_ext)
        self.avatar = InMemoryUploadedFile(new_user_avatar_io, None, self.avatar.name, 'image/' + self.user_avatar_ext,
                                           new_user_avatar_io.tell(), None)
    # For using with local storage
    """
    def delete_current_avatar(self):
        try:
            user = CustomUser.objects.get(id=self.id)
        except IntegrityError:
            raise ValidationError('Некорректный пользователь.')
        storage, path = user.avatar.storage, user.avatar.path
        if self.avatar.name in path:
            storage.delete(path)"""
    def generate_avatar_name(self):
        """Rename the avatar file according to project settings
        (requires ``self.user_avatar_ext`` set by ``get_avatar_ext``)."""
        avatar_settings = USER_SETTINGS['USER_AVATAR_SETTINGS']
        self.avatar.name = avatar_settings['AVATAR_IMAGE_NAME'] + '.' + self.user_avatar_ext
    AVATAR_FIELD = 'avatar'
    REQUIRED_FIELDS = ['email', 'avatar', 'displayed', 'foreign_avatar_url']
| [
"PIL.Image.open",
"django.core.mail.send_mail",
"io.BytesIO",
"django.db.models.ImageField",
"resizeimage.resizeimage.resize_cover",
"django.db.models.URLField",
"django.db.models.CharField"
] | [((681, 758), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(40)', 'unique': '(True)', 'verbose_name': '"""отоброжаемое имя"""'}), "(max_length=40, unique=True, verbose_name='отоброжаемое имя')\n", (697, 758), False, 'from django.db import models\n'), ((803, 948), 'django.db.models.ImageField', 'models.ImageField', ([], {'default': '"""images/avatars/default_avatar.png"""', 'upload_to': 'generate_avatar_path', 'blank': '(True)', 'null': '(True)', 'verbose_name': '"""аватар"""'}), "(default='images/avatars/default_avatar.png', upload_to=\n generate_avatar_path, blank=True, null=True, verbose_name='аватар')\n", (820, 948), False, 'from django.db import models\n'), ((1017, 1106), 'django.db.models.URLField', 'models.URLField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""аватар из стороних источников"""'}), "(null=True, blank=True, verbose_name=\n 'аватар из стороних источников')\n", (1032, 1106), False, 'from django.db import models\n'), ((2430, 2453), 'PIL.Image.open', 'Image.open', (['self.avatar'], {}), '(self.avatar)\n', (2440, 2453), False, 'from PIL import Image\n'), ((2544, 2662), 'resizeimage.resizeimage.resize_cover', 'resizeimage.resize_cover', (['user_avatar', "[avatar_settings['COMPRESSED_WIDTH'], avatar_settings['COMPRESSED_HEIGHT']]"], {}), "(user_avatar, [avatar_settings['COMPRESSED_WIDTH'],\n avatar_settings['COMPRESSED_HEIGHT']])\n", (2568, 2662), False, 'from resizeimage import resizeimage\n'), ((2722, 2731), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (2729, 2731), False, 'from io import BytesIO\n'), ((1318, 1455), 'django.core.mail.send_mail', 'send_mail', (['"""Новый пользователь!"""', "('Зарегистрирован новый пользователь: ' + self.displayed)", '"""<EMAIL>"""', "['<EMAIL>']"], {'fail_silently': '(True)'}), "('Новый пользователь!', 'Зарегистрирован новый пользователь: ' +\n self.displayed, '<EMAIL>', ['<EMAIL>'], fail_silently=True)\n", (1327, 1455), False, 'from django.core.mail import send_mail\n')] 
|
# Rewritten by @keinshin
import io
from userbot import CMD_LIST, ALIVE_NAME, bot as light
from userbot import CMD_HELP
from userbot.utils import lightning_cmd
import asyncio
from var import Var
# Display name used in bot replies; falls back to a hint telling the owner
# to set the ALIVE_NAME config var.
DEFAULTUSER = str(ALIVE_NAME) if ALIVE_NAME else "Pls Go To Heroku Vars Then in `ALIVE_NAME`place You Telegram `Your Desired Name` "
@light.on(lightning_cmd(pattern="help ?(.*)"))
async def cmd_list(event):
    # Handler for ".help [plugin]": lists all registered commands, shows a
    # single plugin's commands, or defers to the inline help bot.
    if not event.text[0].isalpha() and event.text[0] not in ("/", "#", "@", "!"):
        lightningusername = Var.TG_BOT_USER_NAME_BF_HER
        input_str = event.pattern_match.group(1)
        if lightningusername is None or input_str == "text":
            # No inline bot configured (or plain-text requested):
            # build the full command list as one message.
            string = ""
            for i in CMD_LIST:
                string += "ℹ️ " + i + "\n"
                for iter_list in CMD_LIST[i]:
                    string += "    `" + str(iter_list) + "`"
                    string += "\n"
                string += "\n"
            # Telegram messages are capped at 4096 characters.
            if len(string) > 4095:
                # NOTE(review): `borg` is not defined in this module (the
                # client is imported as `light`); presumably the plugin
                # loader injects it at runtime -- confirm.
                await borg.send_message(event.chat_id, "`Lol Try .help`")
                await asyncio.sleep(5)
            else:
                await event.edit(string)
        elif input_str:
            # Per-plugin help: show the commands of one plugin.
            if input_str in CMD_LIST:
                string = "Commands found in {}:\n".format(input_str)
                for i in CMD_LIST[input_str]:
                    string += "    " + i
                    string += "\n"
                await event.edit(string)
            else:
                await event.edit("`Wait Checking..`")
                await asyncio.sleep(2)
                await event.edit(input_str + " ☹️ is not a valid plugin😞😞!")
        else:
            # Inline-bot path: trigger the help inline query and post the
            # clicked result into the chat.
            light_help_strin = """**𝐁𝐔𝐙𝐙 Heres With The Detailed Help For CMDs** 😉😉 !\n If Faced Any Bug Please Give The Feed Back at [𝐁𝐔𝐙𝐙 Support](https://t.me/ossuport):"""
            # NOTE(review): `bot` is also undefined here (the E0602 disable
            # acknowledges it); likely injected by the loader -- confirm.
            results = await bot.inline_query(  # pylint:disable=E0602
                lightningusername, light_help_strin
            )
            await results[0].click(
                event.chat_id, reply_to=event.reply_to_msg_id, hide_via=True
            )
            await event.delete()
| [
"userbot.utils.lightning_cmd",
"asyncio.sleep"
] | [((361, 396), 'userbot.utils.lightning_cmd', 'lightning_cmd', ([], {'pattern': '"""help ?(.*)"""'}), "(pattern='help ?(.*)')\n", (374, 396), False, 'from userbot.utils import lightning_cmd\n'), ((1075, 1091), 'asyncio.sleep', 'asyncio.sleep', (['(5)'], {}), '(5)\n', (1088, 1091), False, 'import asyncio\n'), ((1541, 1557), 'asyncio.sleep', 'asyncio.sleep', (['(2)'], {}), '(2)\n', (1554, 1557), False, 'import asyncio\n')] |
import torch
import torch.nn as nn
from torchvision import models
import torch.nn.functional as F
def model_parser(model, sum_mode=False, dropout_rate=0.0, bayesian=False):
    """Construct a pose-regression network for the named backbone.

    :param model: backbone name; currently only 'Resnet' is supported.
    :param sum_mode: use summed (True) or concatenated (False) skip links.
    :param dropout_rate: dropout probability applied before the regressors.
    :param bayesian: keep dropout active at evaluation time.
    :return: an initialized HourglassNet.
    :raises ValueError: if ``model`` is not a supported backbone name.
    """
    if model == 'Resnet':
        base_model = models.resnet34(pretrained=True)
        return HourglassNet(base_model, sum_mode, dropout_rate, bayesian)
    # BUG FIX: the original did `assert 'Unvalid Model'`, which is always
    # true (non-empty string), and then returned an unbound `network`,
    # crashing with UnboundLocalError.  Fail loudly and clearly instead.
    raise ValueError("Invalid model: %r (only 'Resnet' is supported)" % (model,))
class HourglassNet(nn.Module):
    """Hourglass encoder/decoder network regressing translation and rotation.

    A ResNet backbone encodes the image; transposed convolutions decode it
    back up with skip connections that are either summed (``sum_mode=True``)
    or channel-concatenated.  The decoder output is flattened and regressed
    to a translation (3 values) and a rotation (4 values).
    """
    def __init__(self, base_model, sum_mode=False, dropout_rate=0.0, bayesian=False):
        # base_model: a torchvision ResNet (e.g. resnet34) used as encoder.
        # bayesian: keep dropout active at eval time as well.
        super(HourglassNet, self).__init__()
        self.bayesian = bayesian
        self.dropout_rate = dropout_rate
        self.sum_mode = sum_mode
        # Encoding Blocks
        # First four children of the ResNet stem (before layer1).
        self.init_block = nn.Sequential(*list(base_model.children())[:4])
        # self.res1_block = nn.Sequential(*list(base_model.layer1.children()))
        self.res_block1 = base_model.layer1
        self.res_block2 = base_model.layer2
        self.res_block3 = base_model.layer3
        self.res_block4 = base_model.layer4
        # Decoding Blocks
        if sum_mode:
            # Skip links are added, so deconv inputs keep encoder widths.
            self.deconv_block1 = nn.ConvTranspose2d(512, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False, output_padding=1)
            self.deconv_block2 = nn.ConvTranspose2d(256, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False, output_padding=1)
            self.deconv_block3 = nn.ConvTranspose2d(128, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False, output_padding=1)
            self.conv_block = nn.Conv2d(64, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
        else:
            # Skip links are concatenated, so deconv inputs are twice as wide.
            self.deconv_block1 = nn.ConvTranspose2d(512, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False, output_padding=1)
            self.deconv_block2 = nn.ConvTranspose2d(512, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False, output_padding=1)
            self.deconv_block3 = nn.ConvTranspose2d(256, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False, output_padding=1)
            self.conv_block = nn.Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
        # Regressor
        # 56*56*32 flattened decoder output -- assumes a 224x224 input
        # image through the ResNet stem; TODO confirm input size.
        self.fc_dim_reduce = nn.Linear(56 * 56 * 32, 1024)
        self.fc_trans = nn.Linear(1024, 3)
        self.fc_rot = nn.Linear(1024, 4)
        # Initialize Weights
        init_modules = [self.deconv_block1, self.deconv_block2, self.deconv_block3, self.conv_block,
                        self.fc_dim_reduce, self.fc_trans, self.fc_rot]
        for module in init_modules:
            # NOTE(review): conv_block is an nn.Conv2d, but this check tests
            # nn.Conv3d, so conv_block never receives kaiming init --
            # presumably nn.Conv2d was intended; confirm.
            if isinstance(module, nn.ConvTranspose2d) or isinstance(module, nn.Linear) or isinstance(module, nn.Conv3d):
                nn.init.kaiming_normal_(module.weight)
                if module.bias is not None:
                    nn.init.constant_(module.bias, 0)
        # nn.init.normal_(self.fc_last.weight, 0, 0.01)
        # nn.init.constant_(self.fc_last.bias, 0)
        #
        # nn.init.normal_(self.fc_position.weight, 0, 0.5)
        # nn.init.constant_(self.fc_position.bias, 0)
        #
        # nn.init.normal_(self.fc_rotation.weight, 0, 0.01)
        # nn.init.constant_(self.fc_rotation.bias, 0)
    def forward(self, x):
        """Return (translation, rotation) predictions for an image batch."""
        # Conv
        x = self.init_block(x)
        x_res1 = self.res_block1(x)
        x_res2 = self.res_block2(x_res1)
        x_res3 = self.res_block3(x_res2)
        x_res4 = self.res_block4(x_res3)
        # Deconv
        x_deconv1 = self.deconv_block1(x_res4)
        if self.sum_mode:
            x_deconv1 = x_res3 + x_deconv1
        else:
            x_deconv1 = torch.cat((x_res3, x_deconv1), dim=1)
        x_deconv2 = self.deconv_block2(x_deconv1)
        if self.sum_mode:
            x_deconv2 = x_res2 + x_deconv2
        else:
            x_deconv2 = torch.cat((x_res2, x_deconv2), dim=1)
        x_deconv3 = self.deconv_block3(x_deconv2)
        if self.sum_mode:
            x_deconv3 = x_res1 + x_deconv3
        else:
            x_deconv3 = torch.cat((x_res1, x_deconv3), dim=1)
        x_conv = self.conv_block(x_deconv3)
        x_linear = x_conv.view(x_conv.size(0), -1)
        x_linear = self.fc_dim_reduce(x_linear)
        x_linear = F.relu(x_linear)
        # Keep dropout on at inference when bayesian (MC-dropout style).
        dropout_on = self.training or self.bayesian
        if self.dropout_rate > 0:
            x_linear = F.dropout(x_linear, p=self.dropout_rate, training=dropout_on)
        trans = self.fc_trans(x_linear)
        rot = self.fc_rot(x_linear)
        return trans, rot
if __name__ == '__main__':
    # Smoke test: build the ResNet-backed network and print its structure.
    model=model_parser('Resnet')
    print(model)
| [
"torch.nn.init.constant_",
"torch.nn.init.kaiming_normal_",
"torch.nn.functional.dropout",
"torch.nn.Conv2d",
"torchvision.models.resnet34",
"torch.nn.functional.relu",
"torch.nn.Linear",
"torch.nn.ConvTranspose2d",
"torch.cat"
] | [((244, 276), 'torchvision.models.resnet34', 'models.resnet34', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (259, 276), False, 'from torchvision import models\n'), ((2234, 2263), 'torch.nn.Linear', 'nn.Linear', (['(56 * 56 * 32)', '(1024)'], {}), '(56 * 56 * 32, 1024)\n', (2243, 2263), True, 'import torch.nn as nn\n'), ((2288, 2306), 'torch.nn.Linear', 'nn.Linear', (['(1024)', '(3)'], {}), '(1024, 3)\n', (2297, 2306), True, 'import torch.nn as nn\n'), ((2329, 2347), 'torch.nn.Linear', 'nn.Linear', (['(1024)', '(4)'], {}), '(1024, 4)\n', (2338, 2347), True, 'import torch.nn as nn\n'), ((4213, 4229), 'torch.nn.functional.relu', 'F.relu', (['x_linear'], {}), '(x_linear)\n', (4219, 4229), True, 'import torch.nn.functional as F\n'), ((1124, 1237), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['(512)', '(256)'], {'kernel_size': '(3, 3)', 'stride': '(2, 2)', 'padding': '(1, 1)', 'bias': '(False)', 'output_padding': '(1)'}), '(512, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1,\n 1), bias=False, output_padding=1)\n', (1142, 1237), True, 'import torch.nn as nn\n'), ((1267, 1380), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['(256)', '(128)'], {'kernel_size': '(3, 3)', 'stride': '(2, 2)', 'padding': '(1, 1)', 'bias': '(False)', 'output_padding': '(1)'}), '(256, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1,\n 1), bias=False, output_padding=1)\n', (1285, 1380), True, 'import torch.nn as nn\n'), ((1410, 1523), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['(128)', '(64)'], {'kernel_size': '(3, 3)', 'stride': '(2, 2)', 'padding': '(1, 1)', 'bias': '(False)', 'output_padding': '(1)'}), '(128, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, \n 1), bias=False, output_padding=1)\n', (1428, 1523), True, 'import torch.nn as nn\n'), ((1549, 1634), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', '(32)'], {'kernel_size': '(3, 3)', 'stride': '(1, 1)', 'padding': '(1, 1)', 'bias': '(False)'}), '(64, 32, kernel_size=(3, 3), stride=(1, 1), 
padding=(1, 1), bias=False\n )\n', (1558, 1634), True, 'import torch.nn as nn\n'), ((1677, 1790), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['(512)', '(256)'], {'kernel_size': '(3, 3)', 'stride': '(2, 2)', 'padding': '(1, 1)', 'bias': '(False)', 'output_padding': '(1)'}), '(512, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1,\n 1), bias=False, output_padding=1)\n', (1695, 1790), True, 'import torch.nn as nn\n'), ((1820, 1933), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['(512)', '(128)'], {'kernel_size': '(3, 3)', 'stride': '(2, 2)', 'padding': '(1, 1)', 'bias': '(False)', 'output_padding': '(1)'}), '(512, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1,\n 1), bias=False, output_padding=1)\n', (1838, 1933), True, 'import torch.nn as nn\n'), ((1963, 2076), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['(256)', '(64)'], {'kernel_size': '(3, 3)', 'stride': '(2, 2)', 'padding': '(1, 1)', 'bias': '(False)', 'output_padding': '(1)'}), '(256, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, \n 1), bias=False, output_padding=1)\n', (1981, 2076), True, 'import torch.nn as nn\n'), ((2102, 2188), 'torch.nn.Conv2d', 'nn.Conv2d', (['(128)', '(32)'], {'kernel_size': '(3, 3)', 'stride': '(1, 1)', 'padding': '(1, 1)', 'bias': '(False)'}), '(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=\n False)\n', (2111, 2188), True, 'import torch.nn as nn\n'), ((3620, 3657), 'torch.cat', 'torch.cat', (['(x_res3, x_deconv1)'], {'dim': '(1)'}), '((x_res3, x_deconv1), dim=1)\n', (3629, 3657), False, 'import torch\n'), ((3816, 3853), 'torch.cat', 'torch.cat', (['(x_res2, x_deconv2)'], {'dim': '(1)'}), '((x_res2, x_deconv2), dim=1)\n', (3825, 3853), False, 'import torch\n'), ((4012, 4049), 'torch.cat', 'torch.cat', (['(x_res1, x_deconv3)'], {'dim': '(1)'}), '((x_res1, x_deconv3), dim=1)\n', (4021, 4049), False, 'import torch\n'), ((4340, 4401), 'torch.nn.functional.dropout', 'F.dropout', (['x_linear'], {'p': 'self.dropout_rate', 'training': 
'dropout_on'}), '(x_linear, p=self.dropout_rate, training=dropout_on)\n', (4349, 4401), True, 'import torch.nn.functional as F\n'), ((2725, 2763), 'torch.nn.init.kaiming_normal_', 'nn.init.kaiming_normal_', (['module.weight'], {}), '(module.weight)\n', (2748, 2763), True, 'import torch.nn as nn\n'), ((2828, 2861), 'torch.nn.init.constant_', 'nn.init.constant_', (['module.bias', '(0)'], {}), '(module.bias, 0)\n', (2845, 2861), True, 'import torch.nn as nn\n')] |
"""MongoDB IO tasks."""
import logging
from typing import List, Optional, Sequence
from urllib.parse import urlparse
import attr
from icecream import ic # noqa pylint: disable=unused-import
from pymongo import MongoClient
from pymongo.errors import ServerSelectionTimeoutError
import voluptuous as vol
from dataplaybook import Columns, Table, task
from dataplaybook.utils import PlaybookError
_LOGGER = logging.getLogger(__name__)
def _clean_netloc(db_netloc: str) -> str:
if "/" not in db_netloc:
return db_netloc
try:
res = urlparse(db_netloc)
return res.netloc
except AttributeError as err:
_LOGGER.error("could not parse URL: %s: %s", db_netloc, err)
raise err
@attr.s(slots=True)
class MongoURI:
    """A parsed MongoDB location: host, database, collection and set id."""

    netloc = attr.ib(converter=_clean_netloc)  # host[:port]
    database = attr.ib()
    collection = attr.ib()
    set_id = attr.ib(default="")  # optional dataset tag stored as _sid

    @staticmethod
    def new_from_string(db_uri: str, set_id=None):
        """Parse ``mdb://host:port/database/collection/[set_id]``.

        :param db_uri: the URI string to parse.
        :param set_id: optional explicit set id; may not be combined with a
            set id embedded in the URI path.
        :return: a new MongoURI.
        :raises voluptuous.Invalid: on a bad scheme, or when a set_id is
            given both in the URI and as an argument.
        """
        try:
            res = urlparse(db_uri)
        except AttributeError as err:
            _LOGGER.error("could not parse URL: %s: %s", db_uri, err)
            raise err
        if res.scheme not in ["mdb", "mongodb", "db"]:
            raise vol.Invalid("mdb://host:port/database/collection/[set_id]")
        pth = res.path.split("/")
        if len(pth) == 4:
            if set_id:
                # BUG FIX: this previously raised vol.InInvalid, which does
                # not exist in voluptuous and crashed with AttributeError.
                raise vol.Invalid("set_id specified, not allowed in mdb URI")
            set_id = pth[3]
        return MongoURI(
            netloc=res.netloc,
            database=pth[1],
            collection=pth[2],
            set_id=set_id,
        )

    def __str__(self) -> str:
        return f"{self.netloc}/{self.database}/{self.collection}/{self.set_id}"

    def get_client(self, connect=True) -> MongoClient:
        """Return a MongoClient connected to this URI's host."""
        return MongoClient(self.netloc, connect=connect)
@task()
def read_mongo(
    mdb: MongoURI,
    *,
    set_id: Optional[str] = None,
) -> Table:
    """Read data from a MongoDB collection.

    Yields one dict per document, with the internal ``_id`` and ``_sid``
    bookkeeping fields stripped.  When a set id is available (explicitly or
    via ``mdb.set_id``), only documents tagged with that ``_sid`` are read.
    """
    client = MongoClient(mdb.netloc, connect=True)
    if not set_id:
        set_id = mdb.set_id
    if set_id:
        cursor = client[mdb.database][mdb.collection].find({"_sid": set_id})
    else:
        cursor = client[mdb.database][mdb.collection].find()
    # Larger batches cut server round trips while streaming documents.
    cursor.batch_size(200)
    for result in cursor:
        result.pop("_sid", None)
        result.pop("_id", None)
        yield result
@task()
def write_mongo(
    table: Table, mdb: MongoURI, *, set_id: Optional[str] = None, force=False
):
    """Write data to a MongoDB collection.

    With a set id (either ``set_id`` or ``mdb.set_id``), existing documents
    tagged with that ``_sid`` are deleted and the new rows inserted with the
    tag; without one, rows are simply appended.  An empty ``table`` will not
    clobber existing documents unless ``force`` is given.

    :raises PlaybookError: when the MongoDB server cannot be reached.
    """
    if not set_id:
        set_id = mdb.set_id
    try:
        client = MongoClient(mdb.netloc, connect=True)
        col = client[mdb.database][mdb.collection]
        if not set_id:
            _LOGGER.info("Writing %s documents", len(table))
            client[mdb.database][mdb.collection].insert_many(table)
            return
        filtr = {"_sid": set_id}
        # FIX: Collection.count() was removed in PyMongo 4;
        # count_documents() is the supported replacement (PyMongo >= 3.7).
        existing_count = col.count_documents(filtr)
        if not force and existing_count > 0 and not table:
            _LOGGER.error(
                "Trying to replace %s documents with an empty set", existing_count
            )
            return
        _LOGGER.info(
            "Replacing %s documents matching %s, %s new",
            existing_count,
            set_id,
            len(table),
        )
        col.delete_many(filtr)
        if table:
            col.insert_many([dict(d, _sid=set_id) for d in table])
    except ServerSelectionTimeoutError as err:
        raise PlaybookError(f"Could not open connection to mdb {mdb}") from err
@task
def columns_to_list(table: Table, *, list_column: str, columns: Columns) -> None:
    """Collapse boolean columns into a single list-valued column.

    For every row, each column in ``columns`` holding a truthy value is
    removed from the row and its name recorded in ``row[list_column]``.
    Handy for storing many true/false flags as one list.
    """
    for row in table:
        present = []
        for name in columns:
            if row.pop(name, False):
                present.append(name)
        row[list_column] = present
@task
def list_to_columns(table: Table, *, list_column: str, columns: Columns) -> None:
    """Expand a list-valued column back into per-name boolean columns.

    For every row, each entry of ``columns`` found in ``row[list_column]``
    becomes a column set to ``True``; the list column itself is removed.
    """
    for row in table:
        for name in (c for c in columns if c in row[list_column]):
            row[name] = True
        del row[list_column]
@task
def mongo_list_sids(mdb: MongoURI) -> List[str]:
    """Return the distinct ``_sid`` values present in the collection."""
    client = MongoClient(mdb.netloc, connect=True)
    cursor = client[mdb.database][mdb.collection]
    # non = cursor.find_one({"_sid": {"$exists": False}})
    # print(non)
    other = cursor.distinct("_sid")
    # print(other)
    return other
@task
def mongo_delete_sids(*, mdb: MongoURI, sids: List[str]):
    """Delete all documents tagged with any of the given set ids.

    The string "None" (or an actual None) selects documents that have no
    ``_sid`` field at all.
    """
    client = MongoClient(mdb.netloc, connect=True)
    cursor = client[mdb.database][mdb.collection]
    for sid in sids:
        if sid == "None" or sid is None:
            cursor.delete_many({"_sid": {"$exists": False}})
        else:
            cursor.delete_many({"_sid": sid})
@task
def mongo_sync_sids(
    *, mdb_local: MongoURI, mdb_remote: MongoURI, ignore_remote: Sequence[str] = None
):
    """Synchronise two MongoDB collections by ``_sid``.

    Only set ids whose document counts differ are copied from local to
    remote.  Extra set ids found only on the remote are deleted, unless
    listed in ``ignore_remote``.
    """
    # Count documents per _sid on each side with a single aggregation.
    agg = [{"$group": {"_id": "$_sid", "count": {"$sum": 1}}}]
    # get local
    l_db = mdb_local.get_client()[mdb_local.database][mdb_local.collection]
    lsc = {i["_id"]: i["count"] for i in l_db.aggregate(agg)}
    # get remote
    r_db = mdb_remote.get_client()[mdb_remote.database][mdb_remote.collection]
    rsc = {i["_id"]: i["count"] for i in r_db.aggregate(agg)}
    for sid, lval in lsc.items():
        rval = rsc.pop(sid, None)
        if rval != lval:
            # counts are different!
            mdb_local.set_id = sid
            lcl = read_mongo(mdb=mdb_local)
            write_mongo(mdb=mdb_remote, table=lcl, set_id=sid)
    # Whatever remains in rsc exists only on the remote.
    extra = list(set(rsc.keys()) - set(ignore_remote or []))
    # NOTE(review): `ic(extra)` is an icecream debug print left in this
    # code path -- confirm it is intentional before shipping.
    ic(extra)
    if extra:
        mongo_delete_sids(mdb=mdb_remote, sids=extra)
| [
"logging.getLogger",
"icecream.ic",
"dataplaybook.utils.PlaybookError",
"attr.s",
"urllib.parse.urlparse",
"voluptuous.Invalid",
"pymongo.MongoClient",
"dataplaybook.task",
"voluptuous.InInvalid",
"attr.ib"
] | [((407, 434), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (424, 434), False, 'import logging\n'), ((726, 744), 'attr.s', 'attr.s', ([], {'slots': '(True)'}), '(slots=True)\n', (732, 744), False, 'import attr\n'), ((2186, 2192), 'dataplaybook.task', 'task', ([], {}), '()\n', (2190, 2192), False, 'from dataplaybook import Columns, Table, task\n'), ((2732, 2738), 'dataplaybook.task', 'task', ([], {}), '()\n', (2736, 2738), False, 'from dataplaybook import Columns, Table, task\n'), ((798, 830), 'attr.ib', 'attr.ib', ([], {'converter': '_clean_netloc'}), '(converter=_clean_netloc)\n', (805, 830), False, 'import attr\n'), ((846, 855), 'attr.ib', 'attr.ib', ([], {}), '()\n', (853, 855), False, 'import attr\n'), ((873, 882), 'attr.ib', 'attr.ib', ([], {}), '()\n', (880, 882), False, 'import attr\n'), ((896, 915), 'attr.ib', 'attr.ib', ([], {'default': '""""""'}), "(default='')\n", (903, 915), False, 'import attr\n'), ((2341, 2378), 'pymongo.MongoClient', 'MongoClient', (['mdb.netloc'], {'connect': '(True)'}), '(mdb.netloc, connect=True)\n', (2352, 2378), False, 'from pymongo import MongoClient\n'), ((4649, 4686), 'pymongo.MongoClient', 'MongoClient', (['mdb.netloc'], {'connect': '(True)'}), '(mdb.netloc, connect=True)\n', (4660, 4686), False, 'from pymongo import MongoClient\n'), ((4997, 5034), 'pymongo.MongoClient', 'MongoClient', (['mdb.netloc'], {'connect': '(True)'}), '(mdb.netloc, connect=True)\n', (5008, 5034), False, 'from pymongo import MongoClient\n'), ((6258, 6267), 'icecream.ic', 'ic', (['extra'], {}), '(extra)\n', (6260, 6267), False, 'from icecream import ic\n'), ((556, 575), 'urllib.parse.urlparse', 'urlparse', (['db_netloc'], {}), '(db_netloc)\n', (564, 575), False, 'from urllib.parse import urlparse\n'), ((2141, 2182), 'pymongo.MongoClient', 'MongoClient', (['self.netloc'], {'connect': 'connect'}), '(self.netloc, connect=connect)\n', (2152, 2182), False, 'from pymongo import MongoClient\n'), ((2956, 2993), 
'pymongo.MongoClient', 'MongoClient', (['mdb.netloc'], {'connect': '(True)'}), '(mdb.netloc, connect=True)\n', (2967, 2993), False, 'from pymongo import MongoClient\n'), ((1048, 1064), 'urllib.parse.urlparse', 'urlparse', (['db_uri'], {}), '(db_uri)\n', (1056, 1064), False, 'from urllib.parse import urlparse\n'), ((1268, 1327), 'voluptuous.Invalid', 'vol.Invalid', (['"""mdb://host:port/database/collection/[set_id]"""'], {}), "('mdb://host:port/database/collection/[set_id]')\n", (1279, 1327), True, 'import voluptuous as vol\n'), ((3833, 3889), 'dataplaybook.utils.PlaybookError', 'PlaybookError', (['f"""Could not open connection to mdb {mdb}"""'], {}), "(f'Could not open connection to mdb {mdb}')\n", (3846, 3889), False, 'from dataplaybook.utils import PlaybookError\n'), ((1434, 1491), 'voluptuous.InInvalid', 'vol.InInvalid', (['"""set_id specified, not allowed in mdb URI"""'], {}), "('set_id specified, not allowed in mdb URI')\n", (1447, 1491), True, 'import voluptuous as vol\n')] |
import argparse
import json
import numpy as np
import typing
from blockfs.directory import Directory
import logging
from precomputed_tif.blockfs_stack import BlockfsStack
from precomputed_tif.ngff_stack import NGFFStack
import os
import sys
from spimstitch.ngff import NGFFDirectory
from ..stitch import get_output_size, StitchSrcVolume, run
def parse_args(args=sys.argv[1:]):
    """Parse command-line arguments for the oblique volume stitcher.

    :param args: argument list (defaults to ``sys.argv[1:]``).
    :return: the parsed ``argparse.Namespace``.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--input",
        help="The root directory of the oblique volume tree. The program expects "
             "blockfs Neuroglancer volumes in directories whose name is in the "
             "format, <x>_<y> where <x> and <y> are the X and Y coordinates of "
             "the top left corner of the volume.",
        required=True)
    parser.add_argument(
        "--output",
        help="The directory for the precomputed volume output"
    )
    parser.add_argument(
        "--levels",
        help="The number of mipmap levels in the precomputed volume",
        default=5,
        type=int)
    parser.add_argument(
        "--log-level",
        help="The log level for logging",
        default="WARNING")
    # FIX: os.cpu_count() may return None (undetermined), which would make
    # min(12, None) raise TypeError; fall back to 1 worker in that case.
    parser.add_argument(
        "--n-writers",
        help="The number of writer processes for writing blockfs files",
        default=min(12, os.cpu_count() or 1),
        type=int)
    parser.add_argument(
        "--n-workers",
        help="The number of worker processes for the processing pipeline",
        default=min(12, os.cpu_count() or 1),
        type=int)
    parser.add_argument(
        "--silent",
        help="Turn off progress bars",
        action="store_true")
    parser.add_argument(
        "--x-step-size",
        help="X step size in microns",
        default=1.28,
        type=float)
    parser.add_argument(
        "--y-voxel-size",
        help="Size of a voxel in the Y direction in microns",
        default=1.8,
        type=float)
    parser.add_argument(
        "--z-offset",
        help="# of voxels of offset between the start of the stack above "
             "in Z and the stack underneath it",
        default=2048,
        type=int
    )
    parser.add_argument(
        "--output-size",
        help="Size of the output volume (x,y,z). Defaults to the extent of all "
             "prestitched volumes.")
    parser.add_argument(
        "--output-offset",
        help="Offset of the output volume. Only use with --output-size. ")
    parser.add_argument(
        "--alignment",
        help="Alignment file from oblique-align. Default is use static "
             "alignment"
    )
    parser.add_argument(
        "--y-illum-corr",
        help="Fractional brightness of y[2047] with respect to y[0] for "
             "each subvolume. Default is properly corrected",
        type=float
    )
    parser.add_argument(
        "--compute-y-illum-corr",
        help="If present, compute fractional brightness at overlaps "
             "between volumes",
        action="store_true"
    )
    parser.add_argument(
        "--n-y-illum-patches",
        help="Number of patches to take to compute the y illumination "
             "correction",
        type=int,
        default=1000
    )
    parser.add_argument(
        "--min-y-illum-mean",
        help="For an illum patch, the minimum allowed value of the mean "
             "intensity of the patch",
        type=int,
        default=100
    )
    parser.add_argument(
        "--min-y-illum-corr-coef",
        help="The two overlapping volumes in an illumination patch must "
             "have at least this correlation coefficient "
             "(0 <= min-y-illum-corr-coef < 1) to be included",
        type=float,
        default=.80
    )
    parser.add_argument(
        "--ngff",
        help="Output an NGFF volume instead of blockfs",
        action="store_true"
    )
    return parser.parse_args(args)
def main(args=sys.argv[1:]):
    """Stitch oblique subvolumes into a single precomputed output volume."""
    opts = parse_args(args)
    logging.basicConfig(level=getattr(logging,opts.log_level))
    # Collect all level-1 blockfs directory files under the input tree,
    # recording the Z coordinate encoded in each stack's directory name.
    volume_paths = []
    zs = []
    for root, folders, files in os.walk(opts.input, followlinks=True):
        if os.path.split(root)[-1] == "1_1_1":
            for file in files:
                if file == BlockfsStack.DIRECTORY_FILENAME:
                    volume_paths.append(os.path.join(root, file))
                    try:
                        zs.append(int(os.path.split(os.path.dirname(root))[1]))
                    except ValueError:
                        logging.warning(
                            "Non-numeric Z found in stack path: %s" % root)
    all_z = sorted(set(zs))
    # With an alignment file, Z offsets come from the file (in 1/10 units);
    # otherwise they are derived from each stack's index along Z.
    if opts.alignment is not None:
        with open(opts.alignment) as fd:
            align_z = json.load(fd)["align-z"]
    else:
        align_z = False
    if align_z:
        z_offsets = [z / 10 for z in zs]
    else:
        z_offsets = [opts.z_offset * all_z.index(z) * opts.x_step_size for z in zs]
    volumes = [
        StitchSrcVolume(volume_path,
                        opts.x_step_size,
                        opts.y_voxel_size,
                        z_offset)
        for volume_path, z_offset in zip(volume_paths, z_offsets)]
    z_too = adjust_alignments(opts, volumes)
    StitchSrcVolume.rebase_all(volumes, z_too=z_too)
    # Y illumination correction: computed, given on the command line, or off.
    if opts.compute_y_illum_corr:
        y_illum_corr = StitchSrcVolume.compute_illum_corr(
            volumes,
            n_patches=opts.n_y_illum_patches,
            min_mean=opts.min_y_illum_mean,
            min_corr_coef=opts.min_y_illum_corr_coef,
            n_workers=opts.n_workers
        )
    elif opts.y_illum_corr is not None:
        y_illum_corr = opts.y_illum_corr
    else:
        y_illum_corr = None
    if y_illum_corr is not None:
        # Expand the scalar into a linear per-Y-row gain ramp over 2048 rows.
        y_illum_corr = \
            (1 - y_illum_corr) * (2047 - np.arange(2048)) / 2047 + \
            y_illum_corr
    if opts.output_size is None:
        zs, ys, xs = get_output_size(volumes)
        x0 = y0 = z0 = 0
    else:
        xs, ys, zs = [int(_) for _ in opts.output_size.split(",")]
        if opts.output_offset is None:
            x0 = y0 = z0 = 0
        else:
            x0, y0, z0 = [int(_) for _ in opts.output_offset.split(",")]
    if not os.path.exists(opts.output):
        os.mkdir(opts.output)
    l1_dir = os.path.join(opts.output, "1_1_1")
    if not os.path.exists(l1_dir):
        os.mkdir(l1_dir)
    if opts.ngff:
        # NOTE(review): the shape passed here is (xs, ys, xs) -- `xs`
        # appears twice; this looks like it should involve zs. Confirm.
        output = NGFFStack((xs, ys, xs), opts.output)
        output.create()
    else:
        output = BlockfsStack((zs, ys, xs), opts.output)
    # Voxel sizes are given to the stack writer in nanometers.
    voxel_size = (opts.x_step_size * 1000,
                  opts.y_voxel_size * 1000,
                  opts.x_step_size * 1000)
    output.write_info_file(opts.levels, voxel_size)
    if opts.ngff:
        directory = NGFFDirectory(output)
        directory.create()
    else:
        directory_path = os.path.join(l1_dir, BlockfsStack.DIRECTORY_FILENAME)
        directory = Directory(xs, ys, zs, volumes[0].directory.dtype,
                              directory_path,
                              n_filenames=opts.n_writers)
        directory.create()
        directory.start_writer_processes()
    # Perform the stitch into level 1, then build the mipmap pyramid.
    run(volumes, directory, x0, y0, z0, opts.n_workers, opts.silent,
        y_illum_corr)
    directory.close()
    for level in range(2, opts.levels + 1):
        output.write_level_n(level,
                             silent=opts.silent,
                             n_cores=opts.n_writers)
def adjust_alignments(opts, volumes:typing.Sequence[StitchSrcVolume]):
    """Apply per-stack offsets recorded by oblique-align (or similar).

    :param opts: parsed command-line options; ``opts.alignment`` names the
        JSON alignment file, or is None for static alignment.
    :param volumes: volumes whose x0 / y0 (and optionally z0) are updated
        in place when the file has an entry keyed on (x0, y0).
    :return: the file's "align-z" flag, or None when no file was given.
    """
    if opts.alignment is None:
        return None
    with open(opts.alignment) as fd:
        doc: dict = json.load(fd)
    # Alignment keys are JSON-encoded coordinate lists; the last element
    # is dropped so lookups are keyed on (x0, y0) only.
    offsets = {}
    for key, value in doc.get("alignments", {}).items():
        offsets[tuple(json.loads(key)[:-1])] = value
    align_z = doc.get("align-z", False)
    for volume in volumes:
        lookup = (volume.x0, volume.y0)
        if lookup in offsets:
            if align_z:
                volume.x0, volume.y0, volume.z0 = offsets[lookup]
            else:
                volume.x0, volume.y0, _ = offsets[lookup]
    return align_z
if __name__ == "__main__":
    # Command-line entry point.
    main()
| [
"os.path.exists",
"json.loads",
"spimstitch.ngff.NGFFDirectory",
"argparse.ArgumentParser",
"precomputed_tif.blockfs_stack.BlockfsStack",
"numpy.arange",
"os.path.join",
"logging.warning",
"os.path.split",
"os.path.dirname",
"blockfs.directory.Directory",
"os.mkdir",
"os.cpu_count",
"json.... | [((394, 419), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (417, 419), False, 'import argparse\n'), ((4039, 4076), 'os.walk', 'os.walk', (['opts.input'], {'followlinks': '(True)'}), '(opts.input, followlinks=True)\n', (4046, 4076), False, 'import os\n'), ((6213, 6247), 'os.path.join', 'os.path.join', (['opts.output', '"""1_1_1"""'], {}), "(opts.output, '1_1_1')\n", (6225, 6247), False, 'import os\n'), ((6141, 6168), 'os.path.exists', 'os.path.exists', (['opts.output'], {}), '(opts.output)\n', (6155, 6168), False, 'import os\n'), ((6178, 6199), 'os.mkdir', 'os.mkdir', (['opts.output'], {}), '(opts.output)\n', (6186, 6199), False, 'import os\n'), ((6259, 6281), 'os.path.exists', 'os.path.exists', (['l1_dir'], {}), '(l1_dir)\n', (6273, 6281), False, 'import os\n'), ((6291, 6307), 'os.mkdir', 'os.mkdir', (['l1_dir'], {}), '(l1_dir)\n', (6299, 6307), False, 'import os\n'), ((6343, 6379), 'precomputed_tif.ngff_stack.NGFFStack', 'NGFFStack', (['(xs, ys, xs)', 'opts.output'], {}), '((xs, ys, xs), opts.output)\n', (6352, 6379), False, 'from precomputed_tif.ngff_stack import NGFFStack\n'), ((6431, 6470), 'precomputed_tif.blockfs_stack.BlockfsStack', 'BlockfsStack', (['(zs, ys, xs)', 'opts.output'], {}), '((zs, ys, xs), opts.output)\n', (6443, 6470), False, 'from precomputed_tif.blockfs_stack import BlockfsStack\n'), ((6691, 6712), 'spimstitch.ngff.NGFFDirectory', 'NGFFDirectory', (['output'], {}), '(output)\n', (6704, 6712), False, 'from spimstitch.ngff import NGFFDirectory\n'), ((6775, 6828), 'os.path.join', 'os.path.join', (['l1_dir', 'BlockfsStack.DIRECTORY_FILENAME'], {}), '(l1_dir, BlockfsStack.DIRECTORY_FILENAME)\n', (6787, 6828), False, 'import os\n'), ((6849, 6946), 'blockfs.directory.Directory', 'Directory', (['xs', 'ys', 'zs', 'volumes[0].directory.dtype', 'directory_path'], {'n_filenames': 'opts.n_writers'}), '(xs, ys, zs, volumes[0].directory.dtype, directory_path,\n n_filenames=opts.n_writers)\n', (6858, 6946), False, 'from 
blockfs.directory import Directory\n'), ((7814, 7827), 'json.load', 'json.load', (['fd'], {}), '(fd)\n', (7823, 7827), False, 'import json\n'), ((1296, 1310), 'os.cpu_count', 'os.cpu_count', ([], {}), '()\n', (1308, 1310), False, 'import os\n'), ((1478, 1492), 'os.cpu_count', 'os.cpu_count', ([], {}), '()\n', (1490, 1492), False, 'import os\n'), ((4089, 4108), 'os.path.split', 'os.path.split', (['root'], {}), '(root)\n', (4102, 4108), False, 'import os\n'), ((4669, 4682), 'json.load', 'json.load', (['fd'], {}), '(fd)\n', (4678, 4682), False, 'import json\n'), ((4256, 4280), 'os.path.join', 'os.path.join', (['root', 'file'], {}), '(root, file)\n', (4268, 4280), False, 'import os\n'), ((5740, 5755), 'numpy.arange', 'np.arange', (['(2048)'], {}), '(2048)\n', (5749, 5755), True, 'import numpy as np\n'), ((4450, 4513), 'logging.warning', 'logging.warning', (["('Non-numeric Z found in stack path: %s' % root)"], {}), "('Non-numeric Z found in stack path: %s' % root)\n", (4465, 4513), False, 'import logging\n'), ((7940, 7953), 'json.loads', 'json.loads', (['k'], {}), '(k)\n', (7950, 7953), False, 'import json\n'), ((4359, 4380), 'os.path.dirname', 'os.path.dirname', (['root'], {}), '(root)\n', (4374, 4380), False, 'import os\n')] |
from haystack import indexes
from issues.models import Issue, Proposal
from haystack.fields import IntegerField, CharField, BooleanField, DateField, DateTimeField
from datetime import date, datetime, timedelta
class IssueIndex(indexes.ModelSearchIndex, indexes.Indexable):
    """Haystack search index for :class:`issues.models.Issue`.

    ``title`` and ``abstract`` are indexed via ``Meta.fields``; the extra
    fields below allow search results to be filtered per community and by
    confidentiality without hitting the database.
    """
    # Stored as the raw FK id so results can be filtered by community.
    community = IntegerField(model_attr='community_id')
    # Mirrored onto the index so confidential issues can be excluded from results.
    is_confidential = BooleanField(model_attr='is_confidential')
    class Meta:
        model = Issue
        fields = ['title', 'abstract']
    # Note that regular ``SearchIndex`` methods apply.
    def index_queryset(self, using=None):
        """Used when the entire index for model is updated.

        Only active issues are indexed.
        """
        return Issue.objects.active()
class ProposalIndex(indexes.ModelSearchIndex, indexes.Indexable):
    """Haystack search index for :class:`issues.models.Proposal`.

    The ``prepare_*`` hooks below derive values that are not plain model
    attributes (assignee display name, effective decision timestamp).
    """
    text = CharField(document=True, use_template=True)
    active = BooleanField(model_attr='active')
    title = CharField(model_attr='title')
    # Raw community FK id (via the parent issue) for per-community filtering.
    community = IntegerField(model_attr='issue__community_id')
    status = IntegerField(model_attr='status')
    task_completed = BooleanField(model_attr='task_completed')
    type = IntegerField(model_attr='type')
    # Populated by prepare_decided_at / prepare_assignee below.
    decided_at = DateTimeField()
    assignee = CharField()
    due_by = DateField(model_attr='due_by', null=True)
    is_confidential = BooleanField(model_attr='is_confidential')

    def get_model(self):
        """Return the model class this index is built for."""
        return Proposal

    def prepare_assignee(self, obj):
        """Return the assignee's display name, or '' when unassigned."""
        # Dropped the legacy Python-2 ``u''`` prefix and the negated
        # conditional expression; plain str is unicode on Python 3.
        if obj.assigned_to_user:
            return obj.assigned_to_user.display_name
        return ''

    def prepare_decided_at(self, obj):
        """Return when the proposal was decided.

        Falls back to the proposal's creation time when it has not been
        decided at a meeting yet.
        """
        if obj.decided_at_meeting:
            return obj.decided_at_meeting.held_at
        return obj.created_at

    # Note that regular ``SearchIndex`` methods apply.
    def index_queryset(self, using=None):
        """Used when the entire index for model is updated."""
        return Proposal.objects.active()
| [
"haystack.fields.DateField",
"issues.models.Issue.objects.active",
"haystack.fields.CharField",
"issues.models.Proposal.objects.active",
"haystack.fields.BooleanField",
"haystack.fields.DateTimeField",
"haystack.fields.IntegerField"
] | [((291, 330), 'haystack.fields.IntegerField', 'IntegerField', ([], {'model_attr': '"""community_id"""'}), "(model_attr='community_id')\n", (303, 330), False, 'from haystack.fields import IntegerField, CharField, BooleanField, DateField, DateTimeField\n'), ((353, 395), 'haystack.fields.BooleanField', 'BooleanField', ([], {'model_attr': '"""is_confidential"""'}), "(model_attr='is_confidential')\n", (365, 395), False, 'from haystack.fields import IntegerField, CharField, BooleanField, DateField, DateTimeField\n'), ((748, 791), 'haystack.fields.CharField', 'CharField', ([], {'document': '(True)', 'use_template': '(True)'}), '(document=True, use_template=True)\n', (757, 791), False, 'from haystack.fields import IntegerField, CharField, BooleanField, DateField, DateTimeField\n'), ((805, 838), 'haystack.fields.BooleanField', 'BooleanField', ([], {'model_attr': '"""active"""'}), "(model_attr='active')\n", (817, 838), False, 'from haystack.fields import IntegerField, CharField, BooleanField, DateField, DateTimeField\n'), ((851, 880), 'haystack.fields.CharField', 'CharField', ([], {'model_attr': '"""title"""'}), "(model_attr='title')\n", (860, 880), False, 'from haystack.fields import IntegerField, CharField, BooleanField, DateField, DateTimeField\n'), ((897, 943), 'haystack.fields.IntegerField', 'IntegerField', ([], {'model_attr': '"""issue__community_id"""'}), "(model_attr='issue__community_id')\n", (909, 943), False, 'from haystack.fields import IntegerField, CharField, BooleanField, DateField, DateTimeField\n'), ((957, 990), 'haystack.fields.IntegerField', 'IntegerField', ([], {'model_attr': '"""status"""'}), "(model_attr='status')\n", (969, 990), False, 'from haystack.fields import IntegerField, CharField, BooleanField, DateField, DateTimeField\n'), ((1012, 1053), 'haystack.fields.BooleanField', 'BooleanField', ([], {'model_attr': '"""task_completed"""'}), "(model_attr='task_completed')\n", (1024, 1053), False, 'from haystack.fields import IntegerField, CharField, 
BooleanField, DateField, DateTimeField\n'), ((1065, 1096), 'haystack.fields.IntegerField', 'IntegerField', ([], {'model_attr': '"""type"""'}), "(model_attr='type')\n", (1077, 1096), False, 'from haystack.fields import IntegerField, CharField, BooleanField, DateField, DateTimeField\n'), ((1114, 1129), 'haystack.fields.DateTimeField', 'DateTimeField', ([], {}), '()\n', (1127, 1129), False, 'from haystack.fields import IntegerField, CharField, BooleanField, DateField, DateTimeField\n'), ((1145, 1156), 'haystack.fields.CharField', 'CharField', ([], {}), '()\n', (1154, 1156), False, 'from haystack.fields import IntegerField, CharField, BooleanField, DateField, DateTimeField\n'), ((1170, 1211), 'haystack.fields.DateField', 'DateField', ([], {'model_attr': '"""due_by"""', 'null': '(True)'}), "(model_attr='due_by', null=True)\n", (1179, 1211), False, 'from haystack.fields import IntegerField, CharField, BooleanField, DateField, DateTimeField\n'), ((1234, 1276), 'haystack.fields.BooleanField', 'BooleanField', ([], {'model_attr': '"""is_confidential"""'}), "(model_attr='is_confidential')\n", (1246, 1276), False, 'from haystack.fields import IntegerField, CharField, BooleanField, DateField, DateTimeField\n'), ((646, 668), 'issues.models.Issue.objects.active', 'Issue.objects.active', ([], {}), '()\n', (666, 668), False, 'from issues.models import Issue, Proposal\n'), ((1795, 1820), 'issues.models.Proposal.objects.active', 'Proposal.objects.active', ([], {}), '()\n', (1818, 1820), False, 'from issues.models import Issue, Proposal\n')] |
#
# For licensing see accompanying LICENSE file.
# Copyright (C) 2022 Apple Inc. All Rights Reserved.
#
import argparse
from typing import Optional
from data.sampler import arguments_sampler
from data.collate_fns import arguments_collate_fn
from options.utils import load_config_file
from data.datasets import arguments_dataset
from cvnets import arguments_model, arguments_nn_layers, arguments_ema
from cvnets.anchor_generator import arguments_anchor_gen
from loss_fn import arguments_loss_fn
from optim import arguments_optimizer
from optim.scheduler import arguments_scheduler
from common import SUPPORTED_MODALITIES
from data.transforms import arguments_augmentation
from metrics import arguments_stats
from data.video_reader import arguments_video_reader
from cvnets.matcher_det import arguments_box_matcher
from utils import logger
class ParseKwargs(argparse.Action):
    """argparse action that parses ``key=value`` override pairs.

    Each value is coerced to the type of the entry already present in the
    namespace under the same key; keys not present in the namespace are
    silently skipped. The resulting dict is stored on the namespace as
    ``override_args`` (``None`` when no pairs were supplied).

    Fixes over the previous version:
      * splits on the *first* ``=`` only, so values may themselves
        contain ``=`` (e.g. paths or query strings);
      * malformed tokens are skipped explicitly instead of falling
        through with a half-parsed ``key=None``;
      * the bare ``except:`` is narrowed to ``ValueError``.
    """

    def __call__(self, parser, namespace, values, option_string=None):
        namespace_dict = vars(namespace)
        if not values:
            setattr(namespace, "override_args", None)
            return
        override_dict = {}
        # values are list of key-value pairs
        for pair in values:
            try:
                # maxsplit=1 so the value part may contain '=' characters.
                key, value = pair.split("=", 1)
            except ValueError:
                logger.error(
                    "For override arguments, a key-value pair of the form key=value is expected"
                )
                continue  # skip malformed tokens
            if key not in namespace_dict:
                continue
            current = namespace_dict[key]
            if current is None:
                # No existing value to infer a type from.
                value = self._parse_untyped(value)
            else:
                try:
                    value = self._coerce(value, type(current))
                except ValueError:
                    logger.warning(
                        "Type mismatch while over-riding. Skipping key: {}".format(key)
                    )
                    continue
            override_dict[key] = value
        setattr(namespace, "override_args", override_dict)

    @staticmethod
    def _parse_untyped(value):
        """Parse a value whose target type is unknown.

        A comma-separated value becomes a list (of ints when every element
        parses as one, otherwise strings); a single value stays a string,
        with '' and 'none' mapped to ``None``.
        """
        parts = value.split(",")
        if len(parts) == 1:
            single = str(parts[0])
            if single == "" or single.lower() == "none":
                return None
            return single
        try:
            return [int(v) for v in parts]
        except ValueError:
            # Not all integers: keep the list of strings as-is.
            return parts

    @staticmethod
    def _coerce(value, target_type):
        """Coerce string *value* to *target_type*.

        'true'/'false' (case-insensitive) become booleans regardless of the
        target type, matching the original behaviour. Raises ValueError on
        conversion failure.
        """
        if value.lower() == "true":
            return True
        if value.lower() == "false":
            return False
        return target_type(value)
def arguments_common(parser: argparse.ArgumentParser) -> argparse.ArgumentParser:
    """Register arguments shared by every entry point (training, eval, conversion).

    All flags live under the ``--common.*`` prefix; argparse stores them on
    the namespace with dotted attribute names (e.g. ``common.seed``).

    :param parser: parser to extend
    :returns: the same parser, for chaining
    """
    group = parser.add_argument_group(
        title="Common arguments", description="Common arguments"
    )
    group.add_argument("--common.seed", type=int, default=0, help="Random seed")
    group.add_argument(
        "--common.config-file", type=str, default=None, help="Configuration file"
    )
    group.add_argument(
        "--common.results-loc",
        type=str,
        default="results",
        help="Directory where results will be stored",
    )
    group.add_argument(
        "--common.run-label",
        type=str,
        default="run_1",
        help="Label id for the current run",
    )
    group.add_argument(
        "--common.resume", type=str, default=None, help="Resume location"
    )
    # NOTE: these two flags use an underscore ("finetune_imagenet1k") unlike the
    # dash convention elsewhere; kept as-is for backward compatibility.
    group.add_argument(
        "--common.finetune_imagenet1k",
        type=str,
        default=None,
        help="Checkpoint location to be used for finetuning",
    )
    group.add_argument(
        "--common.finetune_imagenet1k-ema",
        type=str,
        default=None,
        help="EMA Checkpoint location to be used for finetuning",
    )
    group.add_argument(
        "--common.mixed-precision", action="store_true", help="Mixed precision training"
    )
    # Gradient accumulation: effective batch = batch * accum-freq.
    group.add_argument(
        "--common.accum-freq",
        type=int,
        default=1,
        help="Accumulate gradients for this number of iterations",
    )
    group.add_argument(
        "--common.accum-after-epoch",
        type=int,
        default=0,
        help="Start accumulation after this many epochs",
    )
    group.add_argument(
        "--common.log-freq",
        type=int,
        default=100,
        help="Display after these many iterations",
    )
    group.add_argument(
        "--common.auto-resume",
        action="store_true",
        help="Resume training from the last checkpoint",
    )
    group.add_argument(
        "--common.grad-clip", type=float, default=None, help="Gradient clipping value"
    )
    group.add_argument(
        "--common.k-best-checkpoints",
        type=int,
        default=5,
        help="Keep k-best checkpoints",
    )
    group.add_argument(
        "--common.inference-modality",
        type=str,
        default="image",
        choices=SUPPORTED_MODALITIES,
        help="Inference modality. Image or videos",
    )
    group.add_argument(
        "--common.channels-last",
        action="store_true",
        default=False,
        help="Use channel last format during training. "
        "Note 1: that some models may not support it, so we recommend to use it with caution"
        "Note 2: Channel last format does not work with 1-, 2-, and 3- tensors. "
        "Therefore, we support it via custom collate functions",
    )
    group.add_argument(
        "--common.tensorboard-logging",
        action="store_true",
        help="Enable tensorboard logging",
    )
    group.add_argument(
        "--common.bolt-logging", action="store_true", help="Enable bolt logging"
    )
    # Free-form key=value overrides, parsed by the ParseKwargs action into
    # a dict stored as ``override_args`` on the namespace.
    group.add_argument(
        "--common.override-kwargs",
        nargs="*",
        action=ParseKwargs,
        help="Override arguments. Example. To override the value of --sampler.vbs.crop-size-width, "
        "we can pass override argument as "
        "--common.override-kwargs sampler.vbs.crop_size_width=512 \n "
        "Note that keys in override arguments do not contain -- or -",
    )
    group.add_argument(
        "--common.enable-coreml-compatible-module",
        action="store_true",
        help="Use coreml compatible modules (if applicable) during inference",
    )
    group.add_argument(
        "--common.debug-mode",
        action="store_true",
        help="You can use this flag for debugging purposes.",
    )
    return parser
def arguments_ddp(parser: argparse.ArgumentParser) -> argparse.ArgumentParser:
    """Register distributed-data-parallel (DDP) options on ``parser``.

    :param parser: parser to extend
    :returns: the same parser, for chaining
    """
    group = parser.add_argument_group(
        title="DDP arguments", description="DDP arguments"
    )
    # (flag, add_argument keyword arguments), registered in order.
    ddp_options = (
        ("--ddp.disable",
         dict(action="store_true", help="Don't use DDP")),
        ("--ddp.rank",
         dict(type=int, default=0, help="Node rank for distributed training")),
        ("--ddp.world-size",
         dict(type=int, default=-1, help="World size for DDP")),
        ("--ddp.dist-url",
         dict(type=str, default=None, help="DDP URL")),
        ("--ddp.dist-port",
         dict(type=int, default=30786,
              help="DDP Port. Only used when --ddp.dist-url is not specified")),
        ("--ddp.device-id",
         dict(type=int, default=None, help="Device ID")),
        ("--ddp.no-spawn",
         dict(action="store_true", help="Don't use DDP with spawn")),
        ("--ddp.backend",
         dict(type=str, default="nccl", help="DDP backend. Default is nccl")),
        ("--ddp.find-unused-params",
         dict(action="store_true",
              help="Find unused params in model. useful for debugging with DDP")),
    )
    for flag, options in ddp_options:
        group.add_argument(flag, **options)
    return parser
def get_training_arguments(parse_args: Optional[bool] = True):
    """Build the full training argument parser.

    When ``parse_args`` is True (the default), the command line is parsed
    and the resulting options, merged with any config file, are returned;
    otherwise the un-parsed parser is returned so callers can extend it.
    """
    parser = argparse.ArgumentParser(description="Training arguments", add_help=True)
    # Each registrar extends the parser with one subsystem's options.
    argument_registrars = (
        arguments_sampler,       # sampler
        arguments_dataset,       # dataset
        arguments_anchor_gen,    # anchor generator
        arguments_box_matcher,   # box matcher
        arguments_video_reader,  # video reader
        arguments_collate_fn,    # collate fn
        arguments_augmentation,  # transforms
        arguments_nn_layers,     # model layers
        arguments_model,         # model
        arguments_ema,           # EMA
        arguments_loss_fn,       # loss function
        arguments_optimizer,     # optimizer
        arguments_scheduler,     # LR scheduler
        arguments_ddp,           # DDP
        arguments_stats,         # stats
        arguments_common,        # common
    )
    for registrar in argument_registrars:
        parser = registrar(parser=parser)
    if not parse_args:
        return parser
    opts = parser.parse_args()
    return load_config_file(opts)
def get_eval_arguments(parse_args=True):
    """Evaluation uses the same argument set as training; see get_training_arguments."""
    return get_training_arguments(parse_args=parse_args)
def get_conversion_arguments():
    """Return parsed options for model conversion (CoreML export).

    Extends the training parser with ``--conversion.*`` flags, then parses
    the command line and merges any config file.
    """
    parser = get_training_arguments(parse_args=False)
    # Arguments related to coreml conversion
    group = parser.add_argument_group("Conversion arguments")
    group.add_argument(
        "--conversion.coreml-extn",
        type=str,
        default="mlmodel",
        help="Extension for converted model. Default is mlmodel",
    )
    group.add_argument(
        "--conversion.input-image-path",
        type=str,
        default=None,
        help="Path of the image to be used for conversion",
    )
    # Arguments related to server.
    group.add_argument(
        "--conversion.bucket-name", type=str, help="Model job's bucket name"
    )
    group.add_argument("--conversion.task-id", type=str, help="Model job's id")
    group.add_argument(
        "--conversion.viewers",
        type=str,
        nargs="+",
        default=None,
        help="Users who can view your models on server",
    )
    # parse args
    opts = parser.parse_args()
    opts = load_config_file(opts)
    return opts
def get_benchmarking_arguments():
    """Return parsed options for benchmarking runs.

    Extends the training parser with ``--benchmark.*`` flags, then parses
    the command line and merges any config file.
    """
    parser = get_training_arguments(parse_args=False)
    group = parser.add_argument_group("Benchmarking arguments")
    group.add_argument(
        "--benchmark.batch-size",
        type=int,
        default=1,
        help="Batch size for benchmarking",
    )
    # Warm-up iterations are executed but excluded from timing.
    group.add_argument(
        "--benchmark.warmup-iter", type=int, default=10, help="Warm-up iterations"
    )
    group.add_argument(
        "--benchmark.n-iter",
        type=int,
        default=100,
        help="Number of iterations for benchmarking",
    )
    group.add_argument(
        "--benchmark.use-jit-model",
        action="store_true",
        help="Convert the model to JIT and then benchmark it",
    )
    # parse args
    opts = parser.parse_args()
    opts = load_config_file(opts)
    return opts


# Backward-compatible alias: the original public name was misspelled
# ("bencmarking"); existing callers keep working through this binding.
get_bencmarking_arguments = get_benchmarking_arguments
def get_segmentation_eval_arguments():
    """Return parsed options for segmentation evaluation.

    Extends the training parser with ``--evaluation.segmentation.*`` flags,
    then parses the command line and merges any config file.
    """
    parser = get_training_arguments(parse_args=False)
    group = parser.add_argument_group("Segmentation evaluation related arguments")
    group.add_argument(
        "--evaluation.segmentation.apply-color-map",
        action="store_true",
        help="Apply color map to different classes in segmentation masks. Useful in visualization "
        "+ some competitions (e.g, PASCAL VOC) accept submissions with colored segmentation masks",
    )
    group.add_argument(
        "--evaluation.segmentation.save-overlay-rgb-pred",
        action="store_true",
        help="enable this flag to visualize predicted masks on top of input image",
    )
    group.add_argument(
        "--evaluation.segmentation.save-masks",
        action="store_true",
        help="save predicted masks without colormaps. Useful for submitting to "
        "competitions like Cityscapes",
    )
    group.add_argument(
        "--evaluation.segmentation.overlay-mask-weight",
        default=0.5,
        type=float,
        help="Contribution of mask when overlaying on top of RGB image. ",
    )
    # "single_image" / "image_folder" require --evaluation.segmentation.path.
    group.add_argument(
        "--evaluation.segmentation.mode",
        type=str,
        default="validation_set",
        required=False,
        choices=["single_image", "image_folder", "validation_set"],
        help="Contribution of mask when overlaying on top of RGB image. ",
    )
    group.add_argument(
        "--evaluation.segmentation.path",
        type=str,
        default=None,
        help="Path of the image or image folder (only required for single_image and image_folder modes)",
    )
    group.add_argument(
        "--evaluation.segmentation.num-classes",
        type=str,
        default=None,
        help="Number of segmentation classes used during training",
    )
    group.add_argument(
        "--evaluation.segmentation.resize-input-images",
        action="store_true",
        help="Resize input images",
    )
    # parse args
    opts = parser.parse_args()
    opts = load_config_file(opts)
    return opts
def get_detection_eval_arguments():
    """Return parsed options for detection evaluation.

    Extends the training parser with ``--evaluation.detection.*`` flags,
    then parses the command line and merges any config file.
    """
    parser = get_training_arguments(parse_args=False)
    group = parser.add_argument_group("Detection evaluation related arguments")
    # (flag, add_argument keyword arguments), registered in order.
    detection_options = (
        ("--evaluation.detection.save-overlay-boxes",
         dict(action="store_true",
              help="enable this flag to visualize predicted masks on top of input image")),
        ("--evaluation.detection.mode",
         dict(type=str, default="validation_set", required=False,
              choices=["single_image", "image_folder", "validation_set"],
              help="Contribution of mask when overlaying on top of RGB image. ")),
        ("--evaluation.detection.path",
         dict(type=str, default=None,
              help="Path of the image or image folder (only required for single_image and image_folder modes)")),
        ("--evaluation.detection.num-classes",
         dict(type=str, default=None,
              help="Number of segmentation classes used during training")),
        ("--evaluation.detection.resize-input-images",
         dict(action="store_true", default=False,
              help="Resize the input images")),
    )
    for flag, options in detection_options:
        group.add_argument(flag, **options)
    opts = parser.parse_args()
    return load_config_file(opts)
def get_loss_landscape_args():
    """Return parsed options for loss-landscape visualization.

    Extends the training parser with ``--loss-landscape.*`` flags,
    then parses the command line and merges any config file.
    """
    parser = get_training_arguments(parse_args=False)
    group = parser.add_argument_group("Loss landscape related arguments")
    group.add_argument(
        "--loss-landscape.n-points",
        type=int,
        default=11,
        help="No. of grid points. Default is 11, so we have 11x11 grid",
    )
    # The four grid extents share a shape: min/max float bound per axis.
    for axis in ("x", "y"):
        for bound, bound_default in (("min", -1.0), ("max", 1.0)):
            group.add_argument(
                "--loss-landscape.{}-{}".format(bound, axis),
                type=float,
                default=bound_default,
                help="{}. value along {}-axis".format(bound.capitalize(), axis),
            )
    opts = parser.parse_args()
    return load_config_file(opts)
| [
"data.sampler.arguments_sampler",
"metrics.arguments_stats",
"options.utils.load_config_file",
"loss_fn.arguments_loss_fn",
"optim.arguments_optimizer",
"argparse.ArgumentParser",
"cvnets.arguments_model",
"optim.scheduler.arguments_scheduler",
"data.collate_fns.arguments_collate_fn",
"cvnets.anch... | [((8652, 8724), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Training arguments"""', 'add_help': '(True)'}), "(description='Training arguments', add_help=True)\n", (8675, 8724), False, 'import argparse\n'), ((8771, 8803), 'data.sampler.arguments_sampler', 'arguments_sampler', ([], {'parser': 'parser'}), '(parser=parser)\n', (8788, 8803), False, 'from data.sampler import arguments_sampler\n'), ((8850, 8882), 'data.datasets.arguments_dataset', 'arguments_dataset', ([], {'parser': 'parser'}), '(parser=parser)\n', (8867, 8882), False, 'from data.datasets import arguments_dataset\n'), ((8930, 8965), 'cvnets.anchor_generator.arguments_anchor_gen', 'arguments_anchor_gen', ([], {'parser': 'parser'}), '(parser=parser)\n', (8950, 8965), False, 'from cvnets.anchor_generator import arguments_anchor_gen\n'), ((9019, 9055), 'cvnets.matcher_det.arguments_box_matcher', 'arguments_box_matcher', ([], {'parser': 'parser'}), '(parser=parser)\n', (9040, 9055), False, 'from cvnets.matcher_det import arguments_box_matcher\n'), ((9107, 9144), 'data.video_reader.arguments_video_reader', 'arguments_video_reader', ([], {'parser': 'parser'}), '(parser=parser)\n', (9129, 9144), False, 'from data.video_reader import arguments_video_reader\n'), ((9195, 9230), 'data.collate_fns.arguments_collate_fn', 'arguments_collate_fn', ([], {'parser': 'parser'}), '(parser=parser)\n', (9215, 9230), False, 'from data.collate_fns import arguments_collate_fn\n'), ((9279, 9316), 'data.transforms.arguments_augmentation', 'arguments_augmentation', ([], {'parser': 'parser'}), '(parser=parser)\n', (9301, 9316), False, 'from data.transforms import arguments_augmentation\n'), ((9361, 9395), 'cvnets.arguments_nn_layers', 'arguments_nn_layers', ([], {'parser': 'parser'}), '(parser=parser)\n', (9380, 9395), False, 'from cvnets import arguments_model, arguments_nn_layers, arguments_ema\n'), ((9409, 9439), 'cvnets.arguments_model', 'arguments_model', ([], {'parser': 
'parser'}), '(parser=parser)\n', (9424, 9439), False, 'from cvnets import arguments_model, arguments_nn_layers, arguments_ema\n'), ((9453, 9481), 'cvnets.arguments_ema', 'arguments_ema', ([], {'parser': 'parser'}), '(parser=parser)\n', (9466, 9481), False, 'from cvnets import arguments_model, arguments_nn_layers, arguments_ema\n'), ((9526, 9558), 'loss_fn.arguments_loss_fn', 'arguments_loss_fn', ([], {'parser': 'parser'}), '(parser=parser)\n', (9543, 9558), False, 'from loss_fn import arguments_loss_fn\n'), ((9599, 9633), 'optim.arguments_optimizer', 'arguments_optimizer', ([], {'parser': 'parser'}), '(parser=parser)\n', (9618, 9633), False, 'from optim import arguments_optimizer\n'), ((9647, 9681), 'optim.scheduler.arguments_scheduler', 'arguments_scheduler', ([], {'parser': 'parser'}), '(parser=parser)\n', (9666, 9681), False, 'from optim.scheduler import arguments_scheduler\n'), ((9781, 9811), 'metrics.arguments_stats', 'arguments_stats', ([], {'parser': 'parser'}), '(parser=parser)\n', (9796, 9811), False, 'from metrics import arguments_stats\n'), ((11142, 11164), 'options.utils.load_config_file', 'load_config_file', (['opts'], {}), '(opts)\n', (11158, 11164), False, 'from options.utils import load_config_file\n'), ((11972, 11994), 'options.utils.load_config_file', 'load_config_file', (['opts'], {}), '(opts)\n', (11988, 11994), False, 'from options.utils import load_config_file\n'), ((14044, 14066), 'options.utils.load_config_file', 'load_config_file', (['opts'], {}), '(opts)\n', (14060, 14066), False, 'from options.utils import load_config_file\n'), ((15376, 15398), 'options.utils.load_config_file', 'load_config_file', (['opts'], {}), '(opts)\n', (15392, 15398), False, 'from options.utils import load_config_file\n'), ((16398, 16420), 'options.utils.load_config_file', 'load_config_file', (['opts'], {}), '(opts)\n', (16414, 16420), False, 'from options.utils import load_config_file\n'), ((9962, 9984), 'options.utils.load_config_file', 'load_config_file', 
(['opts'], {}), '(opts)\n', (9978, 9984), False, 'from options.utils import load_config_file\n'), ((1290, 1390), 'utils.logger.error', 'logger.error', (['"""For override arguments, a key-value pair of the form key=value is expected"""'], {}), "(\n 'For override arguments, a key-value pair of the form key=value is expected'\n )\n", (1302, 1390), False, 'from utils import logger\n')] |
from django.core.management.base import BaseCommand
from django.contrib.auth import get_user_model
from django.template.defaultfilters import slugify, linebreaks, date, truncatechars
from wagtail.core.models import Page
from wagtail.core.rich_text import RichText
from migration.models import *
from accounts.models import CompsocUser, ShellAccount, DatabaseAccount
from blog.models import BlogPage
from events.models import EventSignup, EventPage, EventType
import time
# Maps the legacy ``Communication.type`` code to the human-readable tag
# applied to migrated blog pages (see migrate_old_posts).
COMMS_DICT = {
    'NL': 'Newsletter',
    'M': 'Meeting Minutes',
    'N': 'News Item'
}
def migrate_compsoc_memberinfo():
    """Copy legacy user details into the new account models.

    Shell and database accounts migrate one-to-one; nickname and website
    records are merged per legacy user id into a single CompsocUser.
    """
    old_websites = WebsiteDetails.objects.using('old_data').all()
    old_nicks = NicknameDetails.objects.using('old_data').all()
    old_shell_accounts = OldShellAccount.objects.using('old_data').all()
    old_db_accounts = OldDatabaseAccount.objects.using('old_data').all()
    # Shell accounts migrate one-to-one.
    for old_account in old_shell_accounts:
        owner = get_user_model().objects.filter(id=old_account.user_id).first()
        ShellAccount(
            name=old_account.name, user=owner, status=old_account.status
        ).save()
    # Database accounts migrate one-to-one.
    for old_account in old_db_accounts:
        owner = get_user_model().objects.filter(id=old_account.user_id).first()
        DatabaseAccount(
            name=old_account.name, user=owner, status=old_account.status
        ).save()
    # Merge nickname and website details, keyed by the legacy user id.
    profiles = {}
    for nick in old_nicks:
        profiles[nick.user_id] = {
            'nickname': nick.nickname,
            'website_title': '',
            'website_url': '',
        }
    for site in old_websites:
        entry = profiles.setdefault(
            site.user_id,
            {'nickname': '', 'website_title': '', 'website_url': ''},
        )
        entry['website_title'] = site.websiteTitle
        entry['website_url'] = site.websiteUrl
    # Persist the merged profiles as CompsocUser rows.
    for uid, details in profiles.items():
        owner = get_user_model().objects.filter(id=uid).first()
        profile = CompsocUser(
            nickname=details['nickname'],
            website_title=details['website_title'],
            website_url=details['website_url'],
            user=owner,
        )
        print('Restoring {user}'.format(user=profile))
        profile.save()
def migrate_old_posts():
    """Convert legacy ``Communication`` rows into published Wagtail blog pages."""
    # id=4 is the specific page ID for the news index page
    news_index = Page.objects.get(id=4).specific
    archived_posts = Communication.objects.using('old_data').all().order_by('date')
    publisher = get_user_model().objects.get(id=1)
    for old_post in archived_posts:
        title = old_post.title or 'Archived item from {date}'.format(
            date=date(old_post.date, 'D jS F Y'))
        # Millisecond timestamp keeps slugs unique across identical titles.
        slug = slugify('{title} - {rand}'.format(
            title=title, rand=int(round(time.time() * 1000))))
        body_text = old_post.text
        intro = body_text if len(body_text) <= 512 else body_text[:512] + '...'
        page = BlogPage(
            search_description='',
            seo_title=title,
            show_in_menus=False,
            slug=slug,
            title=title,
            date=old_post.date,
            first_published_at=old_post.date,
            intro=linebreaks(intro),
        )
        page.body.stream_data = [
            ('paragraph', RichText('<p>{body}</p>'.format(body=linebreaks(body_text)))),
        ]
        page.tags.add(COMMS_DICT[old_post.type])
        print('Restoring article from {date}'.format(date=old_post.date))
        news_index.add_child(instance=page)
        page.save_revision(
            user=publisher,
            submitted_for_moderation=False,
        ).publish()
        page.save()
def migrate_events():
    """Migrate legacy events (and their signups) into Wagtail EventPages.

    For each old event: resolve or create its EventType, build and publish
    an EventPage under the events index (page id 6), then copy the event's
    signups across.
    """
    # id=6 is the events index page under which migrated events are created.
    event_index = Page.objects.get(id=6).specific
    user = get_user_model().objects.get(id=1)
    old_events = OldEvent.objects.using('old_data').all()
    # Migrate events
    for old_event in old_events:
        old_event_type = old_event.type
        try:
            # We don't actually care about this - its a test to migrate the event across
            event_type = EventType.objects.get(name=old_event_type.name, target=old_event_type.target)
        except EventType.DoesNotExist:
            event_type = EventType(name=old_event_type.name, target=old_event_type.target)
            event_type.save()
        title = '{type} on {date}'.format(type=old_event_type.name, date=date(old_event.start, 'D jS F Y'))
        # Millisecond timestamp keeps slugs unique across identical titles.
        slug = slugify('{title} - {rand}'.format(title=title, rand=int(round(time.time() * 1000))))
        # Description fallback chain: event short description, then type
        # info, then the type name.
        if old_event.shortDescription:
            description = old_event.shortDescription
        else:
            if old_event_type.info:
                description = old_event_type.info
            else:
                description = old_event_type.name
        new_event = EventPage(
            title=title.strip(),
            slug=slug,
            description=description.strip(),
            start=old_event.start,
            finish=old_event.finish,
            cancelled=old_event.cancelled,
            category=event_type,
            location=old_event.location.name
        )
        new_event.body.stream_data = [
            ('paragraph', RichText('<p>{body}</p>'.format(body=linebreaks(old_event.longDescription))))
        ]
        print('Restoring event {type} from {date}'.format(type=old_event.type.name, date=old_event.start))
        event_index.add_child(instance=new_event)
        revision = new_event.save_revision(
            user=user,
            submitted_for_moderation=False
        )
        revision.publish()
        new_event.save()
        # Deal with signups
        old_signups = Signup.objects.using('old_data').filter(event_id=old_event.id)
        for old_signup in old_signups:
            print('Restoring signup for {type} from {date}'.format(type=old_event.type.name, date=old_event.start))
            # Comments are capped at 1024 chars to fit the new field.
            new_signup = EventSignup(comment=truncatechars(old_signup.comment, 1024),
                                      member=get_user_model().objects.get(id=old_signup.user_id),
                                      event_id=new_event.id, signup_created=old_signup.time)
            new_signup.save()
class Command(BaseCommand):
    """Management command that replays all legacy-data migrations."""

    def handle(self, *args, **options):
        # Run each migration step in sequence.
        for migration_step in (
            migrate_compsoc_memberinfo,
            migrate_old_posts,
            migrate_events,
        ):
            migration_step()
| [
"events.models.EventType.objects.get",
"django.contrib.auth.get_user_model",
"accounts.models.DatabaseAccount",
"django.template.defaultfilters.linebreaks",
"django.template.defaultfilters.date",
"django.template.defaultfilters.truncatechars",
"events.models.EventType",
"accounts.models.ShellAccount",... | [((1142, 1207), 'accounts.models.ShellAccount', 'ShellAccount', ([], {'name': 'account.name', 'user': 'user', 'status': 'account.status'}), '(name=account.name, user=user, status=account.status)\n', (1154, 1207), False, 'from accounts.models import CompsocUser, ShellAccount, DatabaseAccount\n'), ((1390, 1458), 'accounts.models.DatabaseAccount', 'DatabaseAccount', ([], {'name': 'account.name', 'user': 'user', 'status': 'account.status'}), '(name=account.name, user=user, status=account.status)\n', (1405, 1458), False, 'from accounts.models import CompsocUser, ShellAccount, DatabaseAccount\n'), ((2378, 2511), 'accounts.models.CompsocUser', 'CompsocUser', ([], {'nickname': "details['nickname']", 'website_title': "details['website_title']", 'website_url': "details['website_url']", 'user': 'user'}), "(nickname=details['nickname'], website_title=details[\n 'website_title'], website_url=details['website_url'], user=user)\n", (2389, 2511), False, 'from accounts.models import CompsocUser, ShellAccount, DatabaseAccount\n'), ((2822, 2844), 'wagtail.core.models.Page.objects.get', 'Page.objects.get', ([], {'id': '(4)'}), '(id=4)\n', (2838, 2844), False, 'from wagtail.core.models import Page\n'), ((4169, 4191), 'wagtail.core.models.Page.objects.get', 'Page.objects.get', ([], {'id': '(6)'}), '(id=6)\n', (4185, 4191), False, 'from wagtail.core.models import Page\n'), ((4528, 4605), 'events.models.EventType.objects.get', 'EventType.objects.get', ([], {'name': 'old_event_type.name', 'target': 'old_event_type.target'}), '(name=old_event_type.name, target=old_event_type.target)\n', (4549, 4605), False, 'from events.models import EventSignup, EventPage, EventType\n'), ((2944, 2960), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (2958, 2960), False, 'from django.contrib.auth import get_user_model\n'), ((3646, 3663), 'django.template.defaultfilters.linebreaks', 'linebreaks', (['intro'], {}), '(intro)\n', (3656, 3663), False, 
'from django.template.defaultfilters import slugify, linebreaks, date, truncatechars\n'), ((4212, 4228), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (4226, 4228), False, 'from django.contrib.auth import get_user_model\n'), ((4670, 4735), 'events.models.EventType', 'EventType', ([], {'name': 'old_event_type.name', 'target': 'old_event_type.target'}), '(name=old_event_type.name, target=old_event_type.target)\n', (4679, 4735), False, 'from events.models import EventSignup, EventPage, EventType\n'), ((4840, 4873), 'django.template.defaultfilters.date', 'date', (['old_event.start', '"""D jS F Y"""'], {}), "(old_event.start, 'D jS F Y')\n", (4844, 4873), False, 'from django.template.defaultfilters import slugify, linebreaks, date, truncatechars\n'), ((3135, 3162), 'django.template.defaultfilters.date', 'date', (['post.date', '"""D jS F Y"""'], {}), "(post.date, 'D jS F Y')\n", (3139, 3162), False, 'from django.template.defaultfilters import slugify, linebreaks, date, truncatechars\n'), ((6372, 6411), 'django.template.defaultfilters.truncatechars', 'truncatechars', (['old_signup.comment', '(1024)'], {}), '(old_signup.comment, 1024)\n', (6385, 6411), False, 'from django.template.defaultfilters import slugify, linebreaks, date, truncatechars\n'), ((1060, 1076), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (1074, 1076), False, 'from django.contrib.auth import get_user_model\n'), ((1308, 1324), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (1322, 1324), False, 'from django.contrib.auth import get_user_model\n'), ((2307, 2323), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (2321, 2323), False, 'from django.contrib.auth import get_user_model\n'), ((3773, 3794), 'django.template.defaultfilters.linebreaks', 'linebreaks', (['post.text'], {}), '(post.text)\n', (3783, 3794), False, 'from django.template.defaultfilters import slugify, linebreaks, date, truncatechars\n'), 
((5675, 5712), 'django.template.defaultfilters.linebreaks', 'linebreaks', (['old_event.longDescription'], {}), '(old_event.longDescription)\n', (5685, 5712), False, 'from django.template.defaultfilters import slugify, linebreaks, date, truncatechars\n'), ((3242, 3253), 'time.time', 'time.time', ([], {}), '()\n', (3251, 3253), False, 'import time\n'), ((4952, 4963), 'time.time', 'time.time', ([], {}), '()\n', (4961, 4963), False, 'import time\n'), ((6457, 6473), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (6471, 6473), False, 'from django.contrib.auth import get_user_model\n')] |
import flask
import restea.formats as formats
from restea.adapters.base import (
BaseResourceWrapper,
BaseRequestWrapper,
)
class FlaskRequestWrapper(BaseRequestWrapper):
    '''
    Adapter that exposes a Flask request through the restea
    request-wrapper interface.
    '''
    @property
    def data(self):
        '''
        Raw request payload, decoded to text
        :returns: string -- raw value of payload sent to server
        '''
        raw = self._original_request.data
        return raw.decode()
    @property
    def method(self):
        '''
        HTTP verb of the current request
        :returns: string -- HTTP method name
        '''
        request = self._original_request
        return request.method
    @property
    def headers(self):
        '''
        Headers received with the request
        :returns: dict -- received request headers
        '''
        request = self._original_request
        return request.headers
    def get(self, value):
        '''
        Looks up a single key in the HTTP GET values
        :param value: string -- key from GET
        :returns: string -- value from GET or None if nothing is found
        '''
        values = self._original_request.values
        return values.get(value)
class FlaskResourceWrapper(BaseResourceWrapper):
    '''
    Exposes a `restea.Resource` through Flask's view API: registers the
    URL routes and converts dispatch results into Flask responses.
    '''
    @property
    def app(self):
        '''
        Flask application currently handling the request
        :returns: :class: `app.Flask` -- current Flask app
        '''
        return flask.current_app
    def wrap_request(self, *args, **kwargs):
        '''
        Builds the formatter and request wrapper, hands control to the
        `restea.Resource` object and packs the result for Flask
        :returns: :class: `flask.Response`
        '''
        data_format, kwargs = self._get_format_name(kwargs)
        wrapped_request = FlaskRequestWrapper(flask.request)
        resource = self._resource_class(
            wrapped_request, formats.get_formatter(data_format)
        )
        body, status, mime = resource.dispatch(*args, **kwargs)
        return flask.Response(body, mimetype=mime, status=status)
    def __adapt_path(self, path):
        '''
        Normalizes a route path to the leading-slash form Flask expects
        :param path: string -- route path
        :returns: string -- normalized route path
        '''
        return path if path.startswith('/') else '/' + path
    def get_routes(self, path='', iden='<iden>'):
        '''
        Registers the URL rules for the given REST resource
        :param path: string -- base path for the REST resource
        :param iden: string -- format for identifier, for instance might be
        used to make composite identifier
        '''
        path = self.__adapt_path(path)
        routes = (
            '{}'.format(path),
            '{}/{}'.format(path, iden),
            '{}.<data_format>'.format(path),
            '{}/{}.<data_format>'.format(path, iden),
        )
        allowed_methods = [m.upper() for m in self._resource_class.method_map]
        for route in routes:
            self.app.add_url_rule(
                route,
                view_func=self.wrap_request,
                methods=allowed_methods,
            )
| [
"restea.formats.get_formatter",
"flask.Response"
] | [((1781, 1815), 'restea.formats.get_formatter', 'formats.get_formatter', (['data_format'], {}), '(data_format)\n', (1802, 1815), True, 'import restea.formats as formats\n'), ((2018, 2080), 'flask.Response', 'flask.Response', (['res'], {'mimetype': 'content_type', 'status': 'status_code'}), '(res, mimetype=content_type, status=status_code)\n', (2032, 2080), False, 'import flask\n')] |
from datetime import datetime
from tts import tts
def take_notes(speech_text):
    """Append a note extracted from *speech_text* to notes.txt.

    The trigger word "note" is stripped from the message (if present),
    the remainder is stored with a dd-mm-yy timestamp, and the save is
    confirmed audibly via tts().

    :param speech_text: str -- recognized speech, e.g. "note buy milk"
    """
    words_of_message = speech_text.split()
    # Guard the removal: previously a command without the word "note"
    # raised ValueError from list.remove().
    if "note" in words_of_message:
        words_of_message.remove("note")
    cleaned_message = ' '.join(words_of_message)
    timestamp = datetime.now().strftime("%d-%m-%y")
    # Context manager guarantees the handle is closed even if the write
    # raises; plain append mode "a" suffices (the old "a+" read access
    # was never used).
    with open("notes.txt", "a") as f:
        f.write("'" + cleaned_message + "'" + " - note taken at: " + timestamp + "\n")
    tts("Your note has been saved")
def show_all_notes():
    """Read notes.txt and speak its entire contents via tts()."""
    tts("Your notes are as follows: ")
    # Context manager closes the file even if tts() raises.  open()
    # defaults to read mode, so the old `if f.mode == "r"` check was
    # always true and has been dropped.
    with open("notes.txt", "r") as f:
        contents = f.read()
    tts(contents)
def delete_all_notes():
    """Erase every saved note by truncating notes.txt."""
    # Opening in "w" mode truncates (or creates) the file; writing an
    # empty string explicitly was a no-op, and `with` guarantees the
    # handle is closed.
    with open("notes.txt", "w"):
        pass
    tts("All notes have been deleted")
| [
"datetime.datetime.now",
"tts.tts"
] | [((377, 408), 'tts.tts', 'tts', (['"""Your note has been saved"""'], {}), "('Your note has been saved')\n", (380, 408), False, 'from tts import tts\n'), ((436, 470), 'tts.tts', 'tts', (['"""Your notes are as follows: """'], {}), "('Your notes are as follows: ')\n", (439, 470), False, 'from tts import tts\n'), ((742, 776), 'tts.tts', 'tts', (['"""All notes have been deleted"""'], {}), "('All notes have been deleted')\n", (745, 776), False, 'from tts import tts\n'), ((560, 573), 'tts.tts', 'tts', (['contents'], {}), '(contents)\n', (563, 573), False, 'from tts import tts\n'), ((323, 337), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (335, 337), False, 'from datetime import datetime\n')] |
#!/usr/bin/env python
# coding: utf-8
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Jan 5 2021
@author: <NAME>
based on the Iso-MPS codes
"""
#%% -- IMPORTS --
import sys
sys.path.append("..") # import one subdirectory up in files
# external packages
import numpy as np
import qiskit as qk
import networkx as nx
import tenpy
# custom things
from networks.isonetwork import IsoTensor, IsoNetwork, QKParamCircuit
import mps.mps as mps
#%%
class IsoMERA(IsoNetwork):
    """
    Isometric MERA network built from parameterized quantum circuits.

    Defined by
    - number of physical and bond qubits (sets up associated quantum registers accordingly)
    - smax - number of MERA layers
    - L - number of times to repeat the unit cell
    - circuits for each site in the unit cell, and initial state of bond-qubits
    """
    def __init__(self,
                 preg,
                 breg,
                 pcircs,
                 smax, # number of MERA layers
                 **kwargs):
        """
        inputs:
            preg, list of lists of physical qubit registers on each site;
                notice that in MERA setting we require len(preg) = 2^(smax-1)
            breg, list of lists of bond qubit registers on each site;
                notice that in MERA setting we require len(breg) = smax
                (for qiskit: register= quantum register)
            smax, # of layers; count from 0 to smax-1; total smax layers
            pcircs, list, of parameterized circuit objects:
                pcircs[0] - boundary circuit (acting only on bond-qubits)
                pcircs[1...l_uc] for each site in unit-cell
            param_names,list of sympy symbols, parameterized gate parameters (shared by all tensors)
            L, int (default=1), Length of System (number of times to repeat unit cell)
            bdry_circ, boundary vector circuit for prepping initial state of bond-qubits
            circuit_format, str, (default='cirq'), type of circuit editor/simulator used
        """
        # here, pcircs is a list of lists with length 1,2,4...2^(smax-1), respectively
        # self.n_params = len(param_names)
        # parse kwargs that don't depend on circuit_format
        if 'circuit_format' in kwargs.keys():
            self.circuit_format = kwargs['circuit_format']
        else:
            self.circuit_format = 'qiskit'
        if 'L' in kwargs.keys():
            self.L = kwargs['L']
        else:
            self.L=1
        if self.circuit_format == 'qiskit':
            # setup classical registers for measurement outcomes:
            # one classical register per bottom-layer site, per unit cell
            self.cregs = [[qk.ClassicalRegister(len(preg[z]))for z in range(2**(smax-1))]#label the registers on each layer
                            for x in range(self.L)]
            self.nphys = 0
            self.nbond = 0
            for i in range(len(preg)):
                self.nphys += len(preg[i]) # number of physical qubits
            for i in range(len(breg)):
                self.nbond += len(breg[i]) # number of bond qubits
            if 'boundary_circuit' in kwargs.keys():
                bdry_circ = kwargs['boundary_circuit'] #this, as well, has to be a list (one circuit per layer)
            else:
                # default: empty (identity) boundary circuit for every layer
                bdry_circ = [QKParamCircuit(qk.QuantumCircuit(), []) for i in range(smax)]
            # boundary tensors: one per layer, acting only on that layer's bond register
            self.bdry_tensor = [IsoTensor('v_L'+str(i),
                                      [breg[i]],
                                      bdry_circ[i]) for i in range(smax)]
            # measurement list helper: only the bottom layer (y == smax-1)
            # is measured, into the classical register of site (x, z)
            def mlist(preg,x,y,z):
                if y == smax-1:
                    meas_list=[(preg,self.cregs[x][z],qk.QuantumCircuit())]
                else:
                    meas_list=[]
                return meas_list
            # site tensors: layer y holds 2^y nodes; each acts on one
            # physical register and that layer's bond register
            self.sites= [[[IsoTensor('A'+str(x)+str(y)+str(z),
                                    [preg[z],breg[y]],
                                    pcircs[y][z],
                                    meas_list=mlist(preg[z],x,y,z) )
                        for z in range(2**(y))]#label the nodes on each layer
                        for y in range(smax)]#label the layers
                        for x in range(self.L)]
            # setup IsoNetwork
            # make a flat list of nodes
            self.nodes = self.bdry_tensor
            for x in range(self.L):
                for y in range(smax):
                    self.nodes += self.sites[x][y]
            # edges: boundary -> first site of each layer
            self.edges = [(self.bdry_tensor[i],self.sites[0][i][0],{'qreg':breg[i]}) for i in range(smax)]
            # bond-qubit edges along each layer
            self.edges+=[(self.sites[x][y][z],self.sites[x][y][z+1],{'qreg':breg[y]}) for x in range(self.L) for y in range(smax) for z in range (int(2**(y)-1))]
            # physical-qubit edges from layer y down to layer y+1
            self.edges+=[(self.sites[x][y][z],self.sites[x][y+1][int(2*z)],{'qreg':preg[z]}) for x in range(self.L) for y in range(int(smax-1)) for z in range(int(2**(y)))]
            # bond-qubit edges connecting consecutive unit cells
            self.edges+=[(self.sites[x][y][int(2**(y-1)-1)],self.sites[x+1][y][0],{'qreg':breg[y]})for x in range(self.L-1) for y in range(int(smax-1))]
            self.qregs = breg+preg
            # construct graph and check that is a DAG
            # check for repeated node names
            self.graph = nx.DiGraph()
            self.graph.add_nodes_from(self.nodes)
            self.graph.add_edges_from(self.edges)
            # check that graph is directed & acyclic (DAG)
            if nx.algorithms.dag.is_directed_acyclic_graph(self.graph) != True:
                raise RuntimeError('Graph must be directed and acyclic')
            # store node information
            # self.creg_dict = creg_dict
            self.node_names = [node.name for node in self.nodes]
            if len(self.node_names) != len(set(self.node_names)):
                raise ValueError('Tensor nodes must have unique names')
            # store variational parameter info
            self.param_assignments = {}
            for node in self.nodes:
                self.param_assignments[node]=node.param_names
            # topologically sort nodes in order of execution
            self.sorted_nodes = [node for node in nx.topological_sort(self.graph)]
        else:
            raise NotImplementedError('only qiskit implemented')
    ## cpu simulation ##
    def left_bdry_vector(self,params):
        """
        computes full unitaries for each state (any initial state for physicalqubit)
        inputs:
            params, dictionary of parameters {'name':numerical-value}
        returns:
            bdry_vec, unitary correspond to boundary
            ulist, list of unitaries for tensors in unit cell
        """
        # NOTE(review): self.bdry_tensor is a *list* of IsoTensors in
        # __init__, but .unitary() is called on it directly here — looks
        # like an Iso-MPS leftover; confirm before use.
        bvec_l = self.bdry_tensor.unitary(params)[:,0] # boundary circuit tensor
        return bvec_l
    def unitaries(self,params):
        """
        computes full unitaries for each state (any initial state for physicalqubit)
        inputs:
            params, dictionary of parameters {'name':numerical-value}
        returns:
            ulist, list of rank-4 tensors for each site in unit cell
        """
        # NOTE(review): self.l_uc is never assigned in __init__ (MPS
        # leftover) — this will raise AttributeError; confirm intended use.
        ulist = [self.sites[j].unitary(params) for j in range(self.l_uc)]
        return ulist
    def tensors(self,params):
        """
        computes tensors for fixed initial state of physical qubit = |0>
        inputs:
            params, dictionary of parameters {'name':numerical-value}
        returns:
            tensors, list of rank-3 tensors for each site in unit cell
        """
        # NOTE(review): also depends on the undefined self.l_uc — see
        # unitaries() above.
        tensors = [self.sites[j].unitary(params)[:,:,0,:] for j in range(self.l_uc)]
        return tensors
    ## Convert to other format(s) ##
    def to_tenpy(self,params,L=1):
        """
        inputs:
            params, dictionary of parameters {'name':numerical-value}
            L, int, number of repetitions of unit cell,
                set to np.inf for iMPS
            TODO: add any other args needed to specify, symmetries, site-type etc...
        outputs:
            tenpy MPS object created from cirq description
        """
        site = tenpy.networks.site.SpinHalfSite(conserve=None)
        if (L==np.inf) and (self.l_uc==1) and (self.nphys==1):
            # infinite MPS: single-site unit cell, axes swapped to
            # tenpy's (physical, left, right) convention
            B = np.swapaxes(self.tensors(params)[0],1,2)
            psi = tenpy.networks.mps.MPS.from_Bflat([site],
                                              [B],
                                              bc='infinite',
                                              dtype=complex,
                                              form=None)
        else:
            B_arrs = [np.swapaxes(tensor,1,2) for tensor in self.tensors(params)]
            # trim the outer bond legs to dimension 1 for finite bc
            B_arrs[0] = B_arrs[0][:,0:1,:]
            B_arrs[-1] = B_arrs[-1][:,:,0:1]
            psi = tenpy.networks.mps.MPS.from_Bflat([site]*L,
                                              B_arrs,
                                              bc = 'finite',
                                              dtype=complex,
                                              form=None)
        psi.canonical_form()
        psi.convert_form(psi.form)
        return psi
    def as_mps(self,params,L=1):
        """
        converts to custom MPS class object
        inputs:
            params, dictionary of parameters {'name':numerical-value}
            L, int, number of repetitions of unit cell,
                set to np.inf for iMPS
        outputs:
            custom MPS object created from cirq description
        """
        tensors = self.tensors(params)
        bvecl = self.left_bdry_vector(params)
        state = mps.MPS(tensors,L=L,bdry_vecs=[bvecl,None], rcf = True)
        return state
    def as_mpo(self,params):
        """
        converts to custom MPO class object
        inputs:
            params, dictionary of parameters {'name':numerical-value}
        outputs:
            custom MPS object created from cirq description
        """
        # NOTE(review): compute_unitaries / compute_left_bdry_vector are
        # not defined on this class — presumably renamed to unitaries /
        # left_bdry_vector; confirm.
        tensors = self.compute_unitaries(params)
        bvecl = self.compute_left_bdry_vector(params)
        op = mps.MPO(tensors,L=self.L,bdry_vecs=[bvecl,None], rcf = True)
        return op
    ## correlation function sampling ##
    def sample_correlations(self,L,bases,N_samples):
        """
        basis: measurement basis for each site
            possible formats:
                - cirq circuit for physical qubits that maps physical qubits to measurement basis
                - string of
        possible backends:
            'tenpy' - uses
            'qasm' - output qasm script to measure
        inputs:
            options: dictionary with entries specifying:
                burn-in length,
                unit cell length,
                basis to measure in for each site,
                number of samples to take (could be infinite for cpu-simulations)
            backend: whether to run as
        """
        raise NotImplementedError
#%%
| [
"mps.mps.MPO",
"tenpy.networks.site.SpinHalfSite",
"networkx.topological_sort",
"networkx.DiGraph",
"networkx.algorithms.dag.is_directed_acyclic_graph",
"numpy.swapaxes",
"tenpy.networks.mps.MPS.from_Bflat",
"mps.mps.MPS",
"qiskit.QuantumCircuit",
"sys.path.append"
] | [((188, 209), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (203, 209), False, 'import sys\n'), ((8075, 8122), 'tenpy.networks.site.SpinHalfSite', 'tenpy.networks.site.SpinHalfSite', ([], {'conserve': 'None'}), '(conserve=None)\n', (8107, 8122), False, 'import tenpy\n'), ((9621, 9677), 'mps.mps.MPS', 'mps.MPS', (['tensors'], {'L': 'L', 'bdry_vecs': '[bvecl, None]', 'rcf': '(True)'}), '(tensors, L=L, bdry_vecs=[bvecl, None], rcf=True)\n', (9628, 9677), True, 'import mps.mps as mps\n'), ((10079, 10140), 'mps.mps.MPO', 'mps.MPO', (['tensors'], {'L': 'self.L', 'bdry_vecs': '[bvecl, None]', 'rcf': '(True)'}), '(tensors, L=self.L, bdry_vecs=[bvecl, None], rcf=True)\n', (10086, 10140), True, 'import mps.mps as mps\n'), ((5225, 5237), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (5235, 5237), True, 'import networkx as nx\n'), ((8261, 8352), 'tenpy.networks.mps.MPS.from_Bflat', 'tenpy.networks.mps.MPS.from_Bflat', (['[site]', '[B]'], {'bc': '"""infinite"""', 'dtype': 'complex', 'form': 'None'}), "([site], [B], bc='infinite', dtype=complex,\n form=None)\n", (8294, 8352), False, 'import tenpy\n'), ((8760, 8857), 'tenpy.networks.mps.MPS.from_Bflat', 'tenpy.networks.mps.MPS.from_Bflat', (['([site] * L)', 'B_arrs'], {'bc': '"""finite"""', 'dtype': 'complex', 'form': 'None'}), "([site] * L, B_arrs, bc='finite', dtype=\n complex, form=None)\n", (8793, 8857), False, 'import tenpy\n'), ((5412, 5467), 'networkx.algorithms.dag.is_directed_acyclic_graph', 'nx.algorithms.dag.is_directed_acyclic_graph', (['self.graph'], {}), '(self.graph)\n', (5455, 5467), True, 'import networkx as nx\n'), ((8594, 8619), 'numpy.swapaxes', 'np.swapaxes', (['tensor', '(1)', '(2)'], {}), '(tensor, 1, 2)\n', (8605, 8619), True, 'import numpy as np\n'), ((6166, 6197), 'networkx.topological_sort', 'nx.topological_sort', (['self.graph'], {}), '(self.graph)\n', (6185, 6197), True, 'import networkx as nx\n'), ((3196, 3215), 'qiskit.QuantumCircuit', 'qk.QuantumCircuit', ([], {}), 
'()\n', (3213, 3215), True, 'import qiskit as qk\n'), ((3641, 3660), 'qiskit.QuantumCircuit', 'qk.QuantumCircuit', ([], {}), '()\n', (3658, 3660), True, 'import qiskit as qk\n')] |
import sys
import click
from sonic_py_common import multi_asic, device_info
platform_sfputil = None
def load_platform_sfputil():
    """Instantiate the module-global platform_sfputil helper.

    Exits the process with status 1 if the helper cannot be created;
    returns 0 on success.
    """
    global platform_sfputil
    try:
        # Imported lazily: the platform package is only present on-device.
        import sonic_platform_base.sonic_sfp.sfputilhelper
        helper_cls = sonic_platform_base.sonic_sfp.sfputilhelper.SfpUtilHelper
        platform_sfputil = helper_cls()
    except Exception as exc:
        click.echo("Failed to instantiate platform_sfputil due to {}".format(repr(exc)))
        sys.exit(1)
    return 0
def platform_sfputil_read_porttab_mappings():
    """Load porttab mappings into the global platform_sfputil.

    Handles both multi-ASIC and single-ASIC platforms; exits the process
    with status 1 on any failure, returns 0 on success.
    """
    try:
        if multi_asic.is_multi_asic():
            # Multi-ASIC: hand over the hwsku directory plus the ASIC count
            # (the platform directory from the pair is not needed here).
            platform_dir, hwsku_dir = device_info.get_paths_to_platform_and_hwsku_dirs()
            num_asics = multi_asic.get_num_asics()
            platform_sfputil.read_all_porttab_mappings(hwsku_dir, num_asics)
        else:
            # Single ASIC: one port_config file, ASIC instance 0.
            port_config = device_info.get_path_to_port_config_file()
            platform_sfputil.read_porttab_mappings(port_config, 0)
    except Exception as exc:
        click.echo("Error reading port info (%s)" % str(exc))
        sys.exit(1)
    return 0
| [
"sonic_py_common.multi_asic.is_multi_asic",
"sonic_py_common.device_info.get_path_to_port_config_file",
"sys.exit",
"sonic_py_common.multi_asic.get_num_asics",
"sonic_py_common.device_info.get_paths_to_platform_and_hwsku_dirs"
] | [((534, 560), 'sonic_py_common.multi_asic.is_multi_asic', 'multi_asic.is_multi_asic', ([], {}), '()\n', (558, 560), False, 'from sonic_py_common import multi_asic, device_info\n'), ((438, 449), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (446, 449), False, 'import sys\n'), ((704, 754), 'sonic_py_common.device_info.get_paths_to_platform_and_hwsku_dirs', 'device_info.get_paths_to_platform_and_hwsku_dirs', ([], {}), '()\n', (752, 754), False, 'from sonic_py_common import multi_asic, device_info\n'), ((1041, 1083), 'sonic_py_common.device_info.get_path_to_port_config_file', 'device_info.get_path_to_port_config_file', ([], {}), '()\n', (1081, 1083), False, 'from sonic_py_common import multi_asic, device_info\n'), ((1256, 1267), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1264, 1267), False, 'import sys\n'), ((870, 896), 'sonic_py_common.multi_asic.get_num_asics', 'multi_asic.get_num_asics', ([], {}), '()\n', (894, 896), False, 'from sonic_py_common import multi_asic, device_info\n')] |
import tempfile
from urllib.request import urlretrieve, urlopen
from urllib.error import URLError
import pyscipopt as scip
import os
import pandas as pd
class Loader:
    """Downloads and caches MIPLIB problem instances as pyscipopt models."""
    def __init__(self, persistent_directory=None):
        """
        Initializes the MIPLIB loader object
        Parameters
        ----------
        persistent_directory: str or None
            Path for directory to use for persistent files,
            If set to None, resorts to default case of using temporary files
            that get deleted after program execution
        """
        # Maps instance file name -> local path of the downloaded file.
        self.instances_cache = {}
        # NOTE(review): paths are built as `self.dir + name` below, so a
        # persistent directory is expected to end with a path separator —
        # confirm with callers.
        self.dir = persistent_directory
        if persistent_directory:
            self._load_instances_cache()
    def load_instance(self, instance_name, with_solution=False):
        """Return a pyscipopt Model for *instance_name*, downloading it
        from the MIPLIB mirrors if it is not cached yet.  When
        *with_solution* is True, a known solution is also fetched and
        read into the model."""
        if not self._instance_cached(instance_name):
            self._download_instance(instance_name)
        problem_path = self._instance_path(instance_name)
        model = scip.Model()
        model.readProblem(problem_path)
        if with_solution:
            self._add_solution(model, instance_name)
        return model
    def _instance_path(self, instance_name):
        # Local file path of an already-cached instance (KeyError if absent).
        return self.instances_cache[instance_name]
    def _generate_path_for_instance(self, instance_name):
        # Persistent mode: file lives in self.dir.  Temporary mode: a named
        # temp file with the instance's extension; delete=False so it
        # survives until __del__ cleans it up.
        if self.dir:
            return self.dir + instance_name
        else:
            extension = instance_name[instance_name.index(".") :]
            return tempfile.NamedTemporaryFile(suffix=extension, delete=False).name
    def _download_instance(self, instance_name):
        # Fetch the instance from the first MIPLIB mirror that responds.
        path = self._generate_path_for_instance(instance_name)
        url = self._look_for_working_url(self._instance_urls(instance_name))
        if url:
            urlretrieve(url, path)
            self.instances_cache[instance_name] = path
        else:
            raise ValueError(
                "Was not able to find the instance in any of the MIPLIB sources"
            )
    def _look_for_working_url(self, urls):
        # Return the first URL that answers successfully, or None if none do.
        for url in urls:
            try:
                response = urlopen(url)
            except URLError:
                continue
            if self._successful_response(response):
                return url
        return None
    @staticmethod
    def _successful_response(response):
        # HTTP 200 and not a redirect to a "not_found" page.
        return response.status == 200 and "not_found" not in response.url
    def _instance_cached(self, instance_name):
        return instance_name in self.instances_cache
    def _load_instances_cache(self):
        # Pre-populate the cache with .mps.gz files already present in the
        # persistent directory.
        for path in os.listdir(self.dir):
            if path.endswith(".mps.gz"):
                instance_name = path.split("/")[-1]
                self.instances_cache[instance_name] = self.dir + path
    def _add_solution(self, model, instance_name):
        # Download the matching solution into a temporary file and read it
        # into the model; the temp file is removed when the `with` exits.
        url = self._look_for_working_url(self._solution_urls(instance_name))
        if url:
            with tempfile.NamedTemporaryFile(suffix=".sol.gz") as sol_file:
                urlretrieve(url, sol_file.name)
                model.readSol(sol_file.name)
        else:
            raise ValueError(
                "Was not able to find the solution in any of the MIPLIB sources"
            )
    @staticmethod
    def _instance_urls(instance_name):
        # Candidate download locations, newest MIPLIB release first.
        return [
            f"https://miplib.zib.de/WebData/instances/{instance_name}", # 2017 instances
            f"http://miplib2010.zib.de/download/{instance_name}", # 2010 instances
            f"http://miplib2010.zib.de/miplib2003/download/{instance_name}", # 2003 instance
        ]
    @staticmethod
    def _solution_urls(instance_name):
        # Solutions are keyed by the bare instance name (no extension).
        name = instance_name[: instance_name.index(".")]
        return [
            f"https://miplib.zib.de/downloads/solutions/{name}/1/{name}.sol.gz", # 2017 solutions
            f"http://miplib2010.zib.de/download/{name}.sol.gz", # 2010 solutions
            f"http://miplib2010.zib.de/miplib2003/download/{name}.sol.gz", # 2003 solutions
        ]
    def __del__(self):
        # Temporary-file mode: remove downloaded files on garbage collection
        # so they do not accumulate in the temp directory.
        if self.dir is None:
            for path in self.instances_cache.values():
                os.unlink(path)
def benchmark_instances():
    """Yield every instance in the MIPLIB benchmark set."""
    yield from custom_list("https://miplib.zib.de/downloads/benchmark-v2.test")
def easy_instances():
    """Yield every instance in the MIPLIB 'easy' set."""
    yield from custom_list("https://miplib.zib.de/downloads/easy-v9.test")
def hard_instances():
    """Yield every instance in the MIPLIB 'hard' set."""
    yield from custom_list("https://miplib.zib.de/downloads/hard-v15.test")
def open_instances():
    """Yield every instance in the MIPLIB 'open' set."""
    yield from custom_list("https://miplib.zib.de/downloads/open-v14.test")
def custom_list(source, with_solution=False, loader=None):
    """
    Returns a generator of instances from the given list
    Parameters
    ----------
    source: str
        Path or URL for the instance list source
    with_solution: bool
        Whether to return the instance with the known solutions or not
    loader: Loader
        Loader object to download instances with
    Returns
    -------
    A generator for the instances
    """
    # One instance name per line; the single column is labeled "instance".
    names = pd.read_csv(source, names=["instance"])["instance"]
    active_loader = Loader() if loader is None else loader
    for name in names:
        yield active_loader.load_instance(name, with_solution=with_solution)
| [
"os.listdir",
"pandas.read_csv",
"urllib.request.urlretrieve",
"pyscipopt.Model",
"os.unlink",
"tempfile.NamedTemporaryFile",
"urllib.request.urlopen"
] | [((5028, 5067), 'pandas.read_csv', 'pd.read_csv', (['source'], {'names': "['instance']"}), "(source, names=['instance'])\n", (5039, 5067), True, 'import pandas as pd\n'), ((952, 964), 'pyscipopt.Model', 'scip.Model', ([], {}), '()\n', (962, 964), True, 'import pyscipopt as scip\n'), ((2496, 2516), 'os.listdir', 'os.listdir', (['self.dir'], {}), '(self.dir)\n', (2506, 2516), False, 'import os\n'), ((1708, 1730), 'urllib.request.urlretrieve', 'urlretrieve', (['url', 'path'], {}), '(url, path)\n', (1719, 1730), False, 'from urllib.request import urlretrieve, urlopen\n'), ((1425, 1484), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {'suffix': 'extension', 'delete': '(False)'}), '(suffix=extension, delete=False)\n', (1452, 1484), False, 'import tempfile\n'), ((2038, 2050), 'urllib.request.urlopen', 'urlopen', (['url'], {}), '(url)\n', (2045, 2050), False, 'from urllib.request import urlretrieve, urlopen\n'), ((2843, 2888), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {'suffix': '""".sol.gz"""'}), "(suffix='.sol.gz')\n", (2870, 2888), False, 'import tempfile\n'), ((2918, 2949), 'urllib.request.urlretrieve', 'urlretrieve', (['url', 'sol_file.name'], {}), '(url, sol_file.name)\n', (2929, 2949), False, 'from urllib.request import urlretrieve, urlopen\n'), ((4027, 4042), 'os.unlink', 'os.unlink', (['path'], {}), '(path)\n', (4036, 4042), False, 'import os\n')] |
#!/usr/bin/python
import Adafruit_SSD1306
import os
from retrying import retry
from PIL import Image, ImageDraw, ImageFont
class Oled:
    """Wrapper around a 128x64 SSD1306 OLED that renders a line of text."""
    def __init__(self, display_bus, font_size):
        """Set up the display on the given I2C bus with the given font size.

        :param display_bus: int -- I2C bus number the SSD1306 is attached to
        :param font_size: int -- point size for the TrueType font used by display()
        """
        # declare member variables
        self.draw = None
        self.font = None
        self.disp = None
        self.width = None
        self.height = None
        self.image = None
        self.font_size = font_size
        # display bus
        # Rev 2 Pi, Pi 2 & Pi 3 uses bus 1
        # Rev 1 Pi uses bus 0
        # Orange Pi Zero uses bus 0 for pins 1-5 (other pins for bus 1 & 2)
        self.display_bus = display_bus
        # init
        self.initialize()
    def initialize(self):
        """Initialize the SSD1306 hardware, off-screen image buffer and font."""
        # 128x64 display with hardware I2C:
        self.disp = Adafruit_SSD1306.SSD1306_128_64(rst=None, i2c_bus=self.display_bus)
        # Initialize library.
        self.disp.begin()
        # Clear display.
        self.disp.clear()
        self.disp.display()
        # Create blank image for drawing.
        # Make sure to create image with mode '1' for 1-bit color.
        self.width = self.disp.width
        self.height = self.disp.height
        self.image = Image.new('1', (self.width, self.height))
        # Get drawing object to draw on image.
        self.draw = ImageDraw.Draw(self.image)
        # Build absolute path of this module's directory so the bundled
        # font file can be found regardless of the working directory.
        full_path = os.path.dirname(os.path.abspath(__file__)) + "/"
        # Draw a black filled box to clear the image.
        self.draw.rectangle((-20, -20, self.width, self.height), outline=0, fill=0)
        self.font = ImageFont.truetype(full_path + "Lato-Heavy.ttf", self.font_size)
    @retry()
    def display(self, text):
        """Render *text* at the top-left of the display.

        Wrapped with @retry so transient display/I2C errors trigger a
        retry of the whole draw-and-flush sequence.
        """
        # First define some constants to allow easy resizing of shapes.
        padding = -2
        top = padding
        # bottom = self.height - padding
        # Draw a black filled box to clear the image.
        self.draw.rectangle((0, 0, self.width, self.height), outline=0, fill=0)
        self.draw.text((0, top), str(text), font=self.font, fill=255)
        # Display image.
        self.disp.image(self.image)
        self.disp.display()
| [
"PIL.Image.new",
"PIL.ImageFont.truetype",
"Adafruit_SSD1306.SSD1306_128_64",
"PIL.ImageDraw.Draw",
"retrying.retry",
"os.path.abspath"
] | [((1650, 1657), 'retrying.retry', 'retry', ([], {}), '()\n', (1655, 1657), False, 'from retrying import retry\n'), ((754, 821), 'Adafruit_SSD1306.SSD1306_128_64', 'Adafruit_SSD1306.SSD1306_128_64', ([], {'rst': 'None', 'i2c_bus': 'self.display_bus'}), '(rst=None, i2c_bus=self.display_bus)\n', (785, 821), False, 'import Adafruit_SSD1306\n'), ((1166, 1207), 'PIL.Image.new', 'Image.new', (['"""1"""', '(self.width, self.height)'], {}), "('1', (self.width, self.height))\n", (1175, 1207), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((1276, 1302), 'PIL.ImageDraw.Draw', 'ImageDraw.Draw', (['self.image'], {}), '(self.image)\n', (1290, 1302), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((1579, 1643), 'PIL.ImageFont.truetype', 'ImageFont.truetype', (["(full_path + 'Lato-Heavy.ttf')", 'self.font_size'], {}), "(full_path + 'Lato-Heavy.ttf', self.font_size)\n", (1597, 1643), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((1387, 1412), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1402, 1412), False, 'import os\n')] |
import os
import json
import boto3
def handler(event, context):
    """Lambda entry point: write name/location/age query-string parameters
    into the DynamoDB table named by the `table` environment variable.

    :param event: API Gateway proxy event carrying queryStringParameters
    :param context: Lambda context (unused)
    :returns: dict -- HTTP 200 response with the put_item status as body
    """
    table_name = os.environ.get('table')
    client = boto3.client('dynamodb')
    params = event["queryStringParameters"]
    # DynamoDB low-level API expects typed attribute values ('S' = string).
    item = {
        "name": {'S': params["name"]},
        "location": {'S': params["location"]},
        "age": {'S': params["age"]},
    }
    response = client.put_item(TableName=table_name,
                               Item=item
                               )
    message = 'Status of the write to DynamoDB {}!'.format(response)
    return {
        "statusCode": 200,
        "body": json.dumps(message)
    }
| [
"boto3.client",
"json.dumps",
"os.environ.get"
] | [((77, 100), 'os.environ.get', 'os.environ.get', (['"""table"""'], {}), "('table')\n", (91, 100), False, 'import os\n'), ((116, 140), 'boto3.client', 'boto3.client', (['"""dynamodb"""'], {}), "('dynamodb')\n", (128, 140), False, 'import boto3\n'), ((589, 608), 'json.dumps', 'json.dumps', (['message'], {}), '(message)\n', (599, 608), False, 'import json\n')] |
"""
Module for Gemini FLAMINGOS.
.. include:: ../include/links.rst
"""
import os
from pkg_resources import resource_filename
from IPython import embed
import numpy as np
from pypeit import msgs
from pypeit import telescopes
from pypeit.core import framematch
from pypeit.images import detector_container
from pypeit.spectrographs import spectrograph
class GeminiFLAMINGOSSpectrograph(spectrograph.Spectrograph):
    """
    Base class for the Gemini FLAMINGOS spectrograph.
    """
    ndet = 1
    telescope = telescopes.GeminiSTelescopePar()
    def init_meta(self):
        """
        Define how metadata are derived from the spectrograph files.
        That is, this associates the ``PypeIt``-specific metadata keywords
        with the instrument-specific header cards using :attr:`meta`.
        """
        # Core metadata first, then the extras used for configuration
        # identification and frame typing.
        self.meta = {
            'ra': dict(ext=0, card='RA'),
            'dec': dict(ext=0, card='DEC'),
            'target': dict(ext=0, card='OBJECT'),
            'decker': dict(ext=0, card='MASKNAME'),
            'dichroic': dict(ext=0, card='FILTER'),
            'binning': dict(ext=0, card=None, default='1,1'),
            'mjd': dict(ext=0, card='MJD-OBS'),
            'exptime': dict(ext=0, card='EXPTIME'),
            'airmass': dict(ext=0, card='AIRMASS'),
            # Extras for config and frametyping
            'dispname': dict(ext=0, card='GRISM'),
            'idname': dict(ext=0, card='OBSTYPE'),
        }
class GeminiFLAMINGOS2Spectrograph(GeminiFLAMINGOSSpectrograph):
    """
    Gemini/Flamingos2 Echelle spectrograph methods.
    """
    name = 'gemini_flamingos2'
    camera = 'FLAMINGOS'
    supported = True
    comment = 'Flamingos-2 NIR spectrograph'
    def get_detector_par(self, hdu, det):
        """
        Return metadata for the selected detector.
        Args:
            hdu (`astropy.io.fits.HDUList`_):
                The open fits file with the raw image of interest.
            det (:obj:`int`):
                1-indexed detector number.
        Returns:
            :class:`~pypeit.images.detector_container.DetectorContainer`:
            Object with the detector metadata.
        """
        # Detector 1 (single-detector instrument; hard-wired values below)
        detector_dict = dict(
            binning = '1,1',
            det = 1,
            dataext = 1,
            specaxis = 0,
            specflip = True,
            spatflip = False,
            platescale = 0.1787,
            darkcurr = 0.5,
            saturation = 700000., #155400.,
            nonlinear = 1.0,
            mincounts = -1e10,
            numamplifiers = 1,
            gain = np.atleast_1d(4.44),
            ronoise = np.atleast_1d(5.0), #8 CDS read
            datasec = np.atleast_1d('[:,:]'),
            oscansec = np.atleast_1d('[:,:]'),
            )
        return detector_container.DetectorContainer(**detector_dict)
    @classmethod
    def default_pypeit_par(cls):
        """
        Return the default parameters to use for this instrument.
        Returns:
            :class:`~pypeit.par.pypeitpar.PypeItPar`: Parameters required by
            all of ``PypeIt`` methods.
        """
        par = super().default_pypeit_par()
        # Image processing steps: no bias/overscan/dark/illumflat for this
        # NIR detector.
        turn_off = dict(use_illumflat=False, use_biasimage=False, use_overscan=False,
                        use_darkimage=False)
        par.reset_all_processimages_par(**turn_off)
        # Wavelengths
        # 1D wavelength solution with arc lines
        par['calibrations']['wavelengths']['rms_threshold'] = 0.5
        par['calibrations']['wavelengths']['sigdetect']=5
        par['calibrations']['wavelengths']['fwhm'] = 5
        par['calibrations']['wavelengths']['n_first']=2
        par['calibrations']['wavelengths']['n_final']=4
        # Wavelength calibration uses sky OH lines (OH_NIRES line list).
        par['calibrations']['wavelengths']['lamps'] = ['OH_NIRES']
        par['calibrations']['wavelengths']['match_toler']=5.0
        # Set slits and tilts parameters
        par['calibrations']['tilts']['tracethresh'] = 5
        par['calibrations']['tilts']['spat_order'] = 4
        par['calibrations']['slitedges']['trace_thresh'] = 10.
        par['calibrations']['slitedges']['edge_thresh'] = 200.
        par['calibrations']['slitedges']['fit_min_spec_length'] = 0.4
        par['calibrations']['slitedges']['sync_predict'] = 'nearest'
        # Set the default exposure time ranges (seconds) for frame typing.
        par['calibrations']['standardframe']['exprng'] = [None, 30]
        par['calibrations']['tiltframe']['exprng'] = [50, None]
        par['calibrations']['arcframe']['exprng'] = [50, None]
        par['calibrations']['darkframe']['exprng'] = [20, None]
        par['scienceframe']['exprng'] = [20, None]
        # Scienceimage parameters
        par['reduce']['findobj']['sig_thresh'] = 5.0
        par['reduce']['skysub']['sky_sigrej'] = 5.0
        par['reduce']['findobj']['find_trim_edge'] = [10,10]
        # Do not correct for flexure
        par['flexure']['spec_method'] = 'skip'
        # Sensitivity function parameters
        par['sensfunc']['algorithm'] = 'IR'
        par['sensfunc']['polyorder'] = 8
        # TODO: replace the telluric grid file for Gemini-S site.
        par['sensfunc']['IR']['telgridfile'] \
                = os.path.join(par['sensfunc']['IR'].default_root,
                              'TelFit_LasCampanas_3100_26100_R20000.fits')
        return par
    def config_specific_par(self, scifile, inp_par=None):
        """
        Modify the ``PypeIt`` parameters to hard-wired values used for
        specific instrument configurations.
        Args:
            scifile (:obj:`str`):
                File to use when determining the configuration and how
                to adjust the input parameters.
            inp_par (:class:`~pypeit.par.parset.ParSet`, optional):
                Parameter set used for the full run of PypeIt.  If None,
                use :func:`default_pypeit_par`.
        Returns:
            :class:`~pypeit.par.parset.ParSet`: The PypeIt parameter set
            adjusted for configuration specific parameter values.
        """
        par = super().config_specific_par(scifile, inp_par=inp_par)
        # TODO: Should we allow the user to override these?
        # Select the wavelength-solution template matching the grism in use.
        if self.get_meta_value(scifile, 'dispname') == 'JH_G5801':
            par['calibrations']['wavelengths']['method'] = 'full_template'
            par['calibrations']['wavelengths']['reid_arxiv'] = 'Flamingos2_JH_JH.fits'
        elif self.get_meta_value(scifile, 'dispname') == 'HK_G5802':
            par['calibrations']['wavelengths']['method'] = 'full_template'
            par['calibrations']['wavelengths']['reid_arxiv'] = 'Flamingos2_HK_HK.fits'
        return par
    def check_frame_type(self, ftype, fitstbl, exprng=None):
        """
        Check for frames of the provided type.
        Args:
            ftype (:obj:`str`):
                Type of frame to check. Must be a valid frame type; see
                frame-type :ref:`frame_type_defs`.
            fitstbl (`astropy.table.Table`_):
                The table with the metadata for one or more frames to check.
            exprng (:obj:`list`, optional):
                Range in the allowed exposure time for a frame of type
                ``ftype``. See
                :func:`pypeit.core.framematch.check_frame_exptime`.
        Returns:
            `numpy.ndarray`_: Boolean array with the flags selecting the
            exposures in ``fitstbl`` that are ``ftype`` type frames.
        """
        good_exp = framematch.check_frame_exptime(fitstbl['exptime'], exprng)
        if ftype in ['pinhole', 'bias']:
            # No pinhole or bias frames
            return np.zeros(len(fitstbl), dtype=bool)
        if ftype in ['pixelflat', 'trace']:
            return good_exp & (fitstbl['idname'] == 'FLAT')
        if ftype == 'standard':
            return good_exp & (fitstbl['idname'] == 'OBJECT')
        if ftype == 'science':
            return good_exp & (fitstbl['idname'] == 'OBJECT')
        if ftype in ['arc', 'tilt']:
            # Arc/tilt frames share the OBJECT idname with science frames;
            # they are distinguished only by the exposure-time ranges set in
            # default_pypeit_par (sky-line based wavelength calibration).
            return good_exp & (fitstbl['idname'] == 'OBJECT')
        msgs.warn('Cannot determine if frames are of type {0}.'.format(ftype))
        return np.zeros(len(fitstbl), dtype=bool)
class GeminiFLAMINGOS1Spectrograph(GeminiFLAMINGOSSpectrograph):
"""
Gemini/Flamingos1 Echelle spectrograph methods.
.. todo::
This is a placeholder class that is not yet supported.
"""
name = 'gemini_flamingos1'
camera = 'FLAMINGOS'
def get_detector_par(self, hdu, det):
"""
Return metadata for the selected detector.
Args:
hdu (`astropy.io.fits.HDUList`_):
The open fits file with the raw image of interest.
det (:obj:`int`):
1-indexed detector number.
Returns:
:class:`~pypeit.images.detector_container.DetectorContainer`:
Object with the detector metadata.
"""
# Detector 1
detector_dict = dict(
binning='1,1',
det = 1,
dataext = 1,
specaxis = 0,
specflip = False,
spatflip = False,
platescale = 0.15,
darkcurr = 0.01,
saturation = 320000., #155400.,
nonlinear = 0.875,
mincounts = -1e10,
numamplifiers = 1,
gain = np.atleast_1d(3.8),
ronoise = np.atleast_1d(6.0), # SUTR readout
datasec= np.atleast_1d('[5:2044, 900:1250]'),
oscansec= np.atleast_1d('[:5, 900:1250]'),
)
return detector_container.DetectorContainer(**detector_dict)
@classmethod
def default_pypeit_par(cls):
"""
Return the default parameters to use for this instrument.
Returns:
:class:`~pypeit.par.pypeitpar.PypeItPar`: Parameters required by
all of ``PypeIt`` methods.
"""
par = super().default_pypeit_par()
# Image processing steps
turn_off = dict(use_illumflat=False, use_biasimage=False, use_overscan=False,
use_darkimage=False)
par.reset_all_processimages_par(**turn_off)
# Wavelengths
# 1D wavelength solution with arc lines
par['calibrations']['wavelengths']['rms_threshold'] = 1.0
par['calibrations']['wavelengths']['sigdetect']=3
par['calibrations']['wavelengths']['fwhm'] = 20
par['calibrations']['wavelengths']['n_first']=2
par['calibrations']['wavelengths']['n_final']=4
par['calibrations']['wavelengths']['lamps'] = ['ArI', 'ArII', 'ThAr', 'NeI']
par['calibrations']['wavelengths']['method'] = 'full_template'
par['calibrations']['wavelengths']['reid_arxiv'] = 'magellan_fire_long.fits'
par['calibrations']['wavelengths']['match_toler']=5.0
# Set slits and tilts parameters
par['calibrations']['tilts']['tracethresh'] = 5
par['calibrations']['slitedges']['trace_thresh'] = 5.
par['calibrations']['slitedges']['sync_predict'] = 'nearest'
# Scienceimage parameters
par['reduce']['findobj']['sig_thresh'] = 5.0
# TODO: I think this parameter was removed
par['reduce']['findobj']['find_trim_edge'] = [50,50]
# Do not correct for flexure
par['flexure']['spec_method'] = 'skip'
# Set the default exposure time ranges for the frame typing
par['calibrations']['standardframe']['exprng'] = [None, 60]
par['calibrations']['arcframe']['exprng'] = [1, 50]
par['calibrations']['darkframe']['exprng'] = [20, None]
par['scienceframe']['exprng'] = [20, None]
return par
def check_frame_type(self, ftype, fitstbl, exprng=None):
"""
Check for frames of the provided type.
Args:
ftype (:obj:`str`):
Type of frame to check. Must be a valid frame type; see
frame-type :ref:`frame_type_defs`.
fitstbl (`astropy.table.Table`_):
The table with the metadata for one or more frames to check.
exprng (:obj:`list`, optional):
Range in the allowed exposure time for a frame of type
``ftype``. See
:func:`pypeit.core.framematch.check_frame_exptime`.
Returns:
`numpy.ndarray`_: Boolean array with the flags selecting the
exposures in ``fitstbl`` that are ``ftype`` type frames.
"""
good_exp = framematch.check_frame_exptime(fitstbl['exptime'], exprng)
if ftype in ['pinhole', 'bias']:
# No pinhole or bias frames
return np.zeros(len(fitstbl), dtype=bool)
if ftype in ['pixelflat', 'trace']:
return good_exp & (fitstbl['idname'] == 'PixFlat')
if ftype == 'standard':
return good_exp & (fitstbl['idname'] == 'Telluric')
if ftype == 'science':
return good_exp & (fitstbl['idname'] == 'Science')
if ftype in ['arc', 'tilt']:
return good_exp & (fitstbl['idname'] == 'Arc')
msgs.warn('Cannot determine if frames are of type {0}.'.format(ftype))
return np.zeros(len(fitstbl), dtype=bool)
| [
"os.path.join",
"pypeit.images.detector_container.DetectorContainer",
"pypeit.telescopes.GeminiSTelescopePar",
"pypeit.core.framematch.check_frame_exptime",
"numpy.atleast_1d"
] | [((517, 549), 'pypeit.telescopes.GeminiSTelescopePar', 'telescopes.GeminiSTelescopePar', ([], {}), '()\n', (547, 549), False, 'from pypeit import telescopes\n'), ((2994, 3047), 'pypeit.images.detector_container.DetectorContainer', 'detector_container.DetectorContainer', ([], {}), '(**detector_dict)\n', (3030, 3047), False, 'from pypeit.images import detector_container\n'), ((5423, 5520), 'os.path.join', 'os.path.join', (["par['sensfunc']['IR'].default_root", '"""TelFit_LasCampanas_3100_26100_R20000.fits"""'], {}), "(par['sensfunc']['IR'].default_root,\n 'TelFit_LasCampanas_3100_26100_R20000.fits')\n", (5435, 5520), False, 'import os\n'), ((7707, 7765), 'pypeit.core.framematch.check_frame_exptime', 'framematch.check_frame_exptime', (["fitstbl['exptime']", 'exprng'], {}), "(fitstbl['exptime'], exprng)\n", (7737, 7765), False, 'from pypeit.core import framematch\n'), ((9884, 9937), 'pypeit.images.detector_container.DetectorContainer', 'detector_container.DetectorContainer', ([], {}), '(**detector_dict)\n', (9920, 9937), False, 'from pypeit.images import detector_container\n'), ((12811, 12869), 'pypeit.core.framematch.check_frame_exptime', 'framematch.check_frame_exptime', (["fitstbl['exptime']", 'exprng'], {}), "(fitstbl['exptime'], exprng)\n", (12841, 12869), False, 'from pypeit.core import framematch\n'), ((2774, 2793), 'numpy.atleast_1d', 'np.atleast_1d', (['(4.44)'], {}), '(4.44)\n', (2787, 2793), True, 'import numpy as np\n'), ((2825, 2843), 'numpy.atleast_1d', 'np.atleast_1d', (['(5.0)'], {}), '(5.0)\n', (2838, 2843), True, 'import numpy as np\n'), ((2887, 2909), 'numpy.atleast_1d', 'np.atleast_1d', (['"""[:,:]"""'], {}), "('[:,:]')\n", (2900, 2909), True, 'import numpy as np\n'), ((2941, 2963), 'numpy.atleast_1d', 'np.atleast_1d', (['"""[:,:]"""'], {}), "('[:,:]')\n", (2954, 2963), True, 'import numpy as np\n'), ((9657, 9675), 'numpy.atleast_1d', 'np.atleast_1d', (['(3.8)'], {}), '(3.8)\n', (9670, 9675), True, 'import numpy as np\n'), ((9707, 9725), 
'numpy.atleast_1d', 'np.atleast_1d', (['(6.0)'], {}), '(6.0)\n', (9720, 9725), True, 'import numpy as np\n'), ((9763, 9798), 'numpy.atleast_1d', 'np.atleast_1d', (['"""[5:2044, 900:1250]"""'], {}), "('[5:2044, 900:1250]')\n", (9776, 9798), True, 'import numpy as np\n'), ((9822, 9853), 'numpy.atleast_1d', 'np.atleast_1d', (['"""[:5, 900:1250]"""'], {}), "('[:5, 900:1250]')\n", (9835, 9853), True, 'import numpy as np\n')] |
import os
from starlette.applications import Starlette
from starlette.responses import PlainTextResponse, Response
from starlette.testclient import TestClient
from apistar.client import Client, decoders
app = Starlette()
@app.route("/text-response/")
def text_response(request):
return PlainTextResponse("hello, world")
@app.route("/file-response/")
def file_response(request):
headers = {
"Content-Type": "image/png",
"Content-Disposition": 'attachment; filename="filename.png"',
}
return Response(b"<somedata>", headers=headers)
@app.route("/file-response-url-filename/name.png")
def file_response_url_filename(request):
headers = {"Content-Type": "image/png", "Content-Disposition": "attachment"}
return Response(b"<somedata>", headers=headers)
@app.route("/file-response-no-extension/name")
def file_response_no_extension(request):
headers = {"Content-Type": "image/png", "Content-Disposition": "attachment"}
return Response(b"<somedata>", headers=headers)
@app.route("/")
def file_response_no_name(request):
headers = {"Content-Type": "image/png", "Content-Disposition": "attachment"}
return Response(b"<somedata>", headers=headers)
schema = {
"openapi": "3.0.0",
"info": {"title": "Test API", "version": "1.0"},
"servers": [{"url": "http://testserver"}],
"paths": {
"/text-response/": {"get": {"operationId": "text-response"}},
"/file-response/": {"get": {"operationId": "file-response"}},
"/file-response-url-filename/name.png": {
"get": {"operationId": "file-response-url-filename"}
},
"/file-response-no-extension/name": {
"get": {"operationId": "file-response-no-extension"}
},
"/": {"get": {"operationId": "file-response-no-name"}},
},
}
def test_text_response():
client = Client(schema, session=TestClient(app))
data = client.request("text-response")
assert data == "hello, world"
def test_file_response():
client = Client(schema, session=TestClient(app))
data = client.request("file-response")
assert os.path.basename(data.name) == "filename.png"
assert data.read() == b"<somedata>"
def test_file_response_url_filename():
client = Client(schema, session=TestClient(app))
data = client.request("file-response-url-filename")
assert os.path.basename(data.name) == "name.png"
assert data.read() == b"<somedata>"
def test_file_response_no_extension():
client = Client(schema, session=TestClient(app))
data = client.request("file-response-no-extension")
assert os.path.basename(data.name) == "name.png"
assert data.read() == b"<somedata>"
def test_file_response_no_name():
client = Client(schema, session=TestClient(app))
data = client.request("file-response-no-name")
assert os.path.basename(data.name) == "download.png"
assert data.read() == b"<somedata>"
def test_unique_filename(tmpdir):
client = Client(
schema, session=TestClient(app), decoders=[decoders.DownloadDecoder(tmpdir)]
)
data = client.request("file-response")
assert os.path.basename(data.name) == "filename.png"
assert data.read() == b"<somedata>"
data = client.request("file-response")
assert os.path.basename(data.name) == "filename (1).png"
assert data.read() == b"<somedata>"
| [
"starlette.applications.Starlette",
"starlette.responses.Response",
"starlette.testclient.TestClient",
"os.path.basename",
"apistar.client.decoders.DownloadDecoder",
"starlette.responses.PlainTextResponse"
] | [((212, 223), 'starlette.applications.Starlette', 'Starlette', ([], {}), '()\n', (221, 223), False, 'from starlette.applications import Starlette\n'), ((295, 328), 'starlette.responses.PlainTextResponse', 'PlainTextResponse', (['"""hello, world"""'], {}), "('hello, world')\n", (312, 328), False, 'from starlette.responses import PlainTextResponse, Response\n'), ((529, 569), 'starlette.responses.Response', 'Response', (["b'<somedata>'"], {'headers': 'headers'}), "(b'<somedata>', headers=headers)\n", (537, 569), False, 'from starlette.responses import PlainTextResponse, Response\n'), ((756, 796), 'starlette.responses.Response', 'Response', (["b'<somedata>'"], {'headers': 'headers'}), "(b'<somedata>', headers=headers)\n", (764, 796), False, 'from starlette.responses import PlainTextResponse, Response\n'), ((979, 1019), 'starlette.responses.Response', 'Response', (["b'<somedata>'"], {'headers': 'headers'}), "(b'<somedata>', headers=headers)\n", (987, 1019), False, 'from starlette.responses import PlainTextResponse, Response\n'), ((1166, 1206), 'starlette.responses.Response', 'Response', (["b'<somedata>'"], {'headers': 'headers'}), "(b'<somedata>', headers=headers)\n", (1174, 1206), False, 'from starlette.responses import PlainTextResponse, Response\n'), ((2113, 2140), 'os.path.basename', 'os.path.basename', (['data.name'], {}), '(data.name)\n', (2129, 2140), False, 'import os\n'), ((2360, 2387), 'os.path.basename', 'os.path.basename', (['data.name'], {}), '(data.name)\n', (2376, 2387), False, 'import os\n'), ((2603, 2630), 'os.path.basename', 'os.path.basename', (['data.name'], {}), '(data.name)\n', (2619, 2630), False, 'import os\n'), ((2836, 2863), 'os.path.basename', 'os.path.basename', (['data.name'], {}), '(data.name)\n', (2852, 2863), False, 'import os\n'), ((3124, 3151), 'os.path.basename', 'os.path.basename', (['data.name'], {}), '(data.name)\n', (3140, 3151), False, 'import os\n'), ((3265, 3292), 'os.path.basename', 'os.path.basename', (['data.name'], {}), 
'(data.name)\n', (3281, 3292), False, 'import os\n'), ((1884, 1899), 'starlette.testclient.TestClient', 'TestClient', (['app'], {}), '(app)\n', (1894, 1899), False, 'from starlette.testclient import TestClient\n'), ((2042, 2057), 'starlette.testclient.TestClient', 'TestClient', (['app'], {}), '(app)\n', (2052, 2057), False, 'from starlette.testclient import TestClient\n'), ((2276, 2291), 'starlette.testclient.TestClient', 'TestClient', (['app'], {}), '(app)\n', (2286, 2291), False, 'from starlette.testclient import TestClient\n'), ((2519, 2534), 'starlette.testclient.TestClient', 'TestClient', (['app'], {}), '(app)\n', (2529, 2534), False, 'from starlette.testclient import TestClient\n'), ((2757, 2772), 'starlette.testclient.TestClient', 'TestClient', (['app'], {}), '(app)\n', (2767, 2772), False, 'from starlette.testclient import TestClient\n'), ((3003, 3018), 'starlette.testclient.TestClient', 'TestClient', (['app'], {}), '(app)\n', (3013, 3018), False, 'from starlette.testclient import TestClient\n'), ((3030, 3062), 'apistar.client.decoders.DownloadDecoder', 'decoders.DownloadDecoder', (['tmpdir'], {}), '(tmpdir)\n', (3054, 3062), False, 'from apistar.client import Client, decoders\n')] |
import os, json
from collections import Counter
# test if exist and mkdir
# ../Results/features/corpus_info/
# ../Results/labels
for col in ["FEATS", "PARSEME:MWE", "UPOS", "XPOS", "DEPREL", "DEPS", "LEMMA"]:
for file_type in ["train.cupt", "dev.cupt", "test.blind.cupt"]:
counter_dict = dict()
count_all = Counter()
for lang_dir in os.listdir("../sharedtask_11/"):
for filename in os.listdir("../sharedtask_11/"+lang_dir):
if filename.endswith(file_type):
lang_counter = Counter()
with open("../sharedtask_11/"+lang_dir+'/'+filename) as f:
content = f.readlines()
for line in content:
if line.startswith('# global.columns = '):
header = line.split('# global.columns = ')[1].strip()
nr_col = header.split(' ').index(col)
print(nr_col)
if not line.startswith("#") and line != '\n':
col_value = line.strip().split('\t')[nr_col]
if col_value == "_" or col_value == "*":
lang_counter[col_value] += 1
count_all[col_value] +=1
continue
if col == "FEATS":
splitted_morpho_info = col_value.split("|")
if len(splitted_morpho_info) > 1:
for e in splitted_morpho_info:
if "=" in e:
e = e.split("=")[0]
lang_counter[e] += 1
count_all[e] += 1
else:
if "=" in splitted_morpho_info[0]:
splitted_morpho_info = splitted_morpho_info[0].split("=")[0]
else:
splitted_morpho_info = splitted_morpho_info[0]
lang_counter[splitted_morpho_info] += 1
count_all[splitted_morpho_info] +=1
elif col == "PARSEME:MWE":
splitted_mwe_type = col_value.split(";")
if len(splitted_mwe_type) > 1:
for e in splitted_mwe_type:
if ":" in e:
e = e.split(":")[1]
if e.isdigit():
continue
lang_counter[e] += 1
count_all[e] += 1
else:
if ":" in col_value:
col_value = col_value.split(":")[1]
if col_value.isdigit():
continue
lang_counter[col_value] += 1
count_all[col_value] +=1
else:
lang_counter[col_value] += 1
count_all[col_value] +=1
counter_dict[lang_dir] = lang_counter
if col == "PARSEME:MWE":
with open("../Results/labels/number_"+col+'_'+file_type.split(".")[0]+".json", "w") as f:
json.dump(counter_dict, f)
with open("../Results/labels/number_"+col+'_'+file_type.split(".")[0]+"_all.json", "w") as f:
json.dump(count_all, f)
else:
with open("../Results/features/corpus_info/number_"+col+'_'+file_type.split(".")[0]+".json", "w") as f:
json.dump(counter_dict, f)
with open("../Results/features/corpus_info/number_"+col+'_'+file_type.split(".")[0]+"_all.json", "w") as f:
json.dump(count_all, f)
| [
"collections.Counter",
"os.listdir",
"json.dump"
] | [((314, 323), 'collections.Counter', 'Counter', ([], {}), '()\n', (321, 323), False, 'from collections import Counter\n'), ((342, 373), 'os.listdir', 'os.listdir', (['"""../sharedtask_11/"""'], {}), "('../sharedtask_11/')\n", (352, 373), False, 'import os, json\n'), ((394, 436), 'os.listdir', 'os.listdir', (["('../sharedtask_11/' + lang_dir)"], {}), "('../sharedtask_11/' + lang_dir)\n", (404, 436), False, 'import os, json\n'), ((2397, 2423), 'json.dump', 'json.dump', (['counter_dict', 'f'], {}), '(counter_dict, f)\n', (2406, 2423), False, 'import os, json\n'), ((2525, 2548), 'json.dump', 'json.dump', (['count_all', 'f'], {}), '(count_all, f)\n', (2534, 2548), False, 'import os, json\n'), ((2668, 2694), 'json.dump', 'json.dump', (['counter_dict', 'f'], {}), '(counter_dict, f)\n', (2677, 2694), False, 'import os, json\n'), ((2810, 2833), 'json.dump', 'json.dump', (['count_all', 'f'], {}), '(count_all, f)\n', (2819, 2833), False, 'import os, json\n'), ((493, 502), 'collections.Counter', 'Counter', ([], {}), '()\n', (500, 502), False, 'from collections import Counter\n')] |
#!/usr/bin/env python3
import sys
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter, FileType
from Bio import Entrez
from rex.util import batchify
def parse_args(argv):
parser = ArgumentParser(description="eutil", formatter_class=ArgumentDefaultsHelpFormatter)
parser.add_argument(
"eutil", default="efetch", help="the E-utility to use"
)
parser.add_argument(
"id", type=FileType(), help="the identifiers"
)
parser.add_argument(
"-db", "--db", "-database", "--database", default="nuccore",
help="the NCBI database"
)
parser.add_argument(
"-params", help="the space separated key=value pairs"
)
parser.add_argument(
"-post-size", "--post-size", type=int, default=200,
help="the number of records to post at a time"
)
parser.add_argument(
"-email", "--email", default="",
help="the e-mail to identify yourself to NCBI (for politeness reasons)"
)
args = parser.parse_args(argv)
return args
def main(argv):
args = parse_args(argv[1:])
Entrez.email = args.email
eutil = getattr(Entrez, args.eutil)
params = dict(item.split("=") for item in args.params.split()) if args.params else {}
with args.id as file:
for batch in batchify(map(str.strip, file), size=args.post_size):
with eutil(db=args.db, id=",".join(batch), **params) as handle:
sys.stdout.write(handle.read())
return 0
if __name__ == "__main__":
sys.exit(main(sys.argv))
| [
"argparse.FileType",
"argparse.ArgumentParser"
] | [((201, 288), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""eutil"""', 'formatter_class': 'ArgumentDefaultsHelpFormatter'}), "(description='eutil', formatter_class=\n ArgumentDefaultsHelpFormatter)\n", (215, 288), False, 'from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter, FileType\n'), ((401, 411), 'argparse.FileType', 'FileType', ([], {}), '()\n', (409, 411), False, 'from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter, FileType\n')] |
from django.contrib import admin
from establishment.documentation.models import DocumentationEntry
admin.site.register(DocumentationEntry)
| [
"django.contrib.admin.site.register"
] | [((101, 140), 'django.contrib.admin.site.register', 'admin.site.register', (['DocumentationEntry'], {}), '(DocumentationEntry)\n', (120, 140), False, 'from django.contrib import admin\n')] |
from precise.skaters.covariance.allcovskaters import ALL_D0_SKATERS
from precise.skaters.covarianceutil.likelihood import cov_skater_loglikelihood
from uuid import uuid4
import os
import json
import pathlib
from pprint import pprint
import traceback
from collections import Counter
from momentum.functions import rvar
from precise.skatertools.data.equity import random_m6_returns
from precise.whereami import SKATER_WIN_DATA
import numpy as np
import time
DEFAULT_M6_PARAMS = {'n_dim': 25,
'n_obs': 356,
'n_burn':300,
'atol': 1,
'lb':-1000,
'ub':1000,
'interval':'d'}
def params_category_and_data(params:dict):
"""
Supplement params (usually inferred from battle script file names) with defaults
"""
if params['topic']== 'm6':
combined_params = DEFAULT_M6_PARAMS
combined_params.update(params)
descriptions = {'m': 'm6_stocks_monthly',
'd': 'm6_stocks_daily'}
combined_params['description'] = descriptions[combined_params['interval']]
category = combined_params['description'] + '_p' + str(combined_params['n_dim']) + '_n' + str(combined_params['n_burn'])
xs = random_m6_returns(verbose=False, **combined_params)
return combined_params, category, xs
else:
raise ValueError('m6 is only topic, for now')
def skater_battle( params:dict ):
"""
Write results to a new queue
"""
n_per_battle = 3
atol = 1.0
try:
params, category, xs_test = params_category_and_data(params=params)
except Exception as e:
print(e)
pprint(params)
raise ValueError('Something is probably wrong with params for getting data, so this config will not fly')
print('Data retrieval test passed for category '+category)
pprint(params)
time.sleep(1)
print('Will test the following skaters')
pprint(ALL_D0_SKATERS)
qn = str(uuid4())+'.json'
queue_dir = os.path.join(SKATER_WIN_DATA, category)
queue = os.path.join(queue_dir,qn)
pathlib.Path(queue_dir).mkdir(parents=True, exist_ok=True)
print(queue)
battles = Counter()
timing = dict()
reliability = dict()
failures = dict()
worst_ll_seen = 10000000
lb = params['lb']
ub = params['ub']
while True:
n_obs = params['n_obs']
params, category, xs = params_category_and_data(params=params)
assert len(xs)==n_obs
xs = np.array(xs)
np.random.shuffle(ALL_D0_SKATERS)
fs = ALL_D0_SKATERS[:n_per_battle]
stuff = list()
for f in fs:
try:
ll, metrics = cov_skater_loglikelihood(f=f, xs=xs, n_burn=params['n_burn'], with_metrics=True, lb=lb, ub=ub)
metrics['name']=f.__name__
metrics['traceback']=''
metrics['passing']=1
stuff.append( (ll,metrics) )
if ll<worst_ll_seen:
worst_ll_seen = ll
print({'worst_ll_seen':ll})
name = metrics['name']
if name not in timing:
timing[name] = {}
timing[name] = rvar(timing[name], x=metrics['time'], rho=0.05)
if name not in reliability:
reliability[name] = {}
reliability[name] = rvar(reliability[name], x=1.0, rho=0.05)
except Exception as e:
metrics = {'name':f.__name__,'passing':0,'traceback':traceback.format_exc(),'ll':-100000000}
if f.__name__ not in reliability:
reliability[f.__name__] = {}
reliability[f.__name__] = rvar(reliability[f.__name__], x=0.0, rho=0.05)
failures[f.__name__] = traceback.format_exc()
ll = worst_ll_seen
stuff.append( (ll,metrics))
valid = [ s for s in stuff if s[1]['passing']>0.5 ]
if len(valid)<=2:
print('urhg')
for i, mi in enumerate(valid):
for j, mj in enumerate(valid):
if j != i:
if mi[0] > mj[0]+atol:
i_name = mi[1]['name']
j_name = mj[1]['name']
cmp_name = i_name+'>'+j_name
battles.update({cmp_name:1.0})
reliabilties = dict([(nm, reliab['mean']) for nm,reliab in reliability.items() ] )
cpu_times = dict([(nm, tm['mean']) for nm, tm in timing.items()])
if np.random.rand()<0.01:
with open(queue,'wt') as fh:
json.dump(battles,fh)
print('---')
pprint(reliabilties)
print('---')
pprint(cpu_times)
print('---')
pprint(battles)
print(' ')
pprint(failures)
| [
"traceback.format_exc",
"numpy.random.rand",
"pathlib.Path",
"json.dump",
"os.path.join",
"time.sleep",
"uuid.uuid4",
"collections.Counter",
"numpy.array",
"precise.skaters.covarianceutil.likelihood.cov_skater_loglikelihood",
"precise.skatertools.data.equity.random_m6_returns",
"pprint.pprint"... | [((1910, 1924), 'pprint.pprint', 'pprint', (['params'], {}), '(params)\n', (1916, 1924), False, 'from pprint import pprint\n'), ((1929, 1942), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (1939, 1942), False, 'import time\n'), ((1992, 2014), 'pprint.pprint', 'pprint', (['ALL_D0_SKATERS'], {}), '(ALL_D0_SKATERS)\n', (1998, 2014), False, 'from pprint import pprint\n'), ((2062, 2101), 'os.path.join', 'os.path.join', (['SKATER_WIN_DATA', 'category'], {}), '(SKATER_WIN_DATA, category)\n', (2074, 2101), False, 'import os\n'), ((2114, 2141), 'os.path.join', 'os.path.join', (['queue_dir', 'qn'], {}), '(queue_dir, qn)\n', (2126, 2141), False, 'import os\n'), ((2236, 2245), 'collections.Counter', 'Counter', ([], {}), '()\n', (2243, 2245), False, 'from collections import Counter\n'), ((1290, 1341), 'precise.skatertools.data.equity.random_m6_returns', 'random_m6_returns', ([], {'verbose': '(False)'}), '(verbose=False, **combined_params)\n', (1307, 1341), False, 'from precise.skatertools.data.equity import random_m6_returns\n'), ((2550, 2562), 'numpy.array', 'np.array', (['xs'], {}), '(xs)\n', (2558, 2562), True, 'import numpy as np\n'), ((2571, 2604), 'numpy.random.shuffle', 'np.random.shuffle', (['ALL_D0_SKATERS'], {}), '(ALL_D0_SKATERS)\n', (2588, 2604), True, 'import numpy as np\n'), ((1713, 1727), 'pprint.pprint', 'pprint', (['params'], {}), '(params)\n', (1719, 1727), False, 'from pprint import pprint\n'), ((2029, 2036), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (2034, 2036), False, 'from uuid import uuid4\n'), ((2145, 2168), 'pathlib.Path', 'pathlib.Path', (['queue_dir'], {}), '(queue_dir)\n', (2157, 2168), False, 'import pathlib\n'), ((4598, 4614), 'numpy.random.rand', 'np.random.rand', ([], {}), '()\n', (4612, 4614), True, 'import numpy as np\n'), ((2740, 2839), 'precise.skaters.covarianceutil.likelihood.cov_skater_loglikelihood', 'cov_skater_loglikelihood', ([], {'f': 'f', 'xs': 'xs', 'n_burn': "params['n_burn']", 'with_metrics': '(True)', 
'lb': 'lb', 'ub': 'ub'}), "(f=f, xs=xs, n_burn=params['n_burn'], with_metrics=\n True, lb=lb, ub=ub)\n", (2764, 2839), False, 'from precise.skaters.covarianceutil.likelihood import cov_skater_loglikelihood\n'), ((3271, 3318), 'momentum.functions.rvar', 'rvar', (['timing[name]'], {'x': "metrics['time']", 'rho': '(0.05)'}), "(timing[name], x=metrics['time'], rho=0.05)\n", (3275, 3318), False, 'from momentum.functions import rvar\n'), ((3442, 3482), 'momentum.functions.rvar', 'rvar', (['reliability[name]'], {'x': '(1.0)', 'rho': '(0.05)'}), '(reliability[name], x=1.0, rho=0.05)\n', (3446, 3482), False, 'from momentum.functions import rvar\n'), ((4678, 4700), 'json.dump', 'json.dump', (['battles', 'fh'], {}), '(battles, fh)\n', (4687, 4700), False, 'import json\n'), ((4745, 4765), 'pprint.pprint', 'pprint', (['reliabilties'], {}), '(reliabilties)\n', (4751, 4765), False, 'from pprint import pprint\n'), ((4811, 4828), 'pprint.pprint', 'pprint', (['cpu_times'], {}), '(cpu_times)\n', (4817, 4828), False, 'from pprint import pprint\n'), ((4874, 4889), 'pprint.pprint', 'pprint', (['battles'], {}), '(battles)\n', (4880, 4889), False, 'from pprint import pprint\n'), ((4933, 4949), 'pprint.pprint', 'pprint', (['failures'], {}), '(failures)\n', (4939, 4949), False, 'from pprint import pprint\n'), ((3768, 3814), 'momentum.functions.rvar', 'rvar', (['reliability[f.__name__]'], {'x': '(0.0)', 'rho': '(0.05)'}), '(reliability[f.__name__], x=0.0, rho=0.05)\n', (3772, 3814), False, 'from momentum.functions import rvar\n'), ((3854, 3876), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (3874, 3876), False, 'import traceback\n'), ((3587, 3609), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (3607, 3609), False, 'import traceback\n')] |
"""
Advent of Code : Day 09
"""
from os import path
def parse_input(filename):
""" Parse input file values """
script_dir = path.dirname(__file__)
file_path = path.join(script_dir, filename)
with open(file_path, "r") as file:
val = list(map(int, file.read().splitlines()))
return val
def find_pair(target, values):
""" Return sum pair for target value """
values = set(values)
for value in values:
if target - value in values:
return True
return False
# PART 1
def part1(values, window=25):
""" Find invalid XMAS no. """
index = 0
for value in values[window:]:
if find_pair(value, values[index : (window + index)]):
index += 1
else:
return value
return -1
# PART 2
def part2(values, window=25):
""" Solve part two """
target = part1(values, window)
sum_, index, offset = 0, 0, 0
while sum_ != target:
if sum_ < target:
sum_ += values[offset + index]
index += 1
if sum_ > target:
offset += 1
sum_ = 0
index = 0
min_of_range = min(values[offset : offset + index])
max_of_range = max(values[offset : offset + index])
return min_of_range + max_of_range
| [
"os.path.dirname",
"os.path.join"
] | [((135, 157), 'os.path.dirname', 'path.dirname', (['__file__'], {}), '(__file__)\n', (147, 157), False, 'from os import path\n'), ((174, 205), 'os.path.join', 'path.join', (['script_dir', 'filename'], {}), '(script_dir, filename)\n', (183, 205), False, 'from os import path\n')] |
#!/usr/bin/env python3
'''
==============================================================
Copyright © 2019 Intel Corporation
SPDX-License-Identifier: MIT
==============================================================
'''
import intel.tca as tca
target = tca.get_target(id="whl_u_cnp_lp")
components = [(c.component, tca.latest(c.steppings))
for c in target.components]
component_config = tca.ComponentWithSelectedSteppingList()
for comp in components:
config_tmp = tca.ComponentWithSelectedStepping()
config_tmp.component, config_tmp.stepping = comp
supported_connections = target.get_supported_connection_configurations(
component_config)
def conn_filter(conn: tca.ConnectionConfiguration) -> bool:
if conn.type != tca.ConnectionType_IPC:
return False
if "CCA" not in conn.ipc_configuration.selection:
return False
return True
connection_config = next(filter(conn_filter, supported_connections))
profile = tca.Profile()
profile.name = "My TCA profile"
profile.target = target
profile.component_configuration = component_config
profile.connection_configuration = connection_config
tca.load(profile)
tca.connect()
| [
"intel.tca.ComponentWithSelectedSteppingList",
"intel.tca.get_target",
"intel.tca.latest",
"intel.tca.ComponentWithSelectedStepping",
"intel.tca.load",
"intel.tca.connect",
"intel.tca.Profile"
] | [((259, 292), 'intel.tca.get_target', 'tca.get_target', ([], {'id': '"""whl_u_cnp_lp"""'}), "(id='whl_u_cnp_lp')\n", (273, 292), True, 'import intel.tca as tca\n'), ((407, 446), 'intel.tca.ComponentWithSelectedSteppingList', 'tca.ComponentWithSelectedSteppingList', ([], {}), '()\n', (444, 446), True, 'import intel.tca as tca\n'), ((970, 983), 'intel.tca.Profile', 'tca.Profile', ([], {}), '()\n', (981, 983), True, 'import intel.tca as tca\n'), ((1144, 1161), 'intel.tca.load', 'tca.load', (['profile'], {}), '(profile)\n', (1152, 1161), True, 'import intel.tca as tca\n'), ((1162, 1175), 'intel.tca.connect', 'tca.connect', ([], {}), '()\n', (1173, 1175), True, 'import intel.tca as tca\n'), ((488, 523), 'intel.tca.ComponentWithSelectedStepping', 'tca.ComponentWithSelectedStepping', ([], {}), '()\n', (521, 523), True, 'import intel.tca as tca\n'), ((321, 344), 'intel.tca.latest', 'tca.latest', (['c.steppings'], {}), '(c.steppings)\n', (331, 344), True, 'import intel.tca as tca\n')] |
from cvpro import stackImages
import cv2
cap = cv2.VideoCapture(0)
while True:
success, img = cap.read()
imgGray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
imgList = [img, img, imgGray, img, imgGray]
imgStacked = stackImages(imgList, 2, 0.5)
cv2.imshow("stackedImg", imgStacked)
cv2.waitKey(1)
| [
"cvpro.stackImages",
"cv2.imshow",
"cv2.VideoCapture",
"cv2.cvtColor",
"cv2.waitKey"
] | [((51, 70), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (67, 70), False, 'import cv2\n'), ((130, 167), 'cv2.cvtColor', 'cv2.cvtColor', (['img', 'cv2.COLOR_BGR2GRAY'], {}), '(img, cv2.COLOR_BGR2GRAY)\n', (142, 167), False, 'import cv2\n'), ((237, 265), 'cvpro.stackImages', 'stackImages', (['imgList', '(2)', '(0.5)'], {}), '(imgList, 2, 0.5)\n', (248, 265), False, 'from cvpro import stackImages\n'), ((273, 309), 'cv2.imshow', 'cv2.imshow', (['"""stackedImg"""', 'imgStacked'], {}), "('stackedImg', imgStacked)\n", (283, 309), False, 'import cv2\n'), ((315, 329), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (326, 329), False, 'import cv2\n')] |
#!/usr/bin/python2
from pprint import PrettyPrinter
from argparse import ArgumentParser
def parse_image_header(input_file):
    """Parse the fixed-order header of a FIG (xfig) file.

    Consumes lines from *input_file* up to and including the
    resolution/coordinate-system line, then stops so the caller can keep
    reading the object section from the same handle.

    Args:
        input_file: an iterable of text lines (e.g. an open file object)
            positioned at the start of the file.

    Returns:
        dict: the parsed header fields; ``comment`` is ``None`` when the
        header contains no optional ``#`` comment lines.
    """
    comment = None
    for num, line in enumerate(input_file):
        line = line.strip()
        if num == 0:
            # First line is the "#FIG <version>" comment; nothing to keep.
            continue
        elif num == 1:
            # Orientation = Landscape | Portrait
            orientation = line
        elif num == 2:
            # Justification = Center | Flush Left
            justification = line
        elif num == 3:
            # Units = Metric | Inches
            units = line
        elif num == 4:
            # PaperSize = Letter | Legal | Ledger | Tabloid |
            #             A | B | C | D | E | A4 | A3 | A2 | A1 | A0 | B5
            papersize = line
        elif num == 5:
            # Magnification = FLOAT
            magnification = float(line)
        elif num == 6:
            # MultiPage = Single | Multiple
            multipage = (line == 'Multiple')
        elif num == 7:
            # TransparentColor = -3=bg | -2=none | -1=default |
            #                    0-31=stdcolors | 32-=usercolors
            transpcolor = int(line)
        elif line.startswith('#'):
            # Optional comment lines; accumulate them all.
            # Bug fix: `comment` started as None, so `comment += line`
            # raised TypeError on the first comment line.
            if comment is None:
                comment = ''
            comment += line
            comment += '\n'
        else:
            # Resolution = units/inch & CoordSystem = 1=LowerLeft | 2=UpperLeft
            tmp_line = line.split(' ')
            resolution = int(tmp_line[0])
            coordsystem = int(tmp_line[1])
            break  # VERY IMPORTANT: leave the object section unread
    return {'orientation': orientation,
            'justification': justification,
            'units': units,
            'papersize': papersize,
            'magnification': magnification,
            'multipage': multipage,
            'transpcolor': transpcolor,
            'comment': comment,
            'resolution': resolution,
            'coordsystem': coordsystem}
def parse_objects(line, input_file):
    """Parse the object section of a FIG file (work in progress).

    *line* is the first line of an object record (already read by the
    caller) and *input_file* is the file handle positioned just after it.

    NOTE(review): this parser is an unfinished stub -- it decodes the
    attribute fields of a type-2 record into local variables but never
    stores them, and the trailing loop consumes every remaining line of
    *input_file* without building anything, so the function always
    returns an empty list.
    """
    objects = []
    tmp_line = line.split(' ')
    if int(tmp_line[0]) == 2: # polyline, polygon, box
        # SubType: 1=polyline, 2=box, 3=polygon, 4=arc-box, 5=pic
        subtype = int(tmp_line[1])
        # LineStyle: -1=Default, 0=Solid, 1=Dashed, 2=Dotted, 3=Dash-dotted,
        # 4=Dash-double-dotted, 5=Dash-triple-dotted
        linestyle = int(tmp_line[2])
        thickness = int(tmp_line[3])
        pencolor = int(tmp_line[4])
        fillcolor = int(tmp_line[5])
        depth = int(tmp_line[6]) # 0...999
        penstyle = int(tmp_line[7]) # NOT USED
        areafill = int(tmp_line[8]) # -1=not filled
        styleval = float(tmp_line[9])
        # 0=Miter, 1=Round, 2=Bevel
        joinstyle = int(tmp_line[10])
        # ONLY FOR POLYLINE
        # 0=Butt, 1=Round, 2=Projecting
        capstyle = int(tmp_line[11])
        radius = int(tmp_line[12])
        # 0=off, 1=on
        forwardarrow = int(tmp_line[13])
        backwardarrow = int(tmp_line[14])
        npoints = int(tmp_line[15])
        # TODO: read the npoints coordinate lines; currently this just
        # drains the rest of the file.
        for line in input_file:
            pass
    return objects
if __name__ == '__main__':
    # Command-line front end: parse a FIG file and pretty-print the header,
    # the pseudo-color table and the (partially implemented) object list.
    argparser = ArgumentParser(description='Convert a FIG file')
    argparser.add_argument('input', help='Input FIG file to convert')
    argparser.add_argument('output', help='Output file')
    argparser.add_argument('format', help='Output format')
    args = argparser.parse_args()
    # NOTE(review): `output` and `format` are accepted but never used --
    # the conversion step appears not to be implemented yet.
    header = None
    pseudocolors = {}
    objects = []
    with open(args.input, 'r') as input_file:
        header = parse_image_header(input_file)
        for line in input_file:
            line = line.strip()
            if line.startswith('0'):
                # Color pseudo-object line: "0 <index> <rgb-hex>".
                tmp_line = line.split(' ')
                pseudocolors[int(tmp_line[1])] = tmp_line[2]
            else:
                objects = parse_objects(line, input_file)
    pp = PrettyPrinter(indent=2)
    pp.pprint(header)
    pp.pprint(pseudocolors)
    pp.pprint(objects)
| [
"pprint.PrettyPrinter",
"argparse.ArgumentParser"
] | [((3232, 3280), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""Convert a FIG file"""'}), "(description='Convert a FIG file')\n", (3246, 3280), False, 'from argparse import ArgumentParser\n'), ((3945, 3968), 'pprint.PrettyPrinter', 'PrettyPrinter', ([], {'indent': '(2)'}), '(indent=2)\n', (3958, 3968), False, 'from pprint import PrettyPrinter\n')] |
#!/usr/bin/env python
# coding: utf-8
# In[1]:
import dask
from dask_kubernetes import KubeCluster
import numpy as np
# In[ ]:
#tag::remote_lb_deploy[]
# In[2]:
# Specify a remote deployment using a load balancer, necessary for communication with notebook from cluster
dask.config.set({"kubernetes.scheduler-service-type": "LoadBalancer"})
# In[4]:
# Worker pods are defined by the external worker-spec.yaml manifest.
cluster = KubeCluster.from_yaml('worker-spec.yaml', namespace='dask', deploy_mode='remote')
# In[ ]:
#end::remote_lb_deploy[]
# In[5]:
# Autoscale between 1 and 100 workers based on load.
cluster.adapt(minimum=1, maximum=100)
# In[6]:
# Example usage
from dask.distributed import Client
import dask.array as da
# Connect Dask to the cluster
client = Client(cluster)
# In[7]:
client.scheduler_comm.comm.handshake_info()
# In[8]:
# Create a large array and calculate the mean
array = da.ones((1000, 1000, 1000))
print(array.mean().compute())  # Should print 1.0
# In[9]:
print(array.mean().compute())
# In[10]:
print(array.sum().compute())
# In[13]:
dir(array)
# In[18]:
# NOTE(review): uses NumPy's take on a dask array (relies on dispatch to
# dask); the computed result of this cell is discarded.
np.take(array, indices=[0, 10]).sum().compute()
# In[15]:
# In[ ]:
| [
"dask.config.set",
"dask_kubernetes.KubeCluster.from_yaml",
"dask.distributed.Client",
"numpy.take",
"dask.array.ones"
] | [((280, 350), 'dask.config.set', 'dask.config.set', (["{'kubernetes.scheduler-service-type': 'LoadBalancer'}"], {}), "({'kubernetes.scheduler-service-type': 'LoadBalancer'})\n", (295, 350), False, 'import dask\n'), ((374, 460), 'dask_kubernetes.KubeCluster.from_yaml', 'KubeCluster.from_yaml', (['"""worker-spec.yaml"""'], {'namespace': '"""dask"""', 'deploy_mode': '"""remote"""'}), "('worker-spec.yaml', namespace='dask', deploy_mode=\n 'remote')\n", (395, 460), False, 'from dask_kubernetes import KubeCluster\n'), ((674, 689), 'dask.distributed.Client', 'Client', (['cluster'], {}), '(cluster)\n', (680, 689), False, 'from dask.distributed import Client\n'), ((814, 841), 'dask.array.ones', 'da.ones', (['(1000, 1000, 1000)'], {}), '((1000, 1000, 1000))\n', (821, 841), True, 'import dask.array as da\n'), ((1018, 1049), 'numpy.take', 'np.take', (['array'], {'indices': '[0, 10]'}), '(array, indices=[0, 10])\n', (1025, 1049), True, 'import numpy as np\n')] |
from sys import stdin
# Main program
def main(stream=stdin):
    """Solve the expense-sharing problem for every dataset on *stream*.

    Each dataset starts with a line holding the number of people ``n``
    (a ``0`` line terminates the input), followed by ``n`` expense
    lines.  For each dataset the minimum total amount of money that must
    change hands to equalize everybody's expenses is printed as
    ``$X.XX``.

    Args:
        stream: iterable of text lines (defaults to ``sys.stdin``);
            added so the routine can be driven from tests.

    Returns:
        list[str]: the formatted amounts, one per dataset (also printed).
    """
    results = []
    lines = iter(stream)
    for line in lines:
        n = int(line)
        if n == 0:
            break
        # Read the n expenses into a plain list.  (Bug fix: the original
        # used a fixed 1000-slot buffer that raised IndexError for
        # datasets with more than 1000 people.)
        expenses = [float(next(lines)) for _ in range(n)]
        average = sum(expenses) / n
        to_exchange_pos = 0.0
        to_exchange_neg = 0.0
        for expense in expenses:
            # Truncate the difference to whole cents (toward zero),
            # exactly as the original did, before accumulating.
            dif = float(int((expense - average) * 100.0) / 100.0)
            if dif > 0:
                to_exchange_pos += dif
            else:
                to_exchange_neg += dif
        # The answer is the larger of the money owed and the money due.
        if -to_exchange_neg > to_exchange_pos:
            min_to_exchange = -to_exchange_neg
        else:
            min_to_exchange = to_exchange_pos
        output = "$%.2f" % min_to_exchange
        print(output)
        results.append(output)
    return results
# Script entry point: process all expense datasets from standard input.
main()
| [
"sys.stdin.readline"
] | [((262, 278), 'sys.stdin.readline', 'stdin.readline', ([], {}), '()\n', (276, 278), False, 'from sys import stdin\n')] |
#!/usr/bin/env python3
# TODO: Write a command line tool to browser and search in the database
# TODO: Define a command set to search for strings, tags, similar talks, mark talks as seen, mark talks as irrelevant, mark talks as relevant, open a browser and watch, show details, quit
# https://opensource.com/article/17/5/4-practical-python-libraries
# TODO: Maybe use fuzzyfinder
# TODO: use prompt_toolkit autocompletion, auto suggestion and history
# TODO: Use pygments for syntax highlighting https://pygments.org/
from prompt_toolkit import prompt
from prompt_toolkit.history import FileHistory
from prompt_toolkit.auto_suggest import AutoSuggestFromHistory
from prompt_toolkit.completion import NestedCompleter
from dropdata import MediaTagger
import argparse
def printHelp():
    """Print the command reference for the interactive talk browser."""
    help_text = """
    tags: list tags
    TODO tags + tag: list all talks containing a specific tag
    TODO similar: Find similar content
    TODO seen: Mark talks as seen
    TODO irrelevant: Mark talks as irrelevant
    TODO relevant: Mark talks as relevant
    TODO show: Show content in browser
    TODO details: Show details
    quit: quit
    help: get help
    """
    print(help_text)
def getCompleter():
    """Build the nested tab-completion tree for the prompt.

    The ``tags`` command is completed with every tag known to the
    offline media database (plus an empty option); the remaining
    commands take no argument.
    """
    tagger = MediaTagger(frab=False, subtitles=False, default=False, offline=True)
    tag_completions = dict.fromkeys(tagger.list_tags() + [""])
    command_tree = {
        'help': None,     # Show help
        'quit': None,     # Quit
        'tags': tag_completions,  # Search for tags
        'similar': None,  # Find similar content using k-nearest
    }
    return NestedCompleter.from_nested_dict(command_tree)
if __name__=="__main__":
    ### Parsing args
    parser = argparse.ArgumentParser()
    parser.add_argument("--data", help="Database file name", default = "frab.json", type = str)
    args = parser.parse_args()
    ### Load data

    ### Logic
    # Interactive REPL: keep prompting (with history + autosuggest) until
    # the user types "quit".
    BrowserCompleter = getCompleter()
    mt = MediaTagger(frab=False, subtitles=False, default=False, offline=True)
    mt.read_file(args.data)
    while 1:
        user_input = prompt('> ',
                            history=FileHistory("history.txt"),
                            auto_suggest=AutoSuggestFromHistory(),
                            completer=BrowserCompleter,
                            )
        # Commands are matched case-insensitively.
        user_input = user_input.lower()
        if user_input == "quit":
            break
        elif user_input == "help":
            printHelp()
        elif user_input == "tags":
            # pure tags, list them
            print(",".join(mt.list_tags()))
        else:
            # Unrecognized input is simply echoed back.
            print(user_input)
| [
"prompt_toolkit.auto_suggest.AutoSuggestFromHistory",
"prompt_toolkit.history.FileHistory",
"dropdata.MediaTagger",
"argparse.ArgumentParser"
] | [((1252, 1321), 'dropdata.MediaTagger', 'MediaTagger', ([], {'frab': '(False)', 'subtitles': '(False)', 'default': '(False)', 'offline': '(True)'}), '(frab=False, subtitles=False, default=False, offline=True)\n', (1263, 1321), False, 'from dropdata import MediaTagger\n'), ((1809, 1834), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1832, 1834), False, 'import argparse\n'), ((2045, 2114), 'dropdata.MediaTagger', 'MediaTagger', ([], {'frab': '(False)', 'subtitles': '(False)', 'default': '(False)', 'offline': '(True)'}), '(frab=False, subtitles=False, default=False, offline=True)\n', (2056, 2114), False, 'from dropdata import MediaTagger\n'), ((2226, 2252), 'prompt_toolkit.history.FileHistory', 'FileHistory', (['"""history.txt"""'], {}), "('history.txt')\n", (2237, 2252), False, 'from prompt_toolkit.history import FileHistory\n'), ((2295, 2319), 'prompt_toolkit.auto_suggest.AutoSuggestFromHistory', 'AutoSuggestFromHistory', ([], {}), '()\n', (2317, 2319), False, 'from prompt_toolkit.auto_suggest import AutoSuggestFromHistory\n')] |
"""
The pycity_scheduling framework
Copyright (C) 2022,
Institute for Automation of Complex Power Systems (ACS),
E.ON Energy Research Center (E.ON ERC),
RWTH Aachen University
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit
persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import numpy as np
import pyomo.environ as pyomo
import pycity_base.classes.supply.electrical_heater as eh
from pycity_scheduling.util.generic_constraints import LowerActivationLimit
from pycity_scheduling.classes.thermal_entity_heating import ThermalEntityHeating
from pycity_scheduling.classes.electrical_entity import ElectricalEntity
class ElectricalHeater(ThermalEntityHeating, ElectricalEntity, eh.ElectricalHeater):
    """
    Extension of pyCity_base class ElectricalHeater for scheduling purposes.

    Parameters
    ----------
    environment : pycity_scheduling.classes.Environment
        Common to all other objects. Includes time and weather instances.
    p_th_nom : float
        Nominal thermal power output in [kW].
    eta : float, optional
        Efficiency of the electrical heater. Defaults to one.
    lower_activation_limit : float, optional (only adhered to in integer mode)
        Must be in [0, 1]. Lower activation limit of the electrical heater
        as a percentage of the rated power. When the electrical heater is
        in operation, its power must be zero or between the lower activation
        limit and its rated power.

        - `lower_activation_limit = 0`: Linear behavior
        - `lower_activation_limit = 1`: Two-point controlled

    Notes
    -----
    - EHs offer sets of constraints for operation. In the `convex` mode the
      following constraints and bounds are generated by the EH:

    .. math::
        0 \\geq p_{th\\_heat} &\\geq& -p_{th\\_nom} \\\\
        \\eta * p_{el} &=& - p_{th\\_heat}

    - See also:
        - pycity_scheduling.util.generic_constraints.LowerActivationLimit: Generates additional constraints for the
          `lower_activation_limit` in `integer` mode.
    """

    def __init__(self, environment, p_th_nom, eta=1, lower_activation_limit=0):
        # Convert p_th_nom from kW to W for the pyCity_base constructor.
        # NOTE(review): the original comment claimed a flow temperature of
        # 55 C, but the value actually passed is 85 -- confirm the intent.
        super().__init__(environment, p_th_nom*1000, eta, 85, lower_activation_limit)
        self._long_id = "EH_" + self._id_string

        self.p_th_nom = p_th_nom
        self.activation_constr = LowerActivationLimit(self, "p_th_heat", lower_activation_limit, -p_th_nom)

    def populate_model(self, model, mode="convex"):
        """
        Add device block to pyomo ConcreteModel.

        Call parent's `populate_model` method and set thermal variables upper
        bounds to `self.p_th_nom`. Also add constraint to bind electrical
        demand to thermal output.

        Parameters
        ----------
        model : pyomo.ConcreteModel
        mode : str, optional
            Specifies which set of constraints to use.

            - `convex` : Use linear constraints
            - `integer`  : Use integer variables representing discrete control decisions

        Raises
        ------
        ValueError
            If `mode` is neither "convex" nor "integer".
        """
        super().populate_model(model, mode)
        m = self.model

        # Bug fix: the original test was `mode == "convex" or "integer"`,
        # which is always true (a non-empty string is truthy), so invalid
        # modes were silently accepted and the ValueError was unreachable.
        if mode in ("convex", "integer"):
            # Heating power is negative (consumption side); bound by rating.
            m.p_th_heat_vars.setlb(-self.p_th_nom)
            m.p_th_heat_vars.setub(0.0)

            def p_coupl_rule(model, t):
                # Electrical demand scaled by efficiency equals thermal output.
                return - model.p_th_heat_vars[t] == self.eta * model.p_el_vars[t]
            m.p_coupl_constr = pyomo.Constraint(m.t, rule=p_coupl_rule)

            self.activation_constr.apply(m, mode)
        else:
            raise ValueError(
                "Mode %s is not implemented by class ElectricalHeater." % str(mode)
            )
        return
| [
"pycity_scheduling.util.generic_constraints.LowerActivationLimit",
"pyomo.environ.Constraint"
] | [((3280, 3354), 'pycity_scheduling.util.generic_constraints.LowerActivationLimit', 'LowerActivationLimit', (['self', '"""p_th_heat"""', 'lower_activation_limit', '(-p_th_nom)'], {}), "(self, 'p_th_heat', lower_activation_limit, -p_th_nom)\n", (3300, 3354), False, 'from pycity_scheduling.util.generic_constraints import LowerActivationLimit\n'), ((4321, 4361), 'pyomo.environ.Constraint', 'pyomo.Constraint', (['m.t'], {'rule': 'p_coupl_rule'}), '(m.t, rule=p_coupl_rule)\n', (4337, 4361), True, 'import pyomo.environ as pyomo\n')] |
"""
API functionality for bootcamps
"""
import logging
from datetime import datetime, timedelta
import pytz
from django.core.exceptions import ValidationError, ObjectDoesNotExist
from django.db.models import Sum
from applications.constants import AppStates
from ecommerce.models import Line, Order
from klasses.constants import DATE_RANGE_MONTH_FMT, ENROLL_CHANGE_STATUS_DEFERRED
from klasses.models import BootcampRun, BootcampRunEnrollment
from main import features
from novoed import tasks as novoed_tasks
log = logging.getLogger(__name__)
def deactivate_run_enrollment(
    *, run_enrollment=None, user=None, bootcamp_run=None, change_status=None
):
    """
    Deactivate a BootcampRunEnrollment.

    Either pass the enrollment itself, or pass both the user and the
    bootcamp run so the enrollment can be looked up.

    Args:
        run_enrollment (Optional[BootcampRunEnrollment]): The bootcamp run enrollment to deactivate
        user (Optional[User]): The enrolled user (only required if run_enrollment is not provided)
        bootcamp_run (Optional[BootcampRun]): The enrolled bootcamp run (only required if run_enrollment
            is not provided)
        change_status (Optional[str]): The change status to set on the enrollment when deactivating

    Returns:
        Optional[BootcampRunEnrollment]: The updated enrollment (or None if the enrollment doesn't exist)
    """
    if run_enrollment is None:
        if user is None or bootcamp_run is None:
            raise ValueError("Must provide run_enrollment, or both user and bootcamp_run")
        run_enrollment = BootcampRunEnrollment.objects.filter(
            user=user, bootcamp_run=bootcamp_run
        ).first()
        if run_enrollment is None:
            # Nothing to deactivate.
            return
    run_enrollment.active = False
    run_enrollment.change_status = change_status
    run_enrollment.save()
    # Mirror the deactivation on NovoEd when the integration is enabled
    # and the run is linked to a NovoEd course.
    if (
        features.is_enabled(features.NOVOED_INTEGRATION)
        and run_enrollment.bootcamp_run.novoed_course_stub
    ):
        novoed_tasks.unenroll_user_from_novoed_course.delay(
            user_id=run_enrollment.user.id,
            novoed_course_stub=run_enrollment.bootcamp_run.novoed_course_stub,
        )
    return run_enrollment
def adjust_app_state_for_new_price(user, bootcamp_run, new_price=None):
    """
    Given a new price for a bootcamp run, update the user's bootcamp application if (a) it exists, and (b) the new price
    is such that the bootcamp application state is no longer valid (e.g.: the new price is greater than the
    amount that the user has paid, but the application is in the "complete" state)

    Args:
        user (User): The user whose application may be affected
        bootcamp_run (BootcampRun): The bootcamp run of the application that may be affected
        new_price (Optional[Any[int, Decimal]]): The new total price of the bootcamp run (if None, the bootcamp run's
            normal price will be used)

    Returns:
        Optional[BootcampApplication]: The bootcamp application for the user/run referred to by the personal price
            if it was modified (otherwise, None will be returned)
    """
    # Sum of every order line this user has paid toward this run; the
    # aggregate is None when there are no lines, so fall back to 0.
    total_paid_qset = Line.objects.filter(
        order__user=user, bootcamp_run=bootcamp_run
    ).aggregate(aggregate_total_paid=Sum("price"))
    total_paid = total_paid_qset["aggregate_total_paid"] or 0
    new_price = new_price if new_price is not None else bootcamp_run.price
    needs_payment = total_paid < new_price
    application = user.bootcamp_applications.filter(
        bootcamp_run=bootcamp_run,
        # The state needs to change if (a) it's currently complete and now needs more payment, or (b) it's currently
        # awaiting payment and the new price means they don't need to pay any more.
        state=(
            AppStates.COMPLETE.value
            if needs_payment
            else AppStates.AWAITING_PAYMENT.value
        ),
    ).first()
    if application is None:
        # No application in a state that the price change would invalidate.
        return
    # NOTE(review): these look like state-machine transition methods on the
    # application model -- confirm they also persist side effects.
    if needs_payment:
        application.await_further_payment()
    else:
        application.complete()
    application.save()
    log.info(
        "Personal price update caused application state change (user: %s, run: '%s', new state: %s)",
        user.email,
        bootcamp_run.title,
        application.state,
    )
    return application
def _parse_formatted_date_range(date_range_str):
    """
    Parses a string representing a date range (e.g.: "May 1, 2020 - Jan 30, 2021")

    Shorter forms are supported: a single date ("May 1, 2020"), a range
    within one month ("May 1 - 15, 2020"), and a range where only the
    second date carries the year ("May 1 - Jun 15, 2020").

    NOTE(review): the split assumes "-" only appears as the range
    separator -- a hyphen elsewhere in the string would break parsing.

    Args:
        date_range_str (str): A string representing a date range

    Returns:
        Tuple[datetime.datetime, Optional[datetime.datetime]]: A tuple containing the two dates that were parsed from
            the string
    """
    if "-" not in date_range_str:
        # Single date, no range.
        date1_string = date_range_str
        date2_string = None
    else:
        date1_string, date2_string = date_range_str.split("-")
    # e.g. "May 1, 2020" -> parts ["May 1", " 2020"], month/day ["May", "1"]
    date1_parts = date1_string.split(",")
    date1_monthday = date1_parts[0].strip().split(" ")
    month1, day1 = date1_monthday[0], int(date1_monthday[1])
    if not date2_string:
        year1 = int(date1_parts[1].strip())
        month2, day2, year2 = None, None, None
    else:
        date2_parts = date2_string.split(",")
        date2_monthday = date2_parts[0].strip().split(" ")
        year2 = int(date2_parts[1].strip())
        # If the first date omits the year, inherit it from the second.
        year1 = year2 if len(date1_parts) < 2 else int(date1_parts[1].strip())
        if len(date2_monthday) < 2:
            # Second date omits the month ("May 1 - 15, 2020"); reuse month1.
            month2 = month1
            day2 = int(date2_monthday[0])
        else:
            month2 = date2_monthday[0]
            day2 = int(date2_monthday[1])
    date1 = datetime(
        year=year1,
        month=datetime.strptime(month1, DATE_RANGE_MONTH_FMT).month,
        day=day1,
        tzinfo=pytz.UTC,
    )
    date2 = (
        None
        if not date2_string
        else datetime(
            year=year2,
            month=datetime.strptime(month2, DATE_RANGE_MONTH_FMT).month,
            day=day2,
            tzinfo=pytz.UTC,
        )
    )
    return date1, date2
def fetch_bootcamp_run(run_property):
    """
    Fetches a bootcamp run that has a field value (id, title, etc.) that matches the given property

    The lookup cascades: numeric id, then exact run title, then bootcamp
    title, then a parsed 'display_title' ("<bootcamp title>, <date range>").

    Args:
        run_property (str): A string representing some field value for a specific bootcamp run

    Returns:
        BootcampRun: The bootcamp run matching the given property

    Raises:
        BootcampRun.DoesNotExist: If no run matches the derived filters.
    """
    if run_property:
        if run_property.isdigit():
            return BootcampRun.objects.get(id=run_property)
        run = BootcampRun.objects.filter(title=run_property).first()
        if run is not None:
            return run
        # If run_property is a string and didn't match a title, it might be a 'display_title' property value.
        # Attempt to parse that and match it to a run.
        if run is None and "," not in run_property:
            return BootcampRun.objects.get(bootcamp__title=run_property)
        potential_bootcamp_title, potential_date_range = run_property.split(
            ",", maxsplit=1
        )
        potential_start_date, potential_end_date = _parse_formatted_date_range(
            potential_date_range
        )
        run_filters = dict(bootcamp__title=potential_bootcamp_title)
        # Match dates by day: [parsed date, parsed date + 1 day).
        if potential_start_date:
            run_filters.update(
                dict(
                    start_date__gte=potential_start_date,
                    start_date__lt=potential_start_date + timedelta(days=1),
                )
            )
        else:
            run_filters["start_date"] = None
        if potential_end_date:
            run_filters.update(
                dict(
                    end_date__gte=potential_end_date,
                    end_date__lt=potential_end_date + timedelta(days=1),
                )
            )
        else:
            run_filters["end_date"] = None
        try:
            return BootcampRun.objects.get(**run_filters)
        except BootcampRun.DoesNotExist as exc:
            # Re-raise with the filters included to make debugging easier.
            raise BootcampRun.DoesNotExist(
                "Could not find BootcampRun with the following filters: {}".format(
                    run_filters
                )
            ) from exc
def create_run_enrollment(user, run, order=None):
    """
    Creates local records of a user's enrollment in bootcamp runs, and attempts to enroll them
    in novoed via API

    Args:
        user (User): The user to enroll
        run (BootcampRun): The bootcamp run to enroll in
        order (ecommerce.models.Order or None): The order associated with these enrollments

    Returns:
        (BootcampRunEnrollment): enrollment object that were successfully created
    """
    # Idempotent: re-enrolling reactivates an existing record and clears
    # any previous change status.
    enrollment, _ = BootcampRunEnrollment.objects.update_or_create(
        user=user,
        bootcamp_run=run,
        defaults={"active": True, "change_status": None},
    )
    try:
        user.profile.can_skip_application_steps = True
        user.profile.save()
    except ObjectDoesNotExist:
        # Best effort: the user may not have a profile yet.
        pass
    if order:  # enrollment created and order is available
        application = order.application
        application.bootcamp_run = run
        application.save()
    # Mirror the enrollment on NovoEd when the integration is enabled and
    # the run is linked to a NovoEd course.
    if (
        features.is_enabled(features.NOVOED_INTEGRATION)
        and enrollment.bootcamp_run.novoed_course_stub
    ):
        novoed_tasks.enroll_users_in_novoed_course.delay(
            user_ids=[enrollment.user.id],
            novoed_course_stub=enrollment.bootcamp_run.novoed_course_stub,
        )
    return enrollment
def create_run_enrollments(user, runs, order=None):
    """
    Creates local records of a user's enrollment in bootcamp runs, and attempts to enroll them
    in novoed via API

    A failure for one run is logged and does not stop enrollment in the
    remaining runs.

    Args:
        user (User): The user to enroll
        runs (iterable of BootcampRun): The bootcamp runs to enroll in
        order (ecommerce.models.Order or None): The order associated with these enrollments

    Returns:
        (list of BootcampRunEnrollment): A list of enrollment objects that were successfully
            created
    """
    successful_enrollments = []
    for run in runs:
        try:
            successful_enrollments.append(create_run_enrollment(user, run, order))
        except Exception:  # pylint: disable=broad-except
            # Bug fix: the original bare `except:` also swallowed
            # SystemExit/KeyboardInterrupt; catching Exception keeps the
            # best-effort behavior without trapping process-control
            # exceptions.
            log.exception(
                "Failed to create/update enrollment record (user: %s, run: %s, order: %s)",
                user,
                run.bootcamp_run_id,
                order.id if order else None,
            )
    return successful_enrollments
def defer_enrollment(
    user, from_bootcamp_run_id, to_bootcamp_run_id, order_id, force=False
):
    """
    Deactivates a user's existing enrollment in one bootcamp run and enrolls the user in another.

    Args:
        user (User): The enrolled user
        from_bootcamp_run_id (str): The bootcamp_run_id value of the currently enrolled BootcampRun
        to_bootcamp_run_id (str): The bootcamp_run_id value of the desired BootcampRun
        order_id (int): The order_id value for an user's order ID
        force (bool): If True, the deferral will be completed even if the current enrollment is inactive
            or the desired enrollment is in a different bootcamp

    Returns:
        (BootcampRunEnrollment, BootcampRunEnrollment): The deactivated enrollment paired with the
            new enrollment that was the target of the deferral

    Raises:
        ValidationError: If the deferral is invalid: the source enrollment is inactive (without
            force), the target run equals the source run, the target run is beyond its enrollment
            period, the target belongs to a different bootcamp (without force), or no matching
            order exists for the user.
    """
    from_enrollment = BootcampRunEnrollment.objects.get(
        user=user, bootcamp_run__bootcamp_run_id=from_bootcamp_run_id
    )
    if not force and not from_enrollment.active:
        raise ValidationError(
            "Cannot defer from inactive enrollment (id: {}, run: {}, user: {}). "
            "Set force=True to defer anyway.".format(
                from_enrollment.id,
                from_enrollment.bootcamp_run.bootcamp_run_id,
                user.email,
            )
        )
    to_run = BootcampRun.objects.get(bootcamp_run_id=to_bootcamp_run_id)
    if from_enrollment.bootcamp_run == to_run:
        raise ValidationError(
            "Cannot defer to the same bootcamp run (run: {})".format(
                to_run.bootcamp_run_id
            )
        )
    if not to_run.is_not_beyond_enrollment:
        raise ValidationError(
            "Cannot defer to a bootcamp run that is outside of its enrollment period (run: {}).".format(
                to_run.bootcamp_run_id
            )
        )
    if not force and from_enrollment.bootcamp_run.bootcamp != to_run.bootcamp:
        raise ValidationError(
            "Cannot defer to a bootcamp run of a different bootcamp ('{}' -> '{}'). "
            "Set force=True to defer anyway.".format(
                from_enrollment.bootcamp_run.bootcamp.title, to_run.bootcamp.title
            )
        )
    try:
        defaults = {
            "id": order_id,
            "user": user,
            # "application__bootcamp_run": from_enrollment.bootcamp_run,
        }
        order = Order.objects.get(**defaults)
    except ObjectDoesNotExist as exc:
        # Chain the original exception (consistent with fetch_bootcamp_run)
        # so the ValidationError keeps its root cause for debugging.
        raise ValidationError(
            "Order (order: {}) does not exist for user (User: {}) against bootcamp run = (run: {})".format(
                order_id, user, from_bootcamp_run_id
            )
        ) from exc
    to_enrollment = create_run_enrollment(user, to_run, order=order)
    from_enrollment = deactivate_run_enrollment(
        run_enrollment=from_enrollment, change_status=ENROLL_CHANGE_STATUS_DEFERRED
    )
    return from_enrollment, to_enrollment
| [
"logging.getLogger",
"django.db.models.Sum",
"ecommerce.models.Line.objects.filter",
"datetime.datetime.strptime",
"klasses.models.BootcampRunEnrollment.objects.filter",
"klasses.models.BootcampRunEnrollment.objects.update_or_create",
"datetime.timedelta",
"klasses.models.BootcampRun.objects.filter",
... | [((519, 546), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (536, 546), False, 'import logging\n'), ((8644, 8773), 'klasses.models.BootcampRunEnrollment.objects.update_or_create', 'BootcampRunEnrollment.objects.update_or_create', ([], {'user': 'user', 'bootcamp_run': 'run', 'defaults': "{'active': True, 'change_status': None}"}), "(user=user, bootcamp_run=run,\n defaults={'active': True, 'change_status': None})\n", (8690, 8773), False, 'from klasses.models import BootcampRun, BootcampRunEnrollment\n'), ((11323, 11424), 'klasses.models.BootcampRunEnrollment.objects.get', 'BootcampRunEnrollment.objects.get', ([], {'user': 'user', 'bootcamp_run__bootcamp_run_id': 'from_bootcamp_run_id'}), '(user=user, bootcamp_run__bootcamp_run_id=\n from_bootcamp_run_id)\n', (11356, 11424), False, 'from klasses.models import BootcampRun, BootcampRunEnrollment\n'), ((11813, 11872), 'klasses.models.BootcampRun.objects.get', 'BootcampRun.objects.get', ([], {'bootcamp_run_id': 'to_bootcamp_run_id'}), '(bootcamp_run_id=to_bootcamp_run_id)\n', (11836, 11872), False, 'from klasses.models import BootcampRun, BootcampRunEnrollment\n'), ((1921, 1969), 'main.features.is_enabled', 'features.is_enabled', (['features.NOVOED_INTEGRATION'], {}), '(features.NOVOED_INTEGRATION)\n', (1940, 1969), False, 'from main import features\n'), ((2044, 2199), 'novoed.tasks.unenroll_user_from_novoed_course.delay', 'novoed_tasks.unenroll_user_from_novoed_course.delay', ([], {'user_id': 'run_enrollment.user.id', 'novoed_course_stub': 'run_enrollment.bootcamp_run.novoed_course_stub'}), '(user_id=run_enrollment.\n user.id, novoed_course_stub=run_enrollment.bootcamp_run.novoed_course_stub)\n', (2095, 2199), True, 'from novoed import tasks as novoed_tasks\n'), ((9122, 9170), 'main.features.is_enabled', 'features.is_enabled', (['features.NOVOED_INTEGRATION'], {}), '(features.NOVOED_INTEGRATION)\n', (9141, 9170), False, 'from main import features\n'), ((9241, 9388), 
'novoed.tasks.enroll_users_in_novoed_course.delay', 'novoed_tasks.enroll_users_in_novoed_course.delay', ([], {'user_ids': '[enrollment.user.id]', 'novoed_course_stub': 'enrollment.bootcamp_run.novoed_course_stub'}), '(user_ids=[enrollment.user.\n id], novoed_course_stub=enrollment.bootcamp_run.novoed_course_stub)\n', (9289, 9388), True, 'from novoed import tasks as novoed_tasks\n'), ((12867, 12896), 'ecommerce.models.Order.objects.get', 'Order.objects.get', ([], {}), '(**defaults)\n', (12884, 12896), False, 'from ecommerce.models import Line, Order\n'), ((3197, 3261), 'ecommerce.models.Line.objects.filter', 'Line.objects.filter', ([], {'order__user': 'user', 'bootcamp_run': 'bootcamp_run'}), '(order__user=user, bootcamp_run=bootcamp_run)\n', (3216, 3261), False, 'from ecommerce.models import Line, Order\n'), ((3307, 3319), 'django.db.models.Sum', 'Sum', (['"""price"""'], {}), "('price')\n", (3310, 3319), False, 'from django.db.models import Sum\n'), ((6450, 6490), 'klasses.models.BootcampRun.objects.get', 'BootcampRun.objects.get', ([], {'id': 'run_property'}), '(id=run_property)\n', (6473, 6490), False, 'from klasses.models import BootcampRun, BootcampRunEnrollment\n'), ((6847, 6900), 'klasses.models.BootcampRun.objects.get', 'BootcampRun.objects.get', ([], {'bootcamp__title': 'run_property'}), '(bootcamp__title=run_property)\n', (6870, 6900), False, 'from klasses.models import BootcampRun, BootcampRunEnrollment\n'), ((7854, 7892), 'klasses.models.BootcampRun.objects.get', 'BootcampRun.objects.get', ([], {}), '(**run_filters)\n', (7877, 7892), False, 'from klasses.models import BootcampRun, BootcampRunEnrollment\n'), ((1636, 1710), 'klasses.models.BootcampRunEnrollment.objects.filter', 'BootcampRunEnrollment.objects.filter', ([], {'user': 'user', 'bootcamp_run': 'bootcamp_run'}), '(user=user, bootcamp_run=bootcamp_run)\n', (1672, 1710), False, 'from klasses.models import BootcampRun, BootcampRunEnrollment\n'), ((5663, 5710), 'datetime.datetime.strptime', 
'datetime.strptime', (['month1', 'DATE_RANGE_MONTH_FMT'], {}), '(month1, DATE_RANGE_MONTH_FMT)\n', (5680, 5710), False, 'from datetime import datetime, timedelta\n'), ((6505, 6551), 'klasses.models.BootcampRun.objects.filter', 'BootcampRun.objects.filter', ([], {'title': 'run_property'}), '(title=run_property)\n', (6531, 6551), False, 'from klasses.models import BootcampRun, BootcampRunEnrollment\n'), ((5887, 5934), 'datetime.datetime.strptime', 'datetime.strptime', (['month2', 'DATE_RANGE_MONTH_FMT'], {}), '(month2, DATE_RANGE_MONTH_FMT)\n', (5904, 5934), False, 'from datetime import datetime, timedelta\n'), ((7411, 7428), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (7420, 7428), False, 'from datetime import datetime, timedelta\n'), ((7714, 7731), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (7723, 7731), False, 'from datetime import datetime, timedelta\n')] |
# -*- coding: UTF-8 -*-
# vim: set expandtab sw=4 ts=4 sts=4:
#
# phpMyAdmin web site
#
# Copyright (C) 2008 - 2016 <NAME> <<EMAIL>>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
from dateutil import parser
from news.management.commands import FeedCommand
from news.models import Planet
URL = 'https://planet.phpmyadmin.net/rss20.xml'
class Command(FeedCommand):
    help = 'Imports planet posts'
    url = URL

    def process_feed(self, feed):
        """Import every feed entry as a Planet post.

        New entries are created with their title and publication date;
        existing entries (matched by URL) are updated when either field
        has changed.
        """
        for entry in feed.entries:
            params = {
                'title': entry.title,
                'date': parser.parse(entry.published),
            }
            planet, created = Planet.objects.get_or_create(
                url=entry.link,
                defaults=params
            )
            # Bug fix: the original skipped on `not created`, so existing
            # posts were never refreshed; the update loop only ran on
            # freshly created rows, whose fields already equal `params`
            # (set via defaults), so it could never detect a change.
            if created:
                continue
            modified = False
            for key in params:
                if getattr(planet, key) != params[key]:
                    setattr(planet, key, params[key])
                    modified = True
            if modified:
                planet.save()
| [
"dateutil.parser.parse",
"news.models.Planet.objects.get_or_create"
] | [((1307, 1368), 'news.models.Planet.objects.get_or_create', 'Planet.objects.get_or_create', ([], {'url': 'entry.link', 'defaults': 'params'}), '(url=entry.link, defaults=params)\n', (1335, 1368), False, 'from news.models import Planet\n'), ((1232, 1261), 'dateutil.parser.parse', 'parser.parse', (['entry.published'], {}), '(entry.published)\n', (1244, 1261), False, 'from dateutil import parser\n')] |
import collections
import csv
import os
import sys
from enum import Enum
from pathlib import Path
# adapt paths for jupyter
module_path = os.path.abspath(os.path.join('..'))
if module_path not in sys.path:
sys.path.append(module_path)
import face_alignment
from yawn_train.src.blazeface_detector import BlazeFaceDetector
import cv2
import dlib
import numpy as np
from imutils import face_utils
from yawn_train.src.ssd_face_detector import SSDFaceDetector
# define one constants, for mouth aspect ratio to indicate open mouth
from yawn_train.src import download_utils, detect_utils, inference_utils
from yawn_train.src.model_config import MOUTH_AR_THRESH, MAX_IMAGE_WIDTH, MAX_IMAGE_HEIGHT
class ImageResult:
    """Outcome of processing one frame: whether a crop was saved and, if
    it was, whether the mouth in it was labelled as opened."""

    def __init__(self, is_processed, is_opened_image):
        # True when a face crop was actually written to disk for this frame.
        self.is_processed = is_processed
        # True when the saved crop was classified as an opened mouth.
        self.is_opened_image = is_opened_image

    @staticmethod
    def not_processed():
        """Factory for the 'nothing was saved' result."""
        return ImageResult(is_processed=False, is_opened_image=False)
class VideoResult:
    """Per-video statistics: total frames read, face crops saved per
    detector, and the number of opened/closed mouth labels."""

    def __init__(self, total_frames, dlib_counter, caffe_counter, blazeface_counter, opened_counter, closed_counter):
        # Number of frames read from the video.
        self.total_frames = total_frames
        # Crops saved using each face detector.
        self.dlib_counter = dlib_counter
        self.caffe_counter = caffe_counter
        self.blazeface_counter = blazeface_counter
        # Crops saved per mouth-state label.
        self.opened_counter = opened_counter
        self.closed_counter = closed_counter

    @staticmethod
    def empty():
        """Result for a video that was skipped or could not be opened."""
        return VideoResult(0, 0, 0, 0, 0, 0)
class FACE_TYPE(Enum):
    """Face detectors rotated round-robin while mining frames."""
    BLAZEFACE = 0
    DLIB = 1
    CAFFE = 2

    @classmethod
    def has_value(cls, value):
        """Return True when *value* maps to a member of this enum."""
        return value in cls._value2member_map_

    def get_next(self):
        """Return the member following this one, wrapping back to value 0."""
        candidate = self.value + 1
        return FACE_TYPE(candidate) if self.has_value(candidate) else FACE_TYPE(0)
class LNDMR_TYPE(Enum):
    # Which landmark model produced the mouth-open ratio that was kept
    # (see get_mouth_opened: dlib 68-pt predictor vs. face_alignment 3D).
    DLIB = 0
    FACEALIGN = 1
# --- Output / dataset configuration -----------------------------------------
# When True, crops are saved in color; otherwise grayscale.
COLOR_IMG = False
MOUTH_FOLDER = "./mouth_state_new10" + ("_color" if COLOR_IMG else "")
MOUTH_OPENED_FOLDER = os.path.join(MOUTH_FOLDER, 'opened')
MOUTH_CLOSED_FOLDER = os.path.join(MOUTH_FOLDER, 'closed')
TEMP_FOLDER = "./temp"
# https://ieee-dataport.org/open-access/yawdd-yawning-detection-dataset#files
YAWDD_DATASET_FOLDER = "./YawDD dataset"
CSV_STATS = 'video_stat.csv'
# Global counters mutated by recognize_image (read = seen, saved = written).
read_mouth_open_counter = 0
read_mouth_close_counter = 0
saved_mouth_open_counter = 0
saved_mouth_close_counter = 0
# Keep every Nth image per class to balance the dataset.
SAMPLE_STEP_IMG_OPENED = 1
SAMPLE_STEP_IMG_CLOSED = 4
# Index range of the mouth landmarks inside the 68-point dlib layout.
(mStart, mEnd) = face_utils.FACIAL_LANDMARKS_IDXS["mouth"]
Path(MOUTH_FOLDER).mkdir(parents=True, exist_ok=True)
Path(MOUTH_OPENED_FOLDER).mkdir(parents=True, exist_ok=True)
Path(MOUTH_CLOSED_FOLDER).mkdir(parents=True, exist_ok=True)
# --- Detector / landmark model setup (downloads weights on first run) -------
dlib_landmarks_file = download_utils.download_and_unpack_dlib_68_landmarks(TEMP_FOLDER)
# dlib predictor for 68pts, mouth
predictor = dlib.shape_predictor(dlib_landmarks_file)
# initialize dlib's face detector (HOG-based)
detector = dlib.get_frontal_face_detector()
caffe_weights, caffe_config = download_utils.download_caffe(TEMP_FOLDER)
# Reads the network model stored in Caffe framework's format.
face_model = cv2.dnn.readNetFromCaffe(caffe_config, caffe_weights)
ssd_face_detector = SSDFaceDetector(face_model)
import tensorflow as tf
bf_model = download_utils.download_blazeface(TEMP_FOLDER)
blazeface_tf = tf.keras.models.load_model(bf_model, compile=False)
blazefaceDetector = BlazeFaceDetector(blazeface_tf)
# img = cv2.imread(
#     '/Users/igla/Desktop/Screenshot 2021-01-14 at 12.29.25.png', cv2.IMREAD_GRAYSCALE)
# ultrafacedetector = UltraFaceDetector("/Users/igla/Downloads/version-RFB-320_simplified.onnx")
"""
Take mouth ratio only from dlib rect. Use dnn frame for output
"""
def should_process_video(video_name: str) -> bool:
    """Decide whether a YawDD video file should be mined for mouth crops.

    Videos recorded with sunglasses are rejected (facial landmarks are
    unreliable on them); otherwise only the Normal / Talking / Yawning
    recordings are accepted.

    :param video_name: base name of the video file, e.g. ``"1-Male-Yawning.avi"``
    :return: True when the video should be processed
    """
    if 'SunGlasses' in video_name:
        # inaccurate landmarks when the subject wears sunglasses
        print('Video contains sunglasses. Skip', video_name)
        return False
    # endswith accepts a tuple of suffixes: one call instead of an or-chain
    return video_name.endswith(('-Normal.avi', '-Talking.avi', '-Yawning.avi'))
# Landmark groups of the 68-point layout as (index slice, RGBA plot color).
pred_type = collections.namedtuple('prediction_type', ['slice', 'color'])
pred_types = {'face': pred_type(slice(0, 17), (0.682, 0.780, 0.909, 0.5)),
              'eyebrow1': pred_type(slice(17, 22), (1.0, 0.498, 0.055, 0.4)),
              'eyebrow2': pred_type(slice(22, 27), (1.0, 0.498, 0.055, 0.4)),
              'nose': pred_type(slice(27, 31), (0.345, 0.239, 0.443, 0.4)),
              'nostril': pred_type(slice(31, 36), (0.345, 0.239, 0.443, 0.4)),
              'eye1': pred_type(slice(36, 42), (0.596, 0.875, 0.541, 0.3)),
              'eye2': pred_type(slice(42, 48), (0.596, 0.875, 0.541, 0.3)),
              'lips': pred_type(slice(48, 60), (0.596, 0.875, 0.541, 0.3)),
              'teeth': pred_type(slice(60, 68), (0.596, 0.875, 0.541, 0.4))
              }
# face_alignment 3D landmark model on CPU; 'sfd' is its built-in face detector.
face_detector = 'sfd'
face_detector_kwargs = {
    "filter_threshold": 0.8
}
# NOTE(review): face_detector_kwargs is defined but not passed to
# FaceAlignment below — confirm whether it should be forwarded.
fa = face_alignment.FaceAlignment(face_alignment.LandmarksType._3D, flip_input=True, device='cpu',
                                  face_detector=face_detector)
def get_mouth_opened(frame, start_x, start_y, end_x, end_y) -> tuple:
    """Estimate whether the mouth inside the given face rect is opened.

    Computes the mouth aspect ratio twice — with the dlib 68-pt predictor
    and with the face_alignment 3D model — and returns the 3D model's
    verdict.  The dlib ratio is reported when both models agree, otherwise
    the face_alignment ratio is reported.

    :param frame: full image the face rect refers to
    :param start_x, start_y, end_x, end_y: face bounding box in `frame`
    :return: (is_opened: bool, mouth_ratio: float, source: LNDMR_TYPE)
    """
    mouth_shape = predictor(frame, dlib.rectangle(start_x, start_y, end_x, end_y))
    mouth_shape = face_utils.shape_to_np(mouth_shape)
    mouth_arr = mouth_shape[mStart:mEnd]
    mouth_mar_dlib = detect_utils.mouth_aspect_ratio(mouth_arr)
    mouth_mar_dlib = round(mouth_mar_dlib, 2)
    # print(mouth_mar_dlib)
    face_roi_dlib = frame[start_y:end_y, start_x:end_x]
    height_frame, width_frame = face_roi_dlib.shape[:2]
    # swapping the read and green channels
    # https://stackoverflow.com/a/56933474/1461625
    # Pass the whole crop as the single detected face so face_alignment
    # skips its own detection step.
    detected_faces = []
    detected_faces.append([0, 0, width_frame, height_frame])
    preds = fa.get_landmarks_from_image(face_roi_dlib, detected_faces)[-1]
    pred_type = pred_types['lips']
    X = preds[pred_type.slice, 0]
    Y = preds[pred_type.slice, 1]
    mouth_shape_3ddfa = []
    for x, y in zip(X, Y):
        mouth_shape_3ddfa.append((x, y))
    # shape = []
    # for idx, pred_type in enumerate(pred_types.values()):
    #     X = preds[pred_type.slice, 0]
    #     Y = preds[pred_type.slice, 1]
    #     for x, y in zip(X, Y):
    #         shape.append((x, y))
    mouth_mar_3ddfa = detect_utils.mouth_aspect_ratio(mouth_shape_3ddfa)
    mouth_mar_3ddfa = round(mouth_mar_3ddfa, 2)
    # print(mouth_mar_3ddfa)
    # 0.75 is the opened-mouth threshold for the face_alignment ratio;
    # MOUTH_AR_THRESH is the tuned threshold for the dlib ratio.
    is_opened_mouth_3ddfa = mouth_mar_3ddfa >= 0.75
    is_opened_mouth_dlib = mouth_mar_dlib >= MOUTH_AR_THRESH
    if is_opened_mouth_3ddfa == is_opened_mouth_dlib:
        return is_opened_mouth_3ddfa, mouth_mar_dlib, LNDMR_TYPE.DLIB  # correct, same as dlib, return dlib ratio
    else:
        return is_opened_mouth_3ddfa, mouth_mar_3ddfa, LNDMR_TYPE.FACEALIGN  # return 3ddfa, as it's more accurate
def recognize_image(video_id: int, video_path: str, frame, frame_id: int, face_type: FACE_TYPE, face_rect_dlib,
                    face_rect_dnn=None) -> ImageResult:
    """Classify the mouth state in one frame and save a labelled face crop.

    The mouth ratio is always measured on the dlib face rect; the crop
    written to disk comes from `face_rect_dnn` when it is given and valid,
    otherwise from the dlib rect.  Mutates the module-level read/saved
    counters.

    :param video_id: numeric id of the source video (for file names)
    :param video_path: path of the source video
    :param frame: image to crop from (gray unless COLOR_IMG)
    :param face_type: detector that produced `face_rect_dnn` (file-name prefix)
    :param face_rect_dlib: (start_x, start_y, end_x, end_y) from dlib
    :param face_rect_dnn: optional (start_x, start_y, end_x, end_y) from a DNN
    :return: ImageResult describing whether/what was saved
    """
    (start_x, start_y, end_x, end_y) = face_rect_dlib
    start_x = max(start_x, 0)
    start_y = max(start_y, 0)
    if start_x >= end_x or start_y >= end_y:
        print('Invalid detection. Skip', face_rect_dlib)
        return ImageResult.not_processed()
    face_roi_dlib = frame[start_y:end_y, start_x:end_x]
    if face_roi_dlib is None:
        print('Cropped face is None. Skip')
        return ImageResult.not_processed()
    height_frame, width_frame = face_roi_dlib.shape[:2]
    if height_frame < 50 or width_frame < 50:  # some images have invalid dlib face rect
        print('Too small face. Skip')
        return ImageResult.not_processed()
    # https://pyimagesearch.com/wp-content/uploads/2017/04/facial_landmarks_68markup.jpg
    is_mouth_opened, open_mouth_ratio, lndmk_type = get_mouth_opened(frame, start_x, start_y, end_x, end_y)
    # skip frames in normal and talking, containing opened mouth (we detect only yawn)
    video_name = os.path.basename(video_path)
    is_video_no_yawn = video_name.endswith('-Normal.avi') or \
                       video_name.endswith('-Talking.avi')
    if is_mouth_opened and is_video_no_yawn:
        # some videos may contain opened mouth, skip these situations
        return ImageResult.not_processed()
    # Prefer the DNN rect for the saved crop when it is valid.
    prefix = 'dlib'
    target_face_roi = None
    if face_rect_dnn is not None:
        (start_x, start_y, end_x, end_y) = face_rect_dnn
        start_x = max(start_x, 0)
        start_y = max(start_y, 0)
        if start_x < end_x and start_y < end_y:
            face_roi_dnn = frame[start_y:end_y, start_x:end_x]
            target_face_roi = face_roi_dnn
            prefix = face_type.name.lower()
    if target_face_roi is None:
        target_face_roi = face_roi_dlib
    if len(frame.shape) == 2 or COLOR_IMG:  # single channel
        gray_img = target_face_roi
    else:
        gray_img = cv2.cvtColor(target_face_roi, cv2.COLOR_BGR2GRAY)
    gray_img = detect_utils.resize_img(gray_img, MAX_IMAGE_WIDTH, MAX_IMAGE_HEIGHT)
    lndmk_type_name = lndmk_type.name.lower()
    if is_mouth_opened:
        global read_mouth_open_counter
        read_mouth_open_counter = read_mouth_open_counter + 1
        # reduce img count
        if read_mouth_open_counter % SAMPLE_STEP_IMG_OPENED != 0:
            return ImageResult.not_processed()
        global saved_mouth_open_counter
        saved_mouth_open_counter = saved_mouth_open_counter + 1
        file_name = os.path.join(MOUTH_OPENED_FOLDER,
                                 f'{read_mouth_open_counter}_{open_mouth_ratio}_{video_id}_{frame_id}_{prefix}_{lndmk_type_name}.jpg')
        cv2.imwrite(file_name, gray_img)
        return ImageResult(is_processed=True, is_opened_image=True)
    else:
        global read_mouth_close_counter
        read_mouth_close_counter = read_mouth_close_counter + 1
        # reduce img count
        if read_mouth_close_counter % SAMPLE_STEP_IMG_CLOSED != 0:
            return ImageResult.not_processed()
        global saved_mouth_close_counter
        saved_mouth_close_counter = saved_mouth_close_counter + 1
        file_name = os.path.join(MOUTH_CLOSED_FOLDER,
                                 f'{read_mouth_close_counter}_{open_mouth_ratio}_{video_id}_{frame_id}_{prefix}_{lndmk_type_name}.jpg')
        cv2.imwrite(file_name, gray_img)
        return ImageResult(is_processed=True, is_opened_image=False)
def detect_faces_complex(frame):
    """Run the face detectors in order (dlib, Caffe SSD, BlazeFace) and
    return the first non-empty face list with the detector that found it.

    :param frame: BGR image
    :return: (face_list, FACE_TYPE) or ([], None) when nothing is found
    """
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    attempts = (
        (lambda: inference_utils.detect_face_dlib(detector, gray), FACE_TYPE.DLIB),
        (lambda: ssd_face_detector.detect_face(frame), FACE_TYPE.CAFFE),
        (lambda: blazefaceDetector.detect_face(frame), FACE_TYPE.BLAZEFACE),
    )
    for run_detector, detector_type in attempts:
        faces = run_detector()
        if len(faces) > 0:
            return faces, detector_type
    return [], None
def process_video(video_id, video_path) -> VideoResult:
    """Extract labelled mouth-state crops from one YawDD video.

    Frames are read sequentially; each frame's faces are located via
    detect_faces_complex and a crop is saved by recognize_image.  The
    detector used for the saved crop rotates round-robin (FACE_TYPE) so
    the dataset mixes crop styles.

    :param video_id: numeric id of the video (used in output file names)
    :param video_path: path to the .avi file
    :return: VideoResult with per-detector and per-label counters
    """
    video_name = os.path.basename(video_path)
    if should_process_video(video_name) is False:
        print('Video should not be processed', video_path)
        return VideoResult.empty()
    cap = cv2.VideoCapture(video_path)
    if cap.isOpened() is False:
        print('Video is not opened', video_path)
        return VideoResult.empty()

    face_dlib_counter = 0
    face_caffe_counter = 0
    face_blazeface_counter = 0
    opened_img_counter = 0
    closed_img_counter = 0
    frame_id = 0
    face_type = FACE_TYPE.DLIB
    while True:
        ret, frame = cap.read()
        if ret is False:
            break
        if frame is None:
            print('No images left in', video_path)
            break
        if np.shape(frame) == ():
            print('Empty image. Skip')
            continue
        frame_id = frame_id + 1

        face_list, f_type = detect_faces_complex(frame)
        if len(face_list) == 0:
            # skip images not recognized by dlib or other detectors
            continue
        gray_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        recognize_frame = frame if COLOR_IMG else gray_frame

        if face_type == FACE_TYPE.DLIB:
            image_result = recognize_image(video_id, video_path, recognize_frame, frame_id, face_type,
                                           face_list[0])
            if image_result.is_processed:
                face_type = face_type.get_next()
                face_dlib_counter = face_dlib_counter + 1
                if image_result.is_opened_image:
                    opened_img_counter = opened_img_counter + 1
                else:
                    closed_img_counter = closed_img_counter + 1
            continue

        if face_type == FACE_TYPE.CAFFE:
            face_list_dnn = ssd_face_detector.detect_face(frame)
            if len(face_list_dnn) == 0:
                face_type = face_type.get_next()
                print('Face not found with Caffe DNN')
                continue
            image_result = recognize_image(video_id, video_path, recognize_frame, frame_id, face_type,
                                           face_list[0],
                                           face_list_dnn[0])
            if image_result.is_processed:
                face_type = face_type.get_next()
                face_caffe_counter = face_caffe_counter + 1
                if image_result.is_opened_image:
                    opened_img_counter = opened_img_counter + 1
                else:
                    closed_img_counter = closed_img_counter + 1
            # BUGFIX: without this continue the same frame fell through to
            # the BlazeFace branch after face_type advanced, processing and
            # counting the frame twice.
            continue

        if face_type == FACE_TYPE.BLAZEFACE:
            face_list_dnn = blazefaceDetector.detect_face(frame)
            if len(face_list_dnn) == 0:
                face_type = face_type.get_next()
                print('Face not found with Blazeface')
                continue
            image_result = recognize_image(video_id, video_path, recognize_frame, frame_id, face_type,
                                           face_list[0],
                                           face_list_dnn[0])
            if image_result.is_processed:
                face_type = face_type.get_next()
                face_blazeface_counter = face_blazeface_counter + 1
                if image_result.is_opened_image:
                    opened_img_counter = opened_img_counter + 1
                else:
                    closed_img_counter = closed_img_counter + 1

    print(
        f"Total images: {face_dlib_counter + face_caffe_counter + face_blazeface_counter}"
        f', dlib: {face_dlib_counter} images'
        f', blazeface: {face_blazeface_counter} images'
        f', caffe: {face_caffe_counter} images in video {video_name}'
    )
    cap.release()
    # cv2.destroyAllWindows raises on headless OpenCV builds (no GUI backend);
    # tolerate that instead of crashing the batch run.
    try:
        cv2.destroyAllWindows()
    except:
        print('No destroy windows')
    # BUGFIX: pass counters in the constructor's declared order
    # (caffe before blazeface); the original swapped the two.
    return VideoResult(
        frame_id,
        face_dlib_counter,
        face_caffe_counter,
        face_blazeface_counter,
        opened_img_counter,
        closed_img_counter
    )
def write_csv_stat(filename, video_count, video_result: VideoResult):
    """Append one per-video statistics row to the CSV report.

    Creates the CSV with a header row on first use.

    :param filename: path of the processed video file
    :param video_count: running index of the video
    :param video_result: counters gathered by process_video
    """
    video_stat_dict_path = os.path.join(MOUTH_FOLDER, CSV_STATS)
    # newline='' is required by the csv module; without it blank lines are
    # inserted between rows on Windows.
    if os.path.isfile(video_stat_dict_path) is False:
        with open(video_stat_dict_path, 'w', newline='') as f:
            w = csv.writer(f)
            w.writerow(['Video id', 'File name', 'Total frames', 'Image saved', 'Opened img', 'Closed img'])
    img_counter = video_result.caffe_counter + video_result.dlib_counter + video_result.blazeface_counter
    # mode 'a' appends one row per processed video
    with open(video_stat_dict_path, 'a', newline='') as f:
        w = csv.writer(f)
        w.writerow((
            video_count,
            filename,
            video_result.total_frames,
            img_counter,
            video_result.opened_counter,
            video_result.closed_counter
        ))
def process_videos():
    """Walk the YawDD dataset, mine every .avi and write per-video stats.

    Prints a summary of saved opened/closed mouth crops at the end.
    """
    video_count = 0
    total_frames = 0
    for root, dirs, files in os.walk(YAWDD_DATASET_FOLDER):
        for file in files:
            if file.endswith(".avi"):
                video_count = video_count + 1
                file_name = os.path.join(root, file)
                print('Current video', file_name)
                video_result = process_video(video_count, file_name)
                total_frames = total_frames + video_result.total_frames
                # append this video's counters to the CSV report
                write_csv_stat(file_name, video_count, video_result)
    print(f'Videos processed: {video_count}')
    print(f'Total read images: {total_frames}')
    print(f'Total saved images: {saved_mouth_open_counter + saved_mouth_close_counter}')
    print(f'Saved opened mouth images: {saved_mouth_open_counter}')
    print(f'Saved closed mouth images: {saved_mouth_close_counter}')
# Script entry point: mine the whole YawDD dataset.
if __name__ == '__main__':
    process_videos()
| [
"yawn_train.src.download_utils.download_blazeface",
"tensorflow.keras.models.load_model",
"cv2.destroyAllWindows",
"yawn_train.src.blazeface_detector.BlazeFaceDetector",
"yawn_train.src.download_utils.download_and_unpack_dlib_68_landmarks",
"sys.path.append",
"os.walk",
"yawn_train.src.ssd_face_detect... | [((1922, 1958), 'os.path.join', 'os.path.join', (['MOUTH_FOLDER', '"""opened"""'], {}), "(MOUTH_FOLDER, 'opened')\n", (1934, 1958), False, 'import os\n'), ((1981, 2017), 'os.path.join', 'os.path.join', (['MOUTH_FOLDER', '"""closed"""'], {}), "(MOUTH_FOLDER, 'closed')\n", (1993, 2017), False, 'import os\n'), ((2624, 2689), 'yawn_train.src.download_utils.download_and_unpack_dlib_68_landmarks', 'download_utils.download_and_unpack_dlib_68_landmarks', (['TEMP_FOLDER'], {}), '(TEMP_FOLDER)\n', (2676, 2689), False, 'from yawn_train.src import download_utils, detect_utils, inference_utils\n'), ((2736, 2777), 'dlib.shape_predictor', 'dlib.shape_predictor', (['dlib_landmarks_file'], {}), '(dlib_landmarks_file)\n', (2756, 2777), False, 'import dlib\n'), ((2835, 2867), 'dlib.get_frontal_face_detector', 'dlib.get_frontal_face_detector', ([], {}), '()\n', (2865, 2867), False, 'import dlib\n'), ((2899, 2941), 'yawn_train.src.download_utils.download_caffe', 'download_utils.download_caffe', (['TEMP_FOLDER'], {}), '(TEMP_FOLDER)\n', (2928, 2941), False, 'from yawn_train.src import download_utils, detect_utils, inference_utils\n'), ((3017, 3070), 'cv2.dnn.readNetFromCaffe', 'cv2.dnn.readNetFromCaffe', (['caffe_config', 'caffe_weights'], {}), '(caffe_config, caffe_weights)\n', (3041, 3070), False, 'import cv2\n'), ((3091, 3118), 'yawn_train.src.ssd_face_detector.SSDFaceDetector', 'SSDFaceDetector', (['face_model'], {}), '(face_model)\n', (3106, 3118), False, 'from yawn_train.src.ssd_face_detector import SSDFaceDetector\n'), ((3156, 3202), 'yawn_train.src.download_utils.download_blazeface', 'download_utils.download_blazeface', (['TEMP_FOLDER'], {}), '(TEMP_FOLDER)\n', (3189, 3202), False, 'from yawn_train.src import download_utils, detect_utils, inference_utils\n'), ((3218, 3269), 'tensorflow.keras.models.load_model', 'tf.keras.models.load_model', (['bf_model'], {'compile': '(False)'}), '(bf_model, compile=False)\n', (3244, 3269), True, 'import 
tensorflow as tf\n'), ((3290, 3321), 'yawn_train.src.blazeface_detector.BlazeFaceDetector', 'BlazeFaceDetector', (['blazeface_tf'], {}), '(blazeface_tf)\n', (3307, 3321), False, 'from yawn_train.src.blazeface_detector import BlazeFaceDetector\n'), ((4037, 4098), 'collections.namedtuple', 'collections.namedtuple', (['"""prediction_type"""', "['slice', 'color']"], {}), "('prediction_type', ['slice', 'color'])\n", (4059, 4098), False, 'import collections\n'), ((4887, 5014), 'face_alignment.FaceAlignment', 'face_alignment.FaceAlignment', (['face_alignment.LandmarksType._3D'], {'flip_input': '(True)', 'device': '"""cpu"""', 'face_detector': 'face_detector'}), "(face_alignment.LandmarksType._3D, flip_input=\n True, device='cpu', face_detector=face_detector)\n", (4915, 5014), False, 'import face_alignment\n'), ((156, 174), 'os.path.join', 'os.path.join', (['""".."""'], {}), "('..')\n", (168, 174), False, 'import os\n'), ((212, 240), 'sys.path.append', 'sys.path.append', (['module_path'], {}), '(module_path)\n', (227, 240), False, 'import sys\n'), ((5217, 5252), 'imutils.face_utils.shape_to_np', 'face_utils.shape_to_np', (['mouth_shape'], {}), '(mouth_shape)\n', (5239, 5252), False, 'from imutils import face_utils\n'), ((5315, 5357), 'yawn_train.src.detect_utils.mouth_aspect_ratio', 'detect_utils.mouth_aspect_ratio', (['mouth_arr'], {}), '(mouth_arr)\n', (5346, 5357), False, 'from yawn_train.src import download_utils, detect_utils, inference_utils\n'), ((6247, 6297), 'yawn_train.src.detect_utils.mouth_aspect_ratio', 'detect_utils.mouth_aspect_ratio', (['mouth_shape_3ddfa'], {}), '(mouth_shape_3ddfa)\n', (6278, 6297), False, 'from yawn_train.src import download_utils, detect_utils, inference_utils\n'), ((7916, 7944), 'os.path.basename', 'os.path.basename', (['video_path'], {}), '(video_path)\n', (7932, 7944), False, 'import os\n'), ((8894, 8962), 'yawn_train.src.detect_utils.resize_img', 'detect_utils.resize_img', (['gray_img', 'MAX_IMAGE_WIDTH', 'MAX_IMAGE_HEIGHT'], {}), 
'(gray_img, MAX_IMAGE_WIDTH, MAX_IMAGE_HEIGHT)\n', (8917, 8962), False, 'from yawn_train.src import download_utils, detect_utils, inference_utils\n'), ((10393, 10432), 'cv2.cvtColor', 'cv2.cvtColor', (['frame', 'cv2.COLOR_BGR2GRAY'], {}), '(frame, cv2.COLOR_BGR2GRAY)\n', (10405, 10432), False, 'import cv2\n'), ((10454, 10508), 'yawn_train.src.inference_utils.detect_face_dlib', 'inference_utils.detect_face_dlib', (['detector', 'gray_frame'], {}), '(detector, gray_frame)\n', (10486, 10508), False, 'from yawn_train.src import download_utils, detect_utils, inference_utils\n'), ((10989, 11017), 'os.path.basename', 'os.path.basename', (['video_path'], {}), '(video_path)\n', (11005, 11017), False, 'import os\n'), ((11173, 11201), 'cv2.VideoCapture', 'cv2.VideoCapture', (['video_path'], {}), '(video_path)\n', (11189, 11201), False, 'import cv2\n'), ((15449, 15486), 'os.path.join', 'os.path.join', (['MOUTH_FOLDER', 'CSV_STATS'], {}), '(MOUTH_FOLDER, CSV_STATS)\n', (15461, 15486), False, 'import os\n'), ((16255, 16284), 'os.walk', 'os.walk', (['YAWDD_DATASET_FOLDER'], {}), '(YAWDD_DATASET_FOLDER)\n', (16262, 16284), False, 'import os\n'), ((2425, 2443), 'pathlib.Path', 'Path', (['MOUTH_FOLDER'], {}), '(MOUTH_FOLDER)\n', (2429, 2443), False, 'from pathlib import Path\n'), ((2479, 2504), 'pathlib.Path', 'Path', (['MOUTH_OPENED_FOLDER'], {}), '(MOUTH_OPENED_FOLDER)\n', (2483, 2504), False, 'from pathlib import Path\n'), ((2540, 2565), 'pathlib.Path', 'Path', (['MOUTH_CLOSED_FOLDER'], {}), '(MOUTH_CLOSED_FOLDER)\n', (2544, 2565), False, 'from pathlib import Path\n'), ((5151, 5197), 'dlib.rectangle', 'dlib.rectangle', (['start_x', 'start_y', 'end_x', 'end_y'], {}), '(start_x, start_y, end_x, end_y)\n', (5165, 5197), False, 'import dlib\n'), ((8829, 8878), 'cv2.cvtColor', 'cv2.cvtColor', (['target_face_roi', 'cv2.COLOR_BGR2GRAY'], {}), '(target_face_roi, cv2.COLOR_BGR2GRAY)\n', (8841, 8878), False, 'import cv2\n'), ((9400, 9544), 'os.path.join', 'os.path.join', 
(['MOUTH_OPENED_FOLDER', 'f"""{read_mouth_open_counter}_{open_mouth_ratio}_{video_id}_{frame_id}_{prefix}_{lndmk_type_name}.jpg"""'], {}), "(MOUTH_OPENED_FOLDER,\n f'{read_mouth_open_counter}_{open_mouth_ratio}_{video_id}_{frame_id}_{prefix}_{lndmk_type_name}.jpg'\n )\n", (9412, 9544), False, 'import os\n'), ((9577, 9609), 'cv2.imwrite', 'cv2.imwrite', (['file_name', 'gray_img'], {}), '(file_name, gray_img)\n', (9588, 9609), False, 'import cv2\n'), ((10061, 10206), 'os.path.join', 'os.path.join', (['MOUTH_CLOSED_FOLDER', 'f"""{read_mouth_close_counter}_{open_mouth_ratio}_{video_id}_{frame_id}_{prefix}_{lndmk_type_name}.jpg"""'], {}), "(MOUTH_CLOSED_FOLDER,\n f'{read_mouth_close_counter}_{open_mouth_ratio}_{video_id}_{frame_id}_{prefix}_{lndmk_type_name}.jpg'\n )\n", (10073, 10206), False, 'import os\n'), ((10239, 10271), 'cv2.imwrite', 'cv2.imwrite', (['file_name', 'gray_img'], {}), '(file_name, gray_img)\n', (10250, 10271), False, 'import cv2\n'), ((12019, 12058), 'cv2.cvtColor', 'cv2.cvtColor', (['frame', 'cv2.COLOR_BGR2GRAY'], {}), '(frame, cv2.COLOR_BGR2GRAY)\n', (12031, 12058), False, 'import cv2\n'), ((15087, 15110), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (15108, 15110), False, 'import cv2\n'), ((15494, 15530), 'os.path.isfile', 'os.path.isfile', (['video_stat_dict_path'], {}), '(video_stat_dict_path)\n', (15508, 15530), False, 'import os\n'), ((15813, 15826), 'csv.writer', 'csv.writer', (['f'], {}), '(f)\n', (15823, 15826), False, 'import csv\n'), ((11703, 11718), 'numpy.shape', 'np.shape', (['frame'], {}), '(frame)\n', (11711, 11718), True, 'import numpy as np\n'), ((15608, 15621), 'csv.writer', 'csv.writer', (['f'], {}), '(f)\n', (15618, 15621), False, 'import csv\n'), ((16425, 16449), 'os.path.join', 'os.path.join', (['root', 'file'], {}), '(root, file)\n', (16437, 16449), False, 'import os\n')] |
# Copyright (c) 2018, INRIA
# Copyright (c) 2018, University of Lille
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
try:
from influxdb_client import InfluxDBClient
from influxdb_client.client.write_api import SYNCHRONOUS
#from influxdb import InfluxDBClient
from requests.exceptions import ConnectionError
except ImportError:
logging.getLogger().info("influx_client is not installed.")
from typing import List
from powerapi.database import BaseDB, DBError
from powerapi.report import Report
from powerapi.report_model import ReportModel
class CantConnectToInfluxDB2Exception(DBError):
    """Raised when the InfluxDB 2.x server cannot be reached."""
    pass
class InfluxDB2(BaseDB):
    """
    InfluxDB 2.x handler derived from BaseDB.

    Allows writing PowerAPI reports into an InfluxDB 2.x bucket.
    """

    def __init__(self, uri: str, port: int, token: str, org: str, bucket: str):
        """
        :param uri: hostname of the InfluxDB server
        :param port: port of the InfluxDB server
        :param token: access token needed to connect to the influxdb instance
        :param org: organization that holds the data
        :param bucket: bucket where the data is going to be stored
        """
        BaseDB.__init__(self)
        self.uri = uri
        self.port = port
        self.complete_url = "http://%s:%s" % (self.uri, str(self.port))
        self.token = token
        self.org = org
        # resolved from the server in connect()
        self.org_id = None
        self.bucket = bucket
        self.client = None
        self.write_api = None

    def _ping_client(self):
        # Newer influxdb_client objects expose health(); fall back to a raw
        # /ping request otherwise.
        if hasattr(self.client, 'health'):
            self.client.health()
        else:
            self.client.request(url="ping", method='GET', expected_response_code=204)

    def connect(self):
        """
        Override from BaseDB.

        Create the connection to the influxdb database with the current
        configuration (url/token/org), check that the server is reachable
        and create the target bucket when it does not exist yet.

        :raises CantConnectToInfluxDB2Exception: when the server is unreachable
        """
        # close connection if reload
        if self.client is not None:
            self.client.close()
        self.client = InfluxDBClient(url=self.complete_url, token=self.token, org=self.org)
        # retrieve the org_id matching the configured org name
        org_api = self.client.organizations_api()
        for org_response in org_api.find_organizations():
            if org_response.name == self.org:
                self.org_id = org_response.id
        self.write_api = self.client.write_api(write_options=SYNCHRONOUS)
        try:
            self._ping_client()
        except ConnectionError:
            raise CantConnectToInfluxDB2Exception('connexion error')
        bucket_api = self.client.buckets_api()
        if bucket_api.find_bucket_by_name(self.bucket) is None:
            # the bucket does not exist yet: create it (requires the org_id)
            bucket_api.create_bucket(bucket_name=self.bucket, org_id=self.org_id)

    def save(self, report: Report, report_model: ReportModel):
        """
        Override from BaseDB: write a single report.

        :param report: Report to save
        :param report_model: ReportModel used to serialize the report
        """
        data = report_model.to_influxdb(report.serialize())
        # BUGFIX: the original used `this.bucket`, a NameError at runtime —
        # Python uses `self`, not `this`.
        self.write_api.write(bucket=self.bucket, record=data)

    def save_many(self, reports: List[Report], report_model: ReportModel):
        """
        Save a batch of reports.

        :param reports: Batch of reports.
        :param report_model: ReportModel used to serialize each report
        """
        data_list = list(map(lambda r: report_model.to_influxdb(r.serialize()), reports))
        self.write_api.write(bucket=self.bucket, record=data_list)
| [
"logging.getLogger",
"powerapi.database.BaseDB.__init__",
"influxdb_client.InfluxDBClient"
] | [((3073, 3094), 'powerapi.database.BaseDB.__init__', 'BaseDB.__init__', (['self'], {}), '(self)\n', (3088, 3094), False, 'from powerapi.database import BaseDB, DBError\n'), ((4035, 4104), 'influxdb_client.InfluxDBClient', 'InfluxDBClient', ([], {'url': 'self.complete_url', 'token': 'self.token', 'org': 'self.org'}), '(url=self.complete_url, token=self.token, org=self.org)\n', (4049, 4104), False, 'from influxdb_client import InfluxDBClient\n'), ((1816, 1835), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (1833, 1835), False, 'import logging\n')] |
""" TensorMONK's :: architectures :: ESRGAN """
__all__ = ["Generator", "Discriminator", "VGG19"]
import torch
import torch.nn as nn
import torchvision
from ..layers import Convolution
class DenseBlock(nn.Module):
    r"""Dense block with scaled residual output.

    From DenseNet - https://arxiv.org/pdf/1608.06993.pdf.
    Every convolution sees the concatenation of the input and all previous
    convolution outputs; the final output is scaled by ``beta`` and added
    back to the input.
    """

    def __init__(self, tensor_size: tuple, filter_size: int = 3,
                 activation: str = "lklu", normalization: str = None,
                 n_blocks: int = 5, beta: float = 0.2, **kwargs):
        super(DenseBlock, self).__init__()
        n, c, h, w = tensor_size
        convolutions = []
        for i in range(n_blocks):
            # the last convolution has no activation
            is_last = (i + 1) == n_blocks
            convolutions.append(Convolution(
                (1, c * (i + 1), h, w), filter_size, out_channels=c, strides=1,
                activation=None if is_last else activation,
                normalization=normalization, lklu_negslope=0.1))
        self.cnns = nn.ModuleList(convolutions)
        self.tensor_size = tensor_size
        # As defined in https://arxiv.org/pdf/1602.07261.pdf
        self.beta = beta

    def forward(self, tensor: torch.Tensor):
        r"""Residual dense block with scaling."""
        features = [tensor]
        out = None
        for cnn in self.cnns:
            inp = features[0] if len(features) == 1 else torch.cat(features, 1)
            out = cnn(inp)
            features.append(out)
        return tensor + out * self.beta
class RRDB(nn.Module):
    r"""Residual-in-Residual Dense Block: a stack of DenseBlocks wrapped in
    a scaled residual connection."""

    def __init__(self, tensor_size: tuple, filter_size: int = 3,
                 activation: str = "lklu", normalization: str = None,
                 n_dense: int = 3, n_blocks: int = 5, beta: float = 0.2,
                 **kwargs):
        super(RRDB, self).__init__()
        dense_blocks = [
            DenseBlock(tensor_size, filter_size, activation, normalization,
                       n_blocks, beta, **kwargs)
            for _ in range(n_dense)]
        self.cnn = nn.Sequential(*dense_blocks)
        self.tensor_size = tensor_size
        # As defined in https://arxiv.org/pdf/1602.07261.pdf
        self.beta = beta

    def forward(self, tensor: torch.Tensor):
        r"""Residual-in-Residual Dense Block with scaling (beta)."""
        return tensor + self.cnn(tensor) * self.beta
class Generator(nn.Module):
    r"""ESRGAN generator network using Residual-in-Residual Dense Blocks.

    Paper: ESRGAN
    URL: https://arxiv.org/pdf/1809.00219.pdf

    Args:
        tensor_size (tuple, required): Shape of tensor in
            (None/any integer >0, channels, height, width).
        n_filters (int): The number of filters used through out the network,
            however, DenseBlock will have multiples of n_filters.
            default = 64
        n_rrdb (int): The number of Residual-in-Residual Dense Block (RRDB).
            default = 16
        n_dense (int): The number of dense blocks in RRDB.
            default = 3
        n_blocks (int): The number of convolutions in dense blocks.
            default = 5
        n_upscale (int): Number of upscale done on input shape using
            pixel-shuffle.
            default = 2
        beta (float): The scale factor of output before adding to any residue.
            default = 0.2
    """

    def __init__(self,
                 tensor_size: tuple = (1, 3, 32, 32),
                 n_filters: int = 64,
                 n_rrdb: int = 16,
                 n_dense: int = 3,
                 n_blocks: int = 5,
                 n_upscale: int = 2,
                 beta: float = 0.2,
                 **kwargs):
        super(Generator, self).__init__()
        # stem convolution: maps input channels to n_filters
        self.initial = Convolution(
            tensor_size, 3, n_filters, 1, activation=None)
        modules = []
        t_size = self.initial.tensor_size
        # trunk: n_rrdb RRDB blocks followed by a fusion convolution
        for _ in range(n_rrdb):
            modules.append(RRDB(t_size, 3, "lklu", n_dense=n_dense,
                                n_blocks=n_blocks, beta=beta))
        modules.append(Convolution(t_size, 3, n_filters, 1, activation=None))
        self.rrdbs = nn.Sequential(*modules)
        modules = []
        # upsampling head: each step pixel-shuffles x2 (4*n_filters -> n_filters)
        for _ in range(n_upscale):
            modules.append(
                Convolution(t_size, 3, n_filters * 4, 1, activation="lklu"))
            modules.append(nn.PixelShuffle(upscale_factor=2))
            # track the spatial size doubling from the pixel shuffle
            t_size = (t_size[0], t_size[1], t_size[2]*2, t_size[3]*2)
        modules.append(Convolution(t_size, 3, n_filters, 1, activation="lklu"))
        modules.append(Convolution(t_size, 3, tensor_size[1], activation=None))
        self.upscale = nn.Sequential(*modules)
        self.tensor_size = tensor_size
        self.initialize()

    def forward(self, tensor: torch.Tensor):
        r"""Expects normalized tensor (mean = 0.5 and std = 0.25)."""
        o = self.initial(tensor)
        # global residual around the RRDB trunk
        o = o + self.rrdbs(o)
        o = self.upscale(o)
        return o

    def enhance(self, tensor: torch.Tensor):
        # Inference helper: denormalize the output back to [0, 1].
        with torch.no_grad():
            return self(tensor).mul_(0.25).add_(0.5).clamp_(0, 1)

    def initialize(self):
        r"""As defined in https://arxiv.org/pdf/1809.00219.pdf."""
        for m in self.modules():
            if isinstance(m, nn.Conv2d) or isinstance(m, nn.ConvTranspose2d):
                # kaiming init scaled down by 0.1 (per the ESRGAN paper)
                nn.init.kaiming_normal_(m.weight)
                m.weight.data.mul_(0.1)
class Discriminator(nn.Module):
    r"""ESRGAN discriminator network.
    Paper: ESRGAN
    URL: https://arxiv.org/pdf/1809.00219.pdf
    Args:
        tensor_size (tuple, required): Shape of tensor in
            (None/any integer >0, channels, height, width).
    """
    def __init__(self, tensor_size: tuple = (1, 3, 128, 128), **kwargs):
        super(Discriminator, self).__init__()
        self.t_size = tensor_size
        # NOTE(review): declared output size is (None, 1), but the network
        # below is a pure conv stack -- confirm downstream flattening/head.
        self.tensor_size = None, 1
        modules = []
        t_size = self.t_size
        for oc in (64, 128, 256, 512):
            # Stride-1 convolution; batch-norm is skipped only on the very
            # first block (oc == 64), as in the reference architecture.
            modules.append(Convolution(
                t_size, 3, oc, 1, normalization=None if oc == 64 else "batch",
                activation="lklu", lklu_negslope=0.2))
            t_size = modules[-1].tensor_size
            # Stride-2 convolution halves the spatial resolution.
            modules.append(Convolution(
                t_size, 3, oc, 2, normalization="batch",
                activation="lklu", lklu_negslope=0.2))
            t_size = modules[-1].tensor_size
        self.discriminator = nn.Sequential(*modules)
        # FIX: initialize() was defined but never invoked, unlike Generator,
        # which calls it at the end of __init__. Apply the same scaled
        # Kaiming initialization here for consistency with the paper.
        self.initialize()
    def forward(self, tensor: torch.Tensor):
        r"""Expects normalized tensor (mean = 0.5 and std = 0.25)."""
        return self.discriminator(tensor)
    def initialize(self):
        r"""As defined in https://arxiv.org/pdf/1809.00219.pdf."""
        # Scaled Kaiming init (x0.1) on all conv weights, per the paper.
        for m in self.modules():
            if isinstance(m, nn.Conv2d) or isinstance(m, nn.ConvTranspose2d):
                nn.init.kaiming_normal_(m.weight)
                m.weight.data.mul_(0.1)
class VGG19(nn.Module):
    r"""Pretrained VGG19 model from torchvision.
    Truncated to a convolutional feature extractor (perceptual features).
    """
    def __init__(self, **kwargs):
        super(VGG19, self).__init__()
        # Keep only the first 35 layers of the feature stack.
        # NOTE(review): presumably this stops before the final activation
        # (ESRGAN uses pre-activation VGG features) -- confirm the index
        # against torchvision's VGG19 layer layout.
        self.vgg19 = torchvision.models.vgg19(pretrained=True).features[:35]
    def forward(self, tensor: torch.Tensor):
        r"""Expects normalized tensor (mean = 0.5 and std = 0.25)."""
        return self.vgg19(tensor)
class ESRGAN:
    r"""Namespace bundling the ESRGAN building blocks: the generator,
    the discriminator and the VGG19 perceptual-feature network."""
    Generator = Generator
    Discriminator = Discriminator
    VGG19 = VGG19
| [
"torchvision.models.vgg19",
"torch.nn.PixelShuffle",
"torch.nn.ModuleList",
"torch.nn.Sequential",
"torch.nn.init.kaiming_normal_",
"torch.no_grad",
"torch.cat"
] | [((883, 902), 'torch.nn.ModuleList', 'nn.ModuleList', (['cnns'], {}), '(cnns)\n', (896, 902), True, 'import torch.nn as nn\n'), ((1879, 1899), 'torch.nn.Sequential', 'nn.Sequential', (['*cnns'], {}), '(*cnns)\n', (1892, 1899), True, 'import torch.nn as nn\n'), ((3964, 3987), 'torch.nn.Sequential', 'nn.Sequential', (['*modules'], {}), '(*modules)\n', (3977, 3987), True, 'import torch.nn as nn\n'), ((4464, 4487), 'torch.nn.Sequential', 'nn.Sequential', (['*modules'], {}), '(*modules)\n', (4477, 4487), True, 'import torch.nn as nn\n'), ((6215, 6238), 'torch.nn.Sequential', 'nn.Sequential', (['*modules'], {}), '(*modules)\n', (6228, 6238), True, 'import torch.nn as nn\n'), ((4836, 4851), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (4849, 4851), False, 'import torch\n'), ((1233, 1253), 'torch.cat', 'torch.cat', (['(x, o)', '(1)'], {}), '((x, o), 1)\n', (1242, 1253), False, 'import torch\n'), ((4176, 4209), 'torch.nn.PixelShuffle', 'nn.PixelShuffle', ([], {'upscale_factor': '(2)'}), '(upscale_factor=2)\n', (4191, 4209), True, 'import torch.nn as nn\n'), ((5140, 5173), 'torch.nn.init.kaiming_normal_', 'nn.init.kaiming_normal_', (['m.weight'], {}), '(m.weight)\n', (5163, 5173), True, 'import torch.nn as nn\n'), ((6618, 6651), 'torch.nn.init.kaiming_normal_', 'nn.init.kaiming_normal_', (['m.weight'], {}), '(m.weight)\n', (6641, 6651), True, 'import torch.nn as nn\n'), ((6863, 6904), 'torchvision.models.vgg19', 'torchvision.models.vgg19', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (6887, 6904), False, 'import torchvision\n')] |
"""Example for Pytest-Gherkin"""
import ast
from pytest import approx
from pt_gh import step, value_options
# Closed set of operator names accepted by the "I <op> them" step below.
# NOTE(review): this module-level name shadows the stdlib ``operator`` module.
operator = value_options("add", "subtract")
@step("I have {num1:d} and {num2:d}")
def given_numbers_i(num1, num2, context):
    """Store two integers in the shared context and reset the running answer.

    ``num1``/``num2`` are converted by the ``:d`` annotation; ``context`` is a
    Pytest fixture shared across steps.
    """
    context["nums"] = [num1, num2]
    context["ans"] = 0
@step("I have floats {num1:f} and {num2:f}")
def given_numbers_f(num1, num2, context):
    """Store two floats in the shared context and reset the running answer.

    ``num1``/``num2`` are converted by the ``:f`` annotation; ``context`` is a
    Pytest fixture shared across steps.
    """
    context["nums"] = [num1, num2]
    context["ans"] = 0.0
@step("I have list of floats {float_list}")
def i_have_list_of_floats(float_list, context):
    """Parse a Python-literal list of floats into the shared context."""
    context["nums"] = ast.literal_eval(float_list)
    context["ans"] = 0.0
@step("I {operator:operator} them", dict(operator=operator))
def i_en_de_crypt(operator, context):
    """Apply the chosen operator over the stored numbers.

    ``operator`` is validated against the ``value_options`` list ("add" or
    "subtract"); ``context`` is a Pytest fixture shared across steps.
    """
    numbers = context["nums"]
    if operator == "add":
        for value in numbers:
            context["ans"] += value
    else:
        answer = numbers[0]
        for value in numbers[1:]:
            answer -= value
        context["ans"] = answer
@step("I have {result:d} as result")
def i_get_answer_i(result, context):
    """Check that the accumulated answer equals the expected integer."""
    expected = result
    assert context["ans"] == expected
@step("I have float {result:f} as result")
def i_get_answer_f(result, context):
    """Check that the accumulated answer matches the expected float
    (within pytest's default approx tolerance)."""
    expected = approx(result)
    assert context["ans"] == expected
@step("I have a matrix:")
def i_have_a_matrix(data_table, context):
    """Convert the Gherkin data table into an integer matrix in the context.

    ``data_table`` is a 2D list of strings built from the Gherkin table, so
    every cell is converted with ``int`` explicitly.
    """
    matrix = []
    for row in data_table:
        matrix.append([int(cell) for cell in row])
    context["matrix"] = matrix
@step("I sum all rows")
def i_sum_all_rows(context):
    """Store the per-row sums of the matrix as the context vector."""
    context["vector"] = list(map(sum, context["matrix"]))
@step("I have a vector:")
def i_have_a_vector(data_table, context):
    """Check the stored vector against a one-column Gherkin data table.

    ``data_table`` contains strings, so the first cell of each row is
    converted with ``int`` before comparing.
    """
    expected = [int(row[0]) for row in data_table]
    assert context["vector"] == expected
| [
"ast.literal_eval",
"pytest.approx",
"pt_gh.value_options",
"pt_gh.step"
] | [((124, 156), 'pt_gh.value_options', 'value_options', (['"""add"""', '"""subtract"""'], {}), "('add', 'subtract')\n", (137, 156), False, 'from pt_gh import step, value_options\n'), ((160, 196), 'pt_gh.step', 'step', (['"""I have {num1:d} and {num2:d}"""'], {}), "('I have {num1:d} and {num2:d}')\n", (164, 196), False, 'from pt_gh import step, value_options\n'), ((465, 508), 'pt_gh.step', 'step', (['"""I have floats {num1:f} and {num2:f}"""'], {}), "('I have floats {num1:f} and {num2:f}')\n", (469, 508), False, 'from pt_gh import step, value_options\n'), ((779, 821), 'pt_gh.step', 'step', (['"""I have list of floats {float_list}"""'], {}), "('I have list of floats {float_list}')\n", (783, 821), False, 'from pt_gh import step, value_options\n'), ((1446, 1481), 'pt_gh.step', 'step', (['"""I have {result:d} as result"""'], {}), "('I have {result:d} as result')\n", (1450, 1481), False, 'from pt_gh import step, value_options\n'), ((1667, 1708), 'pt_gh.step', 'step', (['"""I have float {result:f} as result"""'], {}), "('I have float {result:f} as result')\n", (1671, 1708), False, 'from pt_gh import step, value_options\n'), ((1902, 1926), 'pt_gh.step', 'step', (['"""I have a matrix:"""'], {}), "('I have a matrix:')\n", (1906, 1926), False, 'from pt_gh import step, value_options\n'), ((2241, 2263), 'pt_gh.step', 'step', (['"""I sum all rows"""'], {}), "('I sum all rows')\n", (2245, 2263), False, 'from pt_gh import step, value_options\n'), ((2402, 2426), 'pt_gh.step', 'step', (['"""I have a vector:"""'], {}), "('I have a vector:')\n", (2406, 2426), False, 'from pt_gh import step, value_options\n'), ((919, 947), 'ast.literal_eval', 'ast.literal_eval', (['float_list'], {}), '(float_list)\n', (935, 947), False, 'import ast\n'), ((1884, 1898), 'pytest.approx', 'approx', (['result'], {}), '(result)\n', (1890, 1898), False, 'from pytest import approx\n')] |
#!/usr/bin/env python3
import logging
import subprocess
import re
import boto.utils
from jinja2 import Environment, FileSystemLoader
from taupage import get_config
# Root-logger setup for this script and the module-level logger instance.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
# Jinja2 template name plus td-agent (Fluentd) template/output locations.
TPL_NAME = 'td-agent.conf.jinja2'
TD_AGENT_TEMPLATE_PATH = '/etc/td-agent/templates/'
TD_AGENT_OUTPUT_PATH = '/etc/td-agent/td-agent.conf'
def restart_td_agent_process():
    '''Restart the td-agent (Fluentd) service, raising on non-zero exit.

    Waits at most 5 seconds for the service command to finish; a timeout
    propagates as ``subprocess.TimeoutExpired``.
    '''
    proc = subprocess.Popen(['service', 'td-agent', 'restart'])
    rc = proc.wait(timeout=5)
    if rc:
        raise Exception("'service td-agent restart' failed with exit code: {0}".format(rc))
def get_scalyr_api_key():
    '''Fetch the Scalyr account key from the Taupage configuration.

    Looks in the ``logging`` section first, falling back to the top-level
    ``scalyr_account_key``. Keys prefixed with "aws:kms:" are decrypted via
    the Taupage KMS helper script. Exits the process when decryption fails.
    '''
    main_config = get_config()
    logging_section = main_config.get('logging')
    api_key = logging_section.get('scalyr_account_key',
                                  main_config.get('scalyr_account_key'))
    if not api_key:
        # No key configured: hand back the falsy value unchanged.
        return api_key
    if re.search('aws:kms:', api_key, re.IGNORECASE):
        # Strip the marker prefix, then decrypt the remaining ciphertext.
        api_key = re.sub(r'aws:kms:', '', api_key)
        try:
            raw = subprocess.check_output(['python3',
                                             '/opt/taupage/bin/decrypt-kms.py',
                                             api_key])
            api_key = raw.decode('UTF-8').strip()
        except Exception:
            logger.error('Failed to run /opt/taupage/bin/decrypt-kms.py')
            raise SystemExit()
        if api_key == "Invalid KMS key.":
            logger.error('Failed to decrypt KMS Key')
            raise SystemExit(1)
    return api_key
def update_configuration_from_template(s3_default):
    '''
    Render /etc/td-agent/td-agent.conf from the Jinja2 template using the
    Taupage logging configuration plus EC2 instance metadata, and install a
    cron job for S3 access checks when an S3 destination is in use.
    Raises SystemExit(1) when a required file cannot be written.
    NOTE(review): the ``s3_default`` parameter is never used inside this
    function -- confirm whether it was meant to influence the defaults.
    '''
    # Tracks which output plugins the template must emit.
    fluentd_destinations = dict(scalyr=False, s3=False, rsyslog=False, scalyr_s3=False)
    config = get_config()
    logging_config = config.get('logging', {})
    # Application identity used to tag log events.
    application_id = config.get('application_id')
    application_version = config.get('application_version')
    stack = config.get('notify_cfn', {}).get('stack')
    source = config.get('source')
    image = config.get('source').split(':', 1)[0]
    # EC2 instance identity document supplies region/account for defaults.
    instance_data = boto.utils.get_instance_identity()['document']
    aws_region = instance_data['region']
    aws_account = instance_data['accountId']
    hostname = boto.utils.get_instance_metadata()['local-hostname'].split('.')[0]
    customlog = config.get('mount_custom_log')
    # Syslog parser variant depends on whether AWS metadata is injected.
    if config.get('rsyslog_aws_metadata'):
        scalyr_syslog_log_parser = 'systemLogMetadata'
    else:
        scalyr_syslog_log_parser = 'systemLog'
    scalyr_application_log_parser = logging_config.get('scalyr_application_log_parser', 'slf4j')
    scalyr_custom_log_parser = logging_config.get('scalyr_custom_log_parser', 'slf4j')
    # Per-stream destinations fall back to the global log_destination ('s3').
    fluentd_log_destination = logging_config.get('log_destination', 's3')
    fluentd_syslog_destination = logging_config.get('syslog_destination', fluentd_log_destination)
    fluentd_applog_destination = logging_config.get('applog_destination', fluentd_log_destination)
    fluentd_authlog_destination = logging_config.get('authlog_destination', fluentd_log_destination)
    fluentd_customlog_destination = logging_config.get('customlog_destination', fluentd_log_destination)
    fluentd_applog_filter_exclude = logging_config.get('applog_filter_exclude', None)
    fluentd_customlog_filter_exclude = logging_config.get('customlog_filter_exclude', None)
    fluentd_loglevel = logging_config.get('fluentd_loglevel', 'error')
    # S3 output settings; bucket defaults to the account/region convention.
    fluentd_s3_raw_log_format = logging_config.get('s3_raw_log_format', 'true')
    fluentd_s3_region = logging_config.get('s3_region', aws_region)
    fluentd_s3_bucket = logging_config.get('s3_bucket', 'zalando-logging-'+aws_account+'-'+aws_region)
    fluentd_s3_timekey = logging_config.get('s3_timekey', '5m')
    fluentd_s3_acl = logging_config.get('s3_acl', 'bucket-owner-full-control')
    # rsyslog output settings.
    fluentd_rsyslog_host = logging_config.get('rsyslog_host')
    fluentd_rsyslog_port = logging_config.get('rsyslog_port', '514')
    fluentd_rsyslog_protocol = logging_config.get('rsyslog_protocol', 'tcp')
    fluentd_rsyslog_severity = logging_config.get('rsyslog_severity', 'notice')
    fluentd_rsyslog_program = logging_config.get('rsyslog_program', 'fluentd')
    fluentd_rsyslog_hostname = logging_config.get('rsyslog_hostname', hostname)
    # Mark every destination that at least one log stream routes to.
    for destination in (fluentd_applog_destination,
                        fluentd_authlog_destination,
                        fluentd_customlog_destination,
                        fluentd_syslog_destination):
        fluentd_destinations[destination] = True
    # Get Scalyr key only if configured
    if fluentd_destinations.get('scalyr') or fluentd_destinations.get('scalyr_s3'):
        scalyr_api_key = get_scalyr_api_key()
    else:
        scalyr_api_key = None
    if fluentd_destinations.get('s3') or fluentd_destinations.get('scalyr_s3'):
        # Cron job that periodically verifies access to the S3 log bucket.
        try:
            with open('/etc/cron.d/s3-iam-check', 'w') as file:
                file.write('#!/bin/bash\n')
                file.write('PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\n')
                file.write('*/5 * * * * root /opt/taupage/bin/s3-iam-check.py test {!s}\n'.format(fluentd_s3_bucket))
        except Exception:
            logger.exception('Failed to write file /etc/cron.d/s3-iam-check')
            raise SystemExit(1)
    # Render the td-agent.conf template with all collected settings.
    env = Environment(loader=FileSystemLoader(TD_AGENT_TEMPLATE_PATH), trim_blocks=True)
    template_data = env.get_template(TPL_NAME).render(
        scalyr_api_key=scalyr_api_key,
        application_id=application_id,
        application_version=application_version,
        stack=stack,
        source=source,
        image=image,
        aws_region=aws_region,
        aws_account=aws_account,
        customlog=customlog,
        scalyr_application_log_parser=scalyr_application_log_parser,
        scalyr_syslog_log_parser=scalyr_syslog_log_parser,
        scalyr_custom_log_parser=scalyr_custom_log_parser,
        fluentd_syslog_destination=fluentd_syslog_destination,
        fluentd_applog_destination=fluentd_applog_destination,
        fluentd_applog_filter_exclude=fluentd_applog_filter_exclude,
        fluentd_authlog_destination=fluentd_authlog_destination,
        fluentd_customlog_destination=fluentd_customlog_destination,
        fluentd_customlog_filter_exclude=fluentd_customlog_filter_exclude,
        fluentd_loglevel=fluentd_loglevel,
        fluentd_s3_raw_log_format=fluentd_s3_raw_log_format,
        fluentd_s3_region=fluentd_s3_region,
        fluentd_s3_bucket=fluentd_s3_bucket,
        fluentd_s3_timekey=fluentd_s3_timekey,
        fluentd_s3_acl=fluentd_s3_acl,
        fluentd_rsyslog_host=fluentd_rsyslog_host,
        fluentd_rsyslog_port=fluentd_rsyslog_port,
        fluentd_rsyslog_protocol=fluentd_rsyslog_protocol,
        fluentd_rsyslog_severity=fluentd_rsyslog_severity,
        fluentd_rsyslog_program=fluentd_rsyslog_program,
        fluentd_rsyslog_hostname=fluentd_rsyslog_hostname,
        fluentd_destinations=fluentd_destinations
    )
    # Write the rendered configuration into place for td-agent.
    try:
        with open(TD_AGENT_OUTPUT_PATH, 'w') as f:
            f.write(template_data)
    except Exception:
        logger.exception('Failed to write file td-agent.conf')
        raise SystemExit(1)
if __name__ == '__main__':
    # Short hostname of this EC2 instance (first label of local-hostname).
    hostname = boto.utils.get_instance_metadata()['local-hostname'].split('.')[0]
    config = get_config()
    logging_config = config.get('logging')
    s3_default = False
    # A logging section without fluentd_enabled means Fluentd is opted out.
    if logging_config:
        if not logging_config.get('fluentd_enabled'):
            logger.info('Fluentd disabled; skipping Fluentd initialization')
            raise SystemExit()
    # Without a logging section, default to S3 logging and record a metric.
    # NOTE(review): 'dafault' below is a typo in a runtime log string; left
    # unchanged here to keep behavior byte-identical.
    if not logging_config:
        logger.info('Found no logging section in senza.yaml; enable dafault logging to s3')
        s3_default = True
        try:
            with open('/var/local/textfile_collector/fluentd_default_s3.prom', 'w') as file:
                file.write('fluentd_default_s3_logging{{tag=\"td-agent\",hostname=\"{!s}\"}} 1.0\n'
                           .format(hostname))
        except Exception:
            logger.exception('Failed to write file /var/local/textfile_collector/fluentd_default_s3.prom')
            raise SystemExit(1)
    # Cron job that scrapes Fluentd metrics every minute.
    try:
        with open('/etc/cron.d/get_fluentd_metrics', 'w') as file:
            file.write('#!/bin/bash\n')
            file.write('PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\n')
            file.write('* * * * * root /opt/taupage/bin/get-fluentd-metrics.sh\n')
    except Exception:
        logger.exception('Failed to write file /etc/cron.d/get_fluentd_metrics')
        raise SystemExit(1)
    # Render the configuration, then restart td-agent to pick it up.
    update_configuration_from_template(s3_default)
    restart_td_agent_process()
| [
"logging.basicConfig",
"logging.getLogger",
"subprocess.check_output",
"subprocess.Popen",
"jinja2.FileSystemLoader",
"re.sub",
"taupage.get_config",
"re.search"
] | [((167, 206), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (186, 206), False, 'import logging\n'), ((216, 243), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (233, 243), False, 'import logging\n'), ((460, 512), 'subprocess.Popen', 'subprocess.Popen', (["['service', 'td-agent', 'restart']"], {}), "(['service', 'td-agent', 'restart'])\n", (476, 512), False, 'import subprocess\n'), ((793, 805), 'taupage.get_config', 'get_config', ([], {}), '()\n', (803, 805), False, 'from taupage import get_config\n'), ((2018, 2030), 'taupage.get_config', 'get_config', ([], {}), '()\n', (2028, 2030), False, 'from taupage import get_config\n'), ((7569, 7581), 'taupage.get_config', 'get_config', ([], {}), '()\n', (7579, 7581), False, 'from taupage import get_config\n'), ((1055, 1107), 're.search', 're.search', (['"""aws:kms:"""', 'scalyr_api_key', 're.IGNORECASE'], {}), "('aws:kms:', scalyr_api_key, re.IGNORECASE)\n", (1064, 1107), False, 'import re\n'), ((1163, 1201), 're.sub', 're.sub', (['"""aws:kms:"""', '""""""', 'scalyr_api_key'], {}), "('aws:kms:', '', scalyr_api_key)\n", (1169, 1201), False, 'import re\n'), ((5573, 5613), 'jinja2.FileSystemLoader', 'FileSystemLoader', (['TD_AGENT_TEMPLATE_PATH'], {}), '(TD_AGENT_TEMPLATE_PATH)\n', (5589, 5613), False, 'from jinja2 import Environment, FileSystemLoader\n'), ((1253, 1344), 'subprocess.check_output', 'subprocess.check_output', (["['python3', '/opt/taupage/bin/decrypt-kms.py', scalyr_api_key]"], {}), "(['python3', '/opt/taupage/bin/decrypt-kms.py',\n scalyr_api_key])\n", (1276, 1344), False, 'import subprocess\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# The MIT License
#
# Copyright (c) 2016 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
import csv
from six import StringIO
from yagocd.resources import BaseManager
from yagocd.util import RequireParamMixin, since
@since('14.3.0')
class PropertyManager(BaseManager, RequireParamMixin):
    """
    The properties API allows managing of job properties.
    `Official documentation. <https://api.go.cd/current/#properties>`_
    :versionadded: 14.3.0.
    This class implements dictionary like methods for similar use.
    """

    RESOURCE_PATH = '{base_api}/properties/{pipeline_name}/{pipeline_counter}/{stage_name}/{stage_counter}/{job_name}'
    PATH_PARAMETERS = ['pipeline_name', 'pipeline_counter', 'stage_name', 'stage_counter', 'job_name']

    def __init__(
        self,
        session,
        pipeline_name=None,
        pipeline_counter=None,
        stage_name=None,
        stage_counter=None,
        job_name=None
    ):
        """
        Constructs instance of ``PropertyManager``.
        Parameters to the constructor and methods of the class could be duplicated. That is because of two use cases
        of this class:
        1. When the class being instantiated from :class:`yagocd.client.Client`, we don't know all the necessary
        parameters yet, but we need an instance to work with. So we skip parameters instantiation in constructor,
        but require them for each method.
        2. When the class being used from :class:`yagocd.resources.job.JobInstance` - in this case we already
        know all required parameters, so we can instantiate `PropertyManager` with them.
        :param session: session object from client.
        :type session: yagocd.session.Session.
        :param pipeline_name: name of the pipeline.
        :param pipeline_counter: pipeline counter.
        :param stage_name: name of the stage.
        :param stage_counter: stage counter.
        :param job_name: name of the job.
        """
        super(PropertyManager, self).__init__(session)
        self.base_api = self._session.base_api(api_path='')
        self._pipeline_name = pipeline_name
        self._pipeline_counter = pipeline_counter
        self._stage_name = stage_name
        self._stage_counter = stage_counter
        self._job_name = job_name

    def __len__(self):
        return len(self.list())

    def __iter__(self):
        """
        Method for iterating over all properties.
        :return: dictionary of properties.
        :rtype: dict[str, str]
        """
        return iter(self.list())

    def __getitem__(self, name):
        """
        Method for accessing to specific property in array-like manner by name.
        :param name: name of property to get.
        :return: single property as a dictionary.
        """
        return self.get(name=name)

    def __contains__(self, key):
        return key in self.list()

    def keys(self):
        return self.list().keys()

    def values(self):
        return self.list().values()

    def items(self):
        return self.list().items()

    def list(
        self,
        pipeline_name=None,
        pipeline_counter=None,
        stage_name=None,
        stage_counter=None,
        job_name=None
    ):
        """
        Lists all job properties.
        :versionadded: 14.3.0.
        :param pipeline_name: name of the pipeline.
        :param pipeline_counter: pipeline counter.
        :param stage_name: name of the stage.
        :param stage_counter: stage counter.
        :param job_name: name of the job.
        :return: dictionary of properties.
        :rtype: dict[str, str]
        """
        # `locals()` must be captured before any other assignment: it
        # snapshots the call arguments for `_require_param` resolution.
        func_args = locals()
        parameters = {p: self._require_param(p, func_args) for p in self.PATH_PARAMETERS}

        response = self._session.get(
            path=self.RESOURCE_PATH.format(base_api=self.base_api, **parameters),
            headers={'Accept': 'application/json'},
        )

        # The server answers with a two-row CSV: header row then value row.
        text = StringIO(response.text)
        parsed = list(csv.reader(text))
        # FIX: an empty/short payload (job without properties) previously
        # raised IndexError on `parsed[0]`/`parsed[1]`; treat it as "no
        # properties", mirroring the IndexError guard in `get`.
        if len(parsed) < 2:
            return {}
        properties = dict(zip(parsed[0], parsed[1]))
        return properties

    def get(
        self,
        name,
        pipeline_name=None,
        pipeline_counter=None,
        stage_name=None,
        stage_counter=None,
        job_name=None
    ):
        """
        Gets a property value by it's name.
        :info: You can use keyword `latest` as a pipeline counter or a stage counter.
        :versionadded: 14.3.0.
        :param name: name of property to get.
        :param pipeline_name: name of the pipeline.
        :param pipeline_counter: pipeline counter.
        :param stage_name: name of the stage.
        :param stage_counter: stage counter.
        :param job_name: name of the job.
        :return: value of requested property or ``None`` when it is absent.
        """
        # `locals()` must be captured first -- see note in `list`.
        func_args = locals()
        parameters = {p: self._require_param(p, func_args) for p in self.PATH_PARAMETERS}

        response = self._session.get(
            path=self._session.urljoin(self.RESOURCE_PATH, name).format(base_api=self.base_api, **parameters),
            headers={'Accept': 'application/json'},
        )

        # Single property comes back as CSV: header row then one value row.
        text = StringIO(response.text)
        parsed = list(csv.reader(text))
        try:
            return parsed[1][0]
        except IndexError:
            return None

    def historical(self, pipeline_name=None, stage_name=None, job_name=None, limit_pipeline=None, limit_count=None):
        """
        Get historical properties.
        :info: `limitPipeline` and `limitCount` are optional parameters. The default value of
        `limitPipeline` is latest pipeline instance’s counter. The default value of `limitCount` is `100`.
        :versionadded: 14.3.0.
        :param pipeline_name: name of the pipeline.
        :param stage_name: name of the stage.
        :param job_name: name of the job.
        :param limit_pipeline: pipeline limit for returned properties.
        :param limit_count: count limit for returned properties.
        :return: list of dictionaries as historical values.
        """
        # `locals()` must be captured first -- see note in `list`.
        func_args = locals()
        parameters = {
            'pipelineName': self._require_param('pipeline_name', func_args),
            'stageName': self._require_param('stage_name', func_args),
            'jobName': self._require_param('job_name', func_args),
        }
        # Optional limits are only sent when explicitly provided.
        if limit_pipeline is not None:
            parameters['limitPipeline'] = limit_pipeline
        if limit_count is not None:
            parameters['limitCount'] = limit_count

        response = self._session.get(
            path='{base_api}/properties/search'.format(base_api=self.base_api),
            params=parameters,
            headers={'Accept': 'application/json'},
        )

        text = StringIO(response.text)
        result = list(csv.DictReader(text))
        return result

    def create(
        self,
        name,
        value,
        pipeline_name=None,
        pipeline_counter=None,
        stage_name=None,
        stage_counter=None,
        job_name=None
    ):
        """
        Defines a property on a specific job instance.
        :versionadded: 14.3.0.
        :param name: name of property.
        :param value: value of property.
        :param pipeline_name: name of the pipeline.
        :param pipeline_counter: pipeline counter.
        :param stage_name: name of the stage.
        :param stage_counter: stage counter.
        :param job_name: name of the job.
        :return: an acknowledgement that the property was created.
        """
        # `locals()` must be captured first -- see note in `list`.
        func_args = locals()
        parameters = {p: self._require_param(p, func_args) for p in self.PATH_PARAMETERS}

        response = self._session.post(
            path=self._session.urljoin(self.RESOURCE_PATH, name).format(base_api=self.base_api, **parameters),
            data={'value': value},
            headers={
                'Accept': 'application/json',
                'Confirm': 'true'
            },
        )
        return response.text
| [
"yagocd.util.since",
"csv.DictReader",
"csv.reader",
"six.StringIO"
] | [((1446, 1461), 'yagocd.util.since', 'since', (['"""14.3.0"""'], {}), "('14.3.0')\n", (1451, 1461), False, 'from yagocd.util import RequireParamMixin, since\n'), ((5275, 5298), 'six.StringIO', 'StringIO', (['response.text'], {}), '(response.text)\n', (5283, 5298), False, 'from six import StringIO\n'), ((6464, 6487), 'six.StringIO', 'StringIO', (['response.text'], {}), '(response.text)\n', (6472, 6487), False, 'from six import StringIO\n'), ((8061, 8084), 'six.StringIO', 'StringIO', (['response.text'], {}), '(response.text)\n', (8069, 8084), False, 'from six import StringIO\n'), ((5321, 5337), 'csv.reader', 'csv.reader', (['text'], {}), '(text)\n', (5331, 5337), False, 'import csv\n'), ((6510, 6526), 'csv.reader', 'csv.reader', (['text'], {}), '(text)\n', (6520, 6526), False, 'import csv\n'), ((8107, 8127), 'csv.DictReader', 'csv.DictReader', (['text'], {}), '(text)\n', (8121, 8127), False, 'import csv\n')] |
import discord
import asyncio
import json
import asyncpg
from discord.ext import commands
from discord.ext import tasks
class Tasks(commands.Cog):
    """Background-task cog that rotates the bot's presence message."""
    def __init__(self, client):
        self.client = client
        print(f'{__name__} 로드 완료!')  # console confirmation that the cog loaded
        # Keep the loop running across transient Postgres connection errors.
        self.change_status.add_exception_type(asyncpg.PostgresConnectionError)
        self.change_status.start()
    def cog_unload(self):
        # Stop the background loop when this cog is unloaded.
        self.change_status.cancel()
    # NOTE(review): tasks.loop() is given no interval -- pacing appears to
    # come solely from the asyncio.sleep(5) calls below; confirm against the
    # discord.py version in use.
    @tasks.loop()
    async def change_status(self):
        """Cycle through three presence messages, 5 seconds apart each."""
        # Re-read the prefix each cycle so config changes take effect live.
        with open('botsetup.json', 'r') as f:
            data = json.load(f)
        prefix = data['default prefix']
        # 1) advertise the help command
        await self.client.change_presence(status=discord.Status.online, activity=discord.Game(f'"{prefix}도움" 이라고 말해보세요!'))
        await asyncio.sleep(5)
        # 2) show how many guilds the bot serves
        await self.client.change_presence(status=discord.Status.online, activity=discord.Game(f'{len(self.client.guilds)}개 서버에서 작동'))
        await asyncio.sleep(5)
        # 3) show the total member count
        await self.client.change_presence(status=discord.Status.online, activity=discord.Game(f'유저 {len(list(self.client.get_all_members()))}명과 함께 '))
        await asyncio.sleep(5)
    @change_status.before_loop
    async def before_change_status(self):
        # Do not touch the gateway until the client is fully ready.
        await self.client.wait_until_ready()
def setup(client):
    """discord.py extension entry point: register the Tasks cog."""
    client.add_cog(Tasks(client))
| [
"json.load",
"discord.Game",
"discord.ext.tasks.loop",
"asyncio.sleep"
] | [((449, 461), 'discord.ext.tasks.loop', 'tasks.loop', ([], {}), '()\n', (459, 461), False, 'from discord.ext import tasks\n'), ((565, 577), 'json.load', 'json.load', (['f'], {}), '(f)\n', (574, 577), False, 'import json\n'), ((762, 778), 'asyncio.sleep', 'asyncio.sleep', (['(5)'], {}), '(5)\n', (775, 778), False, 'import asyncio\n'), ((929, 945), 'asyncio.sleep', 'asyncio.sleep', (['(5)'], {}), '(5)\n', (942, 945), False, 'import asyncio\n'), ((1113, 1129), 'asyncio.sleep', 'asyncio.sleep', (['(5)'], {}), '(5)\n', (1126, 1129), False, 'import asyncio\n'), ((705, 745), 'discord.Game', 'discord.Game', (['f""""{prefix}도움" 이라고 말해보세요!"""'], {}), '(f\'"{prefix}도움" 이라고 말해보세요!\')\n', (717, 745), False, 'import discord\n')] |
import numpy as np
import math
from scipy.optimize import minimize
class Optimize:
    """Recover an orthonormal left/back/forward axis triple from (possibly
    non-orthogonal) input vectors via Euler-angle optimization."""

    def __init__(self):
        # Degree/radian conversion factors kept for external callers.
        self.c_rad2deg = 180.0 / np.pi
        self.c_deg2rad = np.pi / 180.0

    def isRotationMatrix(self, R):
        """Return True when R is orthogonal (R^T R == I within 1e-6)."""
        residual = np.linalg.norm(np.identity(3, dtype=R.dtype) - np.dot(np.transpose(R), R))
        return residual < 1e-6

    def Rot_Matrix_2_Euler_Angles(self, R):
        """Extract (roll, pitch, yaw) in radians from rotation matrix R."""
        assert self.isRotationMatrix(R)
        roll = -math.atan2(R[1, 0], R[1, 1])
        pitch = -math.asin(R[1, 2])
        yaw = -math.atan2(R[0, 2], R[2, 2])
        return np.array([roll, pitch, yaw])

    def Get_Init_Guess(self, l_vec, b_vec, f_vec):
        """Build an initial Euler-angle guess from the given axis vectors.

        Note: ``b_vec`` is normalized in place (callers rely on this);
        ``l_vec`` and ``f_vec`` are re-derived via cross products.
        """
        f_vec = np.cross(b_vec, l_vec)
        l_vec = np.cross(f_vec, b_vec)
        l_vec /= np.linalg.norm(l_vec)
        b_vec /= np.linalg.norm(b_vec)
        f_vec /= np.linalg.norm(f_vec)
        # Stack the unit axes as columns of a rotation-like matrix.
        R = (l_vec.reshape(3, 1) @ np.array([[1, 0, 0]])
             + b_vec.reshape(3, 1) @ np.array([[0, 1, 0]])
             + f_vec.reshape(3, 1) @ np.array([[0, 0, 1]]))
        assert R.shape == (3, 3)
        roll, pitch, yaw = self.Rot_Matrix_2_Euler_Angles(R)
        return np.array([roll, pitch, yaw])

    def Euler_Angles_2_Vectors(self, rx, ry, rz):
        """Map Euler angles (rx=pitch, ry=yaw, rz=roll) to the three axes.

        Yaw and roll are negated internally, matching the convention used by
        Rot_Matrix_2_Euler_Angles.
        """
        ry, rz = -ry, -rz
        cx, sx = np.cos(rx), np.sin(rx)
        cy, sy = np.cos(ry), np.sin(ry)
        cz, sz = np.cos(rz), np.sin(rz)
        R_x = np.array([[1.0, 0.0, 0.0],
                        [0.0, cx, -sx],
                        [0.0, sx, cx]])
        R_y = np.array([[cy, 0.0, sy],
                        [0.0, 1.0, 0.0],
                        [-sy, 0.0, cy]])
        R_z = np.array([[cz, -sz, 0.0],
                        [sz, cz, 0.0],
                        [0.0, 0.0, 1.0]])
        R = R_y @ R_x @ R_z
        return np.array([R @ np.array([1, 0, 0]),
                         R @ np.array([0, 1, 0]),
                         R @ np.array([0, 0, 1])])

    def Objective(self, x, l_vec, b_vec, f_vec):
        """Sum of squared angular errors between candidate and target axes."""
        l_hat, b_hat, f_hat = self.Euler_Angles_2_Vectors(x[0], x[1], x[2])
        total = 0.0
        for hat, vec in ((l_hat, l_vec), (b_hat, b_vec), (f_hat, f_vec)):
            # Clip guards acos against round-off just outside [-1, 1].
            cos_angle = np.clip(hat[0] * vec[0] + hat[1] * vec[1] + hat[2] * vec[2], -1, 1)
            total += math.acos(cos_angle) ** 2
        return total

    def Get_Ortho_Vectors(self, l_vec, b_vec, f_vec):
        """Return the orthonormal axis triple closest to the given vectors."""
        x0 = self.Get_Init_Guess(l_vec, b_vec, f_vec)
        sol = minimize(self.Objective, x0, args=(l_vec, b_vec, f_vec),
                       method='nelder-mead',
                       options={'xatol': 1e-7, 'disp': False})
        pitch, yaw, roll = sol.x
        vecs = self.Euler_Angles_2_Vectors(pitch, yaw, roll)
        return np.array([vecs[0], vecs[1], vecs[2]])
"numpy.identity",
"numpy.clip",
"numpy.cross",
"math.acos",
"scipy.optimize.minimize",
"math.asin",
"numpy.array",
"numpy.dot",
"math.atan2",
"numpy.cos",
"numpy.linalg.norm",
"numpy.sin",
"numpy.transpose"
] | [((246, 261), 'numpy.transpose', 'np.transpose', (['R'], {}), '(R)\n', (258, 261), True, 'import numpy as np\n'), ((289, 302), 'numpy.dot', 'np.dot', (['Rt', 'R'], {}), '(Rt, R)\n', (295, 302), True, 'import numpy as np\n'), ((315, 344), 'numpy.identity', 'np.identity', (['(3)'], {'dtype': 'R.dtype'}), '(3, dtype=R.dtype)\n', (326, 344), True, 'import numpy as np\n'), ((359, 395), 'numpy.linalg.norm', 'np.linalg.norm', (['(I - shouldBeIdentity)'], {}), '(I - shouldBeIdentity)\n', (373, 395), True, 'import numpy as np\n'), ((680, 708), 'numpy.array', 'np.array', (['[roll, pitch, yaw]'], {}), '([roll, pitch, yaw])\n', (688, 708), True, 'import numpy as np\n'), ((778, 800), 'numpy.cross', 'np.cross', (['b_vec', 'l_vec'], {}), '(b_vec, l_vec)\n', (786, 800), True, 'import numpy as np\n'), ((817, 839), 'numpy.cross', 'np.cross', (['f_vec', 'b_vec'], {}), '(f_vec, b_vec)\n', (825, 839), True, 'import numpy as np\n'), ((866, 887), 'numpy.linalg.norm', 'np.linalg.norm', (['l_vec'], {}), '(l_vec)\n', (880, 887), True, 'import numpy as np\n'), ((929, 950), 'numpy.linalg.norm', 'np.linalg.norm', (['b_vec'], {}), '(b_vec)\n', (943, 950), True, 'import numpy as np\n'), ((992, 1013), 'numpy.linalg.norm', 'np.linalg.norm', (['f_vec'], {}), '(f_vec)\n', (1006, 1013), True, 'import numpy as np\n'), ((1487, 1515), 'numpy.array', 'np.array', (['[roll, pitch, yaw]'], {}), '([roll, pitch, yaw])\n', (1495, 1515), True, 'import numpy as np\n'), ((2325, 2356), 'numpy.array', 'np.array', (['[l_vec, b_vec, f_vec]'], {}), '([l_vec, b_vec, f_vec])\n', (2333, 2356), True, 'import numpy as np\n'), ((2554, 2633), 'numpy.clip', 'np.clip', (['(l_hat[0] * l_vec[0] + l_hat[1] * l_vec[1] + l_hat[2] * l_vec[2])', '(-1)', '(1)'], {}), '(l_hat[0] * l_vec[0] + l_hat[1] * l_vec[1] + l_hat[2] * l_vec[2], -1, 1)\n', (2561, 2633), True, 'import numpy as np\n'), ((2654, 2733), 'numpy.clip', 'np.clip', (['(b_hat[0] * b_vec[0] + b_hat[1] * b_vec[1] + b_hat[2] * b_vec[2])', '(-1)', '(1)'], {}), '(b_hat[0] * 
b_vec[0] + b_hat[1] * b_vec[1] + b_hat[2] * b_vec[2], -1, 1)\n', (2661, 2733), True, 'import numpy as np\n'), ((2755, 2834), 'numpy.clip', 'np.clip', (['(f_hat[0] * f_vec[0] + f_hat[1] * f_vec[1] + f_hat[2] * f_vec[2])', '(-1)', '(1)'], {}), '(f_hat[0] * f_vec[0] + f_hat[1] * f_vec[1] + f_hat[2] * f_vec[2], -1, 1)\n', (2762, 2834), True, 'import numpy as np\n'), ((3074, 3198), 'scipy.optimize.minimize', 'minimize', (['self.Objective', 'x0'], {'args': '(l_vec, b_vec, f_vec)', 'method': '"""nelder-mead"""', 'options': "{'xatol': 1e-07, 'disp': False}"}), "(self.Objective, x0, args=(l_vec, b_vec, f_vec), method=\n 'nelder-mead', options={'xatol': 1e-07, 'disp': False})\n", (3082, 3198), False, 'from scipy.optimize import minimize\n'), ((3335, 3357), 'numpy.array', 'np.array', (['[v1, v2, v3]'], {}), '([v1, v2, v3])\n', (3343, 3357), True, 'import numpy as np\n'), ((556, 574), 'math.asin', 'math.asin', (['R[1, 2]'], {}), '(R[1, 2])\n', (565, 574), False, 'import math\n'), ((591, 619), 'math.atan2', 'math.atan2', (['R[1, 0]', 'R[1, 1]'], {}), '(R[1, 0], R[1, 1])\n', (601, 619), False, 'import math\n'), ((635, 663), 'math.atan2', 'math.atan2', (['R[0, 2]', 'R[2, 2]'], {}), '(R[0, 2], R[2, 2])\n', (645, 663), False, 'import math\n'), ((2210, 2229), 'numpy.array', 'np.array', (['[1, 0, 0]'], {}), '([1, 0, 0])\n', (2218, 2229), True, 'import numpy as np\n'), ((2250, 2269), 'numpy.array', 'np.array', (['[0, 1, 0]'], {}), '([0, 1, 0])\n', (2258, 2269), True, 'import numpy as np\n'), ((2290, 2309), 'numpy.array', 'np.array', (['[0, 0, 1]'], {}), '([0, 0, 1])\n', (2298, 2309), True, 'import numpy as np\n'), ((1168, 1187), 'numpy.array', 'np.array', (['[1, 0, 0]'], {}), '([1, 0, 0])\n', (1176, 1187), True, 'import numpy as np\n'), ((1214, 1233), 'numpy.array', 'np.array', (['[0, 1, 0]'], {}), '([0, 1, 0])\n', (1222, 1233), True, 'import numpy as np\n'), ((1260, 1279), 'numpy.array', 'np.array', (['[0, 0, 1]'], {}), '([0, 0, 1])\n', (1268, 1279), True, 'import numpy as np\n'), 
((2915, 2935), 'math.acos', 'math.acos', (['f_vec_dot'], {}), '(f_vec_dot)\n', (2924, 2935), False, 'import math\n'), ((1747, 1757), 'numpy.cos', 'np.cos', (['rx'], {}), '(rx)\n', (1753, 1757), True, 'import numpy as np\n'), ((1803, 1813), 'numpy.sin', 'np.sin', (['rx'], {}), '(rx)\n', (1809, 1813), True, 'import numpy as np\n'), ((1815, 1825), 'numpy.cos', 'np.cos', (['rx'], {}), '(rx)\n', (1821, 1825), True, 'import numpy as np\n'), ((1855, 1865), 'numpy.cos', 'np.cos', (['ry'], {}), '(ry)\n', (1861, 1865), True, 'import numpy as np\n'), ((1872, 1882), 'numpy.sin', 'np.sin', (['ry'], {}), '(ry)\n', (1878, 1882), True, 'import numpy as np\n'), ((1969, 1979), 'numpy.cos', 'np.cos', (['ry'], {}), '(ry)\n', (1975, 1979), True, 'import numpy as np\n'), ((2009, 2019), 'numpy.cos', 'np.cos', (['rz'], {}), '(rz)\n', (2015, 2019), True, 'import numpy as np\n'), ((2065, 2075), 'numpy.sin', 'np.sin', (['rz'], {}), '(rz)\n', (2071, 2075), True, 'import numpy as np\n'), ((2077, 2087), 'numpy.cos', 'np.cos', (['rz'], {}), '(rz)\n', (2083, 2087), True, 'import numpy as np\n'), ((2859, 2879), 'math.acos', 'math.acos', (['l_vec_dot'], {}), '(l_vec_dot)\n', (2868, 2879), False, 'import math\n'), ((2887, 2907), 'math.acos', 'math.acos', (['b_vec_dot'], {}), '(b_vec_dot)\n', (2896, 2907), False, 'import math\n'), ((1760, 1770), 'numpy.sin', 'np.sin', (['rx'], {}), '(rx)\n', (1766, 1770), True, 'import numpy as np\n'), ((1952, 1962), 'numpy.sin', 'np.sin', (['ry'], {}), '(ry)\n', (1958, 1962), True, 'import numpy as np\n'), ((2022, 2032), 'numpy.sin', 'np.sin', (['rz'], {}), '(rz)\n', (2028, 2032), True, 'import numpy as np\n')] |
"""
Freyr - A Free stock API
"""
import random
import requests.utils
# Pool of real-world browser User-Agent strings (Firefox, Chrome, Safari, Edge,
# Opera on macOS, Windows, and Linux) sampled by default_user_agent() below to
# randomize the User-Agent of outgoing requests.
header = [
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:83.0) Gecko/20100101 Firefox/83.0",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:82.0) Gecko/20100101 Firefox/82.0",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:83.0) Gecko/20100101 Firefox/83.0",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:84.0) Gecko/20100101 Firefox/84.0",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.16; rv:83.0) Gecko/20100101 Firefox/83.0",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.67 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/13.1.2 Safari/605.1.15",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.67 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.0.1 Safari/605.1.15",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.67 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.0 Safari/605.1.15",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.0.1 Safari/605.1.15",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.0.2 Safari/605.1.15",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.183 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.67 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.0 Safari/605.1.15",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.0.1 Safari/605.1.15",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_0_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_0_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.67 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_0_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_1_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.100 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.121 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.111 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.183 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.183 Safari/537.36 OPR/72.0.3815.320",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.193 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36 Edg/86.0.622.69",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36 OPR/72.0.3815.400",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.75 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.101 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36 Edg/87.0.664.41",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.67 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.67 Safari/537.36 Edg/87.0.664.47",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.67 Safari/537.36 Edg/87.0.664.52",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.67 Safari/537.36 Edg/87.0.664.55",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36 Edg/87.0.664.57",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36 Edg/87.0.664.60",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:78.0) Gecko/20100101 Firefox/78.0",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:82.0) Gecko/20100101 Firefox/82.0",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:83.0) Gecko/20100101 Firefox/83.0",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:84.0) Gecko/20100101 Firefox/84.0",
    "Mozilla/5.0 (Windows NT 10.0; rv:78.0) Gecko/20100101 Firefox/78.0",
    "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36 OPR/72.0.3815.400",
    "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:83.0) Gecko/20100101 Firefox/83.0",
    "Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.3; Win64; x64; rv:83.0) Gecko/20100101 Firefox/83.0",
    "Mozilla/5.0 (X11; Fedora; Linux x86_64; rv:83.0) Gecko/20100101 Firefox/83.0",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.92 Safari/537.36",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.111 Safari/537.36",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.67 Safari/537.36",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
    "Mozilla/5.0 (X11; Linux x86_64; rv:78.0) Gecko/20100101 Firefox/78.0",
    "Mozilla/5.0 (X11; Linux x86_64; rv:82.0) Gecko/20100101 Firefox/82.0",
    "Mozilla/5.0 (X11; Linux x86_64; rv:83.0) Gecko/20100101 Firefox/83.0",
    "Mozilla/5.0 (X11; Linux x86_64; rv:84.0) Gecko/20100101 Firefox/84.0",
    "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:82.0) Gecko/20100101 Firefox/82.0",
    "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:83.0) Gecko/20100101 Firefox/83.0",
    "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:84.0) Gecko/20100101 Firefox/84.0",
]
def default_user_agent():
    """
    Monkey-patch ``requests.utils.default_user_agent`` so that every call
    returns a User-Agent string picked at random from ``header``.

    :return: the patched callable now installed on ``requests.utils``
    """
    def _random_agent():
        return random.choice(header)

    requests.utils.default_user_agent = _random_agent
    return requests.utils.default_user_agent
| [
"random.choice"
] | [((9066, 9087), 'random.choice', 'random.choice', (['header'], {}), '(header)\n', (9079, 9087), False, 'import random\n')] |
from mixcoatl.admin.billing_code import BillingCode
from mixcoatl.geography.region import Region
from mixcoatl.admin.group import Group
from mixcoatl.admin.user import User
def get_servers(servers, **kwargs):
    """ Returns a list of servers, keeping only those matching every given filter.

    Arguments:
    :param servers: a list of servers that needs to be filtered.
    Keyword arguments:
    :param account_user_id: owning user's account user ID.
    :param vm_login_id: owning user's VM login ID.
    :param email: owning user's email address.
    :param group_id: owning group's group ID.
    :param budget_id: budget ID.
    :returns: a list of filtered servers.
    :rtype: list
    """
    def _match_owner(items, field, expected):
        # Keep entries whose owning_user mapping carries the expected value.
        # NOTE: the original used dict.has_key() for the 'email' filter, which
        # no longer exists in Python 3; 'in' is the portable spelling.
        return [s for s in items
                if hasattr(s, 'owning_user')
                and field in s.owning_user
                and s.owning_user[field] == expected]

    filtered = servers
    # Owner-dict filters all share the same shape; apply them in sequence.
    for field in ('account_user_id', 'vm_login_id', 'email'):
        if kwargs.get(field) is not None:
            filtered = _match_owner(filtered, field, kwargs[field])
    if kwargs.get('group_id') is not None:
        wanted = int(kwargs['group_id'])
        filtered = [s for s in filtered if hasattr(s, 'owning_groups')
                    for g in s.owning_groups if g['group_id'] == wanted]
    if kwargs.get('budget_id') is not None:
        wanted = int(kwargs['budget_id'])
        filtered = [s for s in filtered
                    if hasattr(s, 'budget') and s.budget == wanted]
    return filtered
return filtered_servers
def get_snapshots(snapshots, **kwargs):
    """ Returns a list of snapshots

    Arguments:
    :param snapshots: a list of snapshots that needs to be filtered.
    Keyword arguments:
    :param group_id: owning group's group ID.
    :param budget_id: budget ID.
    :returns: a list of filtered snapshots.
    :rtype: list
    """
    result = snapshots
    group_id = kwargs.get('group_id')
    if group_id is not None:
        wanted = int(group_id)
        result = [snap for snap in snapshots if hasattr(snap, 'owning_groups')
                  for grp in snap.owning_groups if grp['group_id'] == wanted]
    budget_id = kwargs.get('budget_id')
    if budget_id is not None:
        if result is not None:
            snapshots = result
        wanted = int(budget_id)
        result = [snap for snap in snapshots
                  if hasattr(snap, 'budget') and snap.budget == wanted]
    return result
def get_volumes(volumes, **kwargs):
    """ Returns a list of volumes

    Arguments:
    :param volumes: a list of volumes that needs to be filtered.
    Keyword arguments:
    :param vm_login_id: owning user's VM login ID.
    :param email: owning user's email address.
    :param group_id: owning group's group ID.
    :param budget_id: budget ID.
    :param size: minimum size of the volume.
    :returns: a list of filtered volumes.
    :rtype: list
    """
    remaining = volumes
    login = kwargs.get('vm_login_id')
    if login is not None:
        remaining = [v for v in remaining
                     if hasattr(v, 'owning_user')
                     and 'vm_login_id' in v.owning_user
                     and v.owning_user['vm_login_id'] == login]
    email = kwargs.get('email')
    if email is not None:
        remaining = [v for v in remaining
                     if hasattr(v, 'owning_user')
                     and 'email' in v.owning_user
                     and v.owning_user['email'] == email]
    group_id = kwargs.get('group_id')
    if group_id is not None:
        wanted = int(group_id)
        remaining = [v for v in remaining if hasattr(v, 'owning_groups')
                     for grp in v.owning_groups if grp['group_id'] == wanted]
    budget_id = kwargs.get('budget_id')
    if budget_id is not None:
        wanted = int(budget_id)
        remaining = [v for v in remaining
                     if hasattr(v, 'budget') and v.budget == wanted]
    size = kwargs.get('size')
    if size is not None:
        minimum = int(size)
        remaining = [v for v in remaining if v.size_in_gb >= minimum]
    return remaining
def get_user(users, **kwargs):
    """ Returns the user that matches the given arguments.

    If several users match, the last match wins; if none match (or no
    filter is given) the original ``users`` collection is returned
    unchanged, preserving the historical fallback behaviour.

    Arguments:
    :param users: a list of users to search.
    Keyword arguments:
    :param vm_login_id: owning user's VM login ID.
    :param email: owning user's email address.
    :returns: the matching user, or ``users`` when there is no match.
    """
    match = users
    login = kwargs.get('vm_login_id')
    if login is not None:
        for candidate in users:
            if getattr(candidate, 'vm_login_id', None) == login:
                match = candidate
    elif kwargs.get('email') is not None:
        wanted = kwargs['email']
        for candidate in users:
            if getattr(candidate, 'email', None) == wanted:
                match = candidate
    return match
def get_account_user_id(**kwargs):
    """ Returns account_user_id from arguments

    Keyword arguments:
    :param vm_login_id: user's VM login ID like p100
    :param email: user's E-Mail address
    :returns: account_user_id
    :rtype: int
    :raises ValueError: if neither ``vm_login_id`` nor ``email`` is given.
    """
    if 'vm_login_id' in kwargs:
        selected_user = get_user(User.all(), vm_login_id=kwargs['vm_login_id'])
    elif 'email' in kwargs:
        selected_user = get_user(User.all(), email=kwargs['email'])
    else:
        # Previously this fell through and raised an obscure NameError on
        # ``selected_user``; fail up front with a clear message instead.
        raise ValueError("get_account_user_id requires 'vm_login_id' or 'email'")
    return selected_user.account_user_id
def get_vm_login_id(**kwargs):
    """ Returns vm_login_id from arguments

    Keyword arguments:
    :param email: user's E-Mail address
    :returns: vm_login_id
    :rtype: str
    :raises ValueError: if ``email`` is not supplied.
    """
    if 'email' not in kwargs:
        # Previously a missing argument surfaced later as a NameError on
        # ``selected_user``; fail up front with a clear message instead.
        raise ValueError("get_vm_login_id requires an 'email' keyword argument")
    selected_user = get_user(User.all(), email=kwargs['email'])
    return selected_user.vm_login_id
def get_budget_id(budget_name):
    """ Returns budget_id from arguments

    Arguments:
    :param budget_name: budget name
    :returns: budget_id (billing code ID of the first budget with that name)
    :rtype: int
    :raises ValueError: if no budget with that name exists.
    """
    for budget in BillingCode.all(detail='basic'):
        if hasattr(budget, 'name') and budget.name == budget_name:
            # Return on the first hit instead of scanning the whole list.
            return budget.billing_code_id
    # Previously an unmatched name surfaced as a NameError on ``selected_budget``.
    raise ValueError('no budget named {0!r} was found'.format(budget_name))
def get_group_id(group_name):
    """ Returns a group ID from group name

    Arguments:
    :param group_name: name of the group
    :returns: group_id of the first group with that name
    :rtype: int
    :raises ValueError: if no group with that name exists.
    """
    for group in Group.all(detail='basic'):
        if hasattr(group, 'name') and group.name == group_name:
            # Return on the first hit instead of scanning the whole list.
            return group.group_id
    # Previously an unmatched name surfaced as a NameError on ``selected_group``.
    raise ValueError('no group named {0!r} was found'.format(group_name))
def get_region_id(region_pid):
    """ Returns a region ID from provider_id such as us-east-1.

    Arguments:
    :param region_pid: provider ID of the region such as us-east-1
    :returns: region_id such as 19343
    :rtype: int
    :raises ValueError: if no region with that provider ID exists.
    """
    for region in Region.all(detail='basic'):
        if hasattr(region, 'provider_id') and region.provider_id == region_pid:
            # Return on the first hit instead of scanning the whole list.
            return region.region_id
    # Previously an unmatched id surfaced as a NameError on ``selected_region``.
    raise ValueError('no region with provider id {0!r} was found'.format(region_pid))
| [
"mixcoatl.admin.billing_code.BillingCode.all",
"mixcoatl.geography.region.Region.all",
"mixcoatl.admin.group.Group.all",
"mixcoatl.admin.user.User.all"
] | [((7507, 7538), 'mixcoatl.admin.billing_code.BillingCode.all', 'BillingCode.all', ([], {'detail': '"""basic"""'}), "(detail='basic')\n", (7522, 7538), False, 'from mixcoatl.admin.billing_code import BillingCode\n'), ((7914, 7939), 'mixcoatl.admin.group.Group.all', 'Group.all', ([], {'detail': '"""basic"""'}), "(detail='basic')\n", (7923, 7939), False, 'from mixcoatl.admin.group import Group\n'), ((8364, 8390), 'mixcoatl.geography.region.Region.all', 'Region.all', ([], {'detail': '"""basic"""'}), "(detail='basic')\n", (8374, 8390), False, 'from mixcoatl.geography.region import Region\n'), ((6728, 6738), 'mixcoatl.admin.user.User.all', 'User.all', ([], {}), '()\n', (6736, 6738), False, 'from mixcoatl.admin.user import User\n'), ((7206, 7216), 'mixcoatl.admin.user.User.all', 'User.all', ([], {}), '()\n', (7214, 7216), False, 'from mixcoatl.admin.user import User\n'), ((6858, 6868), 'mixcoatl.admin.user.User.all', 'User.all', ([], {}), '()\n', (6866, 6868), False, 'from mixcoatl.admin.user import User\n')] |
from app import db
# Junction Tables for many-to-many relationships
# Associates campaigns with participating users; the composite primary key
# (campaign, user) prevents duplicate memberships.
campaign_users = db.Table('campaign_users',
    db.Column('campaign', db.Integer, db.ForeignKey('campaigns.id'), primary_key=True),
    db.Column('user', db.Integer, db.ForeignKey('users.username'), primary_key=True),
    )

# Associates campaigns with the vehicles whose usage counts toward them;
# composite primary key (campaign, vehicle) prevents duplicate links.
campaign_vehicles = db.Table('campaign_vehicles',
    db.Column('campaign', db.Integer, db.ForeignKey('campaigns.id'), primary_key=True),
    db.Column('vehicle', db.Integer, db.ForeignKey('vehicles.id'), primary_key=True),
    )
class Campaign(db.Model):
    ''' A Campaign is a context in which to tally a carbon footprint total.
    For example, a personal total, or an activity shared across multiple people

    Columns:
        id -- surrogate primary key.
        name -- unique human-readable campaign name.
        offsets_available -- offsets banked for this campaign
            (presumably kg CO2 -- TODO confirm units).
    '''
    __tablename__ = 'campaigns'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String, unique=True)
    offsets_available = db.Column(db.Float, nullable=False, default=0.0)
    # Many-to-many links through the junction tables above; each relationship
    # also adds a 'campaigns' backref onto User and Vehicle respectively.
    users = db.relationship('User', secondary=campaign_users, lazy='subquery', backref=db.backref('campaigns', lazy=True))
    vehicles = db.relationship('Vehicle', secondary=campaign_vehicles, lazy='subquery', backref=db.backref('campaigns', lazy=True))

    # Other resources
    # consumptions = TODO m2m relationship to consumptions
    # offsets = db.relationship('Offset', backref='campaign', lazy=True)

    def __repr__(self):
        return '<Campaign {}>'.format(self.name)
class User(db.Model):
    ''' A person with an account

    The username itself is the primary key; the user's campaigns are
    reachable through the ``campaigns`` backref created by Campaign.users.
    '''
    __tablename__ = 'users'
    username = db.Column(db.String, primary_key=True)

    # campaigns attribute is backreferenced

    def __repr__(self):
        return '<User {}>'.format(self.username)
class Vehicle(db.Model):
    ''' A Vehicle is a type of Resource
    Resources have an increasing counter (i.e. odometer, gas meter) that can be snapshotted over time to measure usage
    Usage can be converted into CO2 emitted according with some linear factors
    '''
    __tablename__ = 'vehicles'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String, nullable=False)
    units = db.Column(db.String, nullable=False, default='km') # Should be km for all vehicles for now. This column exists for extensibility to other resources.
    notes = db.Column(db.String, nullable=True)
    fuel_l_per_100km = db.Column(db.Float, nullable=False, default=10.6) # Default average car 22 mpg
    carbon_to_manufacture = db.Column(db.Float, nullable=False, default=10000) # Default wild estimate based on quick search; presumably kg CO2 -- TODO confirm units
    expected_life_km = db.Column(db.Float, nullable=False, default=321868) # Default based on guess of 200k miles
    def get_carbon_per_unit(self):
        ''' Calculate the CO2 emission per km of driving
        as the sum of contributions of burning gas and deprecating a vehicle
        that emitted lots of carbon during manufacture
        Units of kg CO2 per km
        '''
        GAL_PER_L = 0.2641729
        CARBON_PER_LITRE = 8.9 * GAL_PER_L # 8.9 kg CO2 per gallon of gas: https://www.epa.gov/greenvehicles/greenhouse-gas-emissions-typical-passenger-vehicle-0
        # litres/km times kg CO2 per litre -> kg CO2 per km
        gas_contribution = ( self.fuel_l_per_100km / 100.0 ) * CARBON_PER_LITRE
        # Amortize the manufacturing footprint evenly over the expected lifetime.
        deprecation_contribution = self.carbon_to_manufacture / self.expected_life_km
        return gas_contribution + deprecation_contribution
    def __repr__(self):
        return '<Vehicle {}>'.format(self.id)
class ResourceMeasurement(db.Model):
    ''' This table stores the timeseries of all measurements for all resources (i.e. car odometer readings over time)
    '''
    __tablename__ = 'resource_measurements'
    id = db.Column(db.Integer, primary_key=True)
    # When the reading was taken.
    date = db.Column(db.DateTime, nullable=False)
    # Counter value at that moment (e.g. odometer reading).
    value = db.Column(db.Float, nullable=False)
    # NOTE(review): vehicles are the only resource type today, so this FK
    # targets vehicles.id directly rather than a generic resources table.
    resource = db.Column(db.Integer, db.ForeignKey('vehicles.id'), nullable=False)
## TODO FUTURE - Add additional models
# The following models are stubbed out, but likely don't work yet
# They're needed to complete a larger carbon footprint picture for a Campaign
# Add new consumptions to track each contribution to your carbon footprint (as discrete events)
# They can be of different types, (in an enum table) mostly just for future potential categorization features and stats across users
# Add Offsets each time you buy an offset and apply it to a campaign
# class Consumption(db.Model):
# ''' A Consumption is a thing that has a carbon footprint. I.e. a flight, or a cheeseburger, or a bonfire '''
# __tablename__ = 'consumptions'
# id = db.Column(db.Integer, primary_key=True)
# name = db.Column(db.String, nullable=False)
# date = db.Column(db.DateTime, nullable=False)
# category = # TODO foreign key
# quantity = db.Column(db.Float, nullable=False)
# units = db.Column(db.String, nullable=False)
# carbon_per_unit = db.Column(db.Float, nullable=False)
# # Footprint off the consumption can be derived from the product of quantity and carbon_per_unity
# def __repr__(self):
# return '<Consumption {}, {}>'.format(self.name, self.id)
# class ConsumptionCategories(db.Model):
# ''' Enumeration of categories that Consumptions can fall into. One category to many Consumptions.'''
# id = db.Column(db.Integer, primary_key=True)
# name = db.Column(db.String, nullable=False)
# def __repr__(self):
# return '<Consumption Category {}, {}>'.format(self.name, self.id)
# class Offset(db.Model):
# __tablename__ = 'offsets'
# id = db.Column(db.Integer, primary_key=True)
# name = db.Column(db.String, nullable=False)
# date = db.Column(db.DateTime, nullable=False)
# price_usd = db.Column(db.Float, nullable=True)
# carbon_offset_quantity = db.Column(db.Float, nullable=False)
# reference = db.Column(db.String)
# # Foreign key to Campaigns. Many offsets to one campaign.
# campaign = db.Column(db.Integer, db.ForeignKey('campaigns.id'), nullable=False)
# def __repr__(self):
# return '<Offset #{}: {} ({}) kg CO2>'.format(self.id, self.name, self.carbon_offset_quantity)
# ======================================================
# TODO FUTURE
# Potentially I might want to abstract the Vehicle Resource into a Resource base class that Vehicles and Utilities can extend
# # Abstract Class
# class Resource(db.Model):
# id = db.Column(db.Integer, primary_key=True)
# carbon_per_unit = db.Column(db.Float, nullable=False)
# units = db.Column(db.String, nullable=False)
# notes = db.Column(db.String, nullable=True)
# def __repr__(self):
# return '<Resource {}>'.format(self.id)
# class Vehicle(Resource):
# __tablename__ = 'vehicles'
# fuel_l_per_100km = db.Column(db.Float, nullable=False)
# carbon_to_manufacture = db.Column(db.Float, nullable=False)
# expected_life_km = db.Column(db.Float, nullable=False)
# units = 'miles' # Somehow set this.
# def __repr__(self):
# return '<Vehicle {}>'.format(self.id)
# class Utility(Resource):
# __tablename__ = 'electric'
# username = db.Column(db.String, primary_key=True)
# def __repr__(self):
# return '<User {}>'.format(self.id) | [
"app.db.Column",
"app.db.ForeignKey",
"app.db.backref"
] | [((752, 791), 'app.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)'}), '(db.Integer, primary_key=True)\n', (761, 791), False, 'from app import db\n'), ((803, 836), 'app.db.Column', 'db.Column', (['db.String'], {'unique': '(True)'}), '(db.String, unique=True)\n', (812, 836), False, 'from app import db\n'), ((861, 909), 'app.db.Column', 'db.Column', (['db.Float'], {'nullable': '(False)', 'default': '(0.0)'}), '(db.Float, nullable=False, default=0.0)\n', (870, 909), False, 'from app import db\n'), ((1502, 1540), 'app.db.Column', 'db.Column', (['db.String'], {'primary_key': '(True)'}), '(db.String, primary_key=True)\n', (1511, 1540), False, 'from app import db\n'), ((1985, 2024), 'app.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)'}), '(db.Integer, primary_key=True)\n', (1994, 2024), False, 'from app import db\n'), ((2036, 2072), 'app.db.Column', 'db.Column', (['db.String'], {'nullable': '(False)'}), '(db.String, nullable=False)\n', (2045, 2072), False, 'from app import db\n'), ((2085, 2135), 'app.db.Column', 'db.Column', (['db.String'], {'nullable': '(False)', 'default': '"""km"""'}), "(db.String, nullable=False, default='km')\n", (2094, 2135), False, 'from app import db\n'), ((2246, 2281), 'app.db.Column', 'db.Column', (['db.String'], {'nullable': '(True)'}), '(db.String, nullable=True)\n', (2255, 2281), False, 'from app import db\n'), ((2305, 2354), 'app.db.Column', 'db.Column', (['db.Float'], {'nullable': '(False)', 'default': '(10.6)'}), '(db.Float, nullable=False, default=10.6)\n', (2314, 2354), False, 'from app import db\n'), ((2413, 2463), 'app.db.Column', 'db.Column', (['db.Float'], {'nullable': '(False)', 'default': '(10000)'}), '(db.Float, nullable=False, default=10000)\n', (2422, 2463), False, 'from app import db\n'), ((2534, 2585), 'app.db.Column', 'db.Column', (['db.Float'], {'nullable': '(False)', 'default': '(321868)'}), '(db.Float, nullable=False, default=321868)\n', (2543, 2585), False, 'from app import db\n'), 
((3617, 3656), 'app.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)'}), '(db.Integer, primary_key=True)\n', (3626, 3656), False, 'from app import db\n'), ((3668, 3706), 'app.db.Column', 'db.Column', (['db.DateTime'], {'nullable': '(False)'}), '(db.DateTime, nullable=False)\n', (3677, 3706), False, 'from app import db\n'), ((3719, 3754), 'app.db.Column', 'db.Column', (['db.Float'], {'nullable': '(False)'}), '(db.Float, nullable=False)\n', (3728, 3754), False, 'from app import db\n'), ((151, 180), 'app.db.ForeignKey', 'db.ForeignKey', (['"""campaigns.id"""'], {}), "('campaigns.id')\n", (164, 180), False, 'from app import db\n'), ((235, 266), 'app.db.ForeignKey', 'db.ForeignKey', (['"""users.username"""'], {}), "('users.username')\n", (248, 266), False, 'from app import db\n'), ((378, 407), 'app.db.ForeignKey', 'db.ForeignKey', (['"""campaigns.id"""'], {}), "('campaigns.id')\n", (391, 407), False, 'from app import db\n'), ((465, 493), 'app.db.ForeignKey', 'db.ForeignKey', (['"""vehicles.id"""'], {}), "('vehicles.id')\n", (478, 493), False, 'from app import db\n'), ((3792, 3820), 'app.db.ForeignKey', 'db.ForeignKey', (['"""vehicles.id"""'], {}), "('vehicles.id')\n", (3805, 3820), False, 'from app import db\n'), ((998, 1032), 'app.db.backref', 'db.backref', (['"""campaigns"""'], {'lazy': '(True)'}), "('campaigns', lazy=True)\n", (1008, 1032), False, 'from app import db\n'), ((1130, 1164), 'app.db.backref', 'db.backref', (['"""campaigns"""'], {'lazy': '(True)'}), "('campaigns', lazy=True)\n", (1140, 1164), False, 'from app import db\n')] |
from typing import List
from django.shortcuts import render
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from assignment.models import Assignment
from course.models import Course
class CourseListView(ListView):
    """List every Course, rendered with course/course_list.html.

    The object list is exposed to the template under the name 'course'.
    """
    template_name = 'course/course_list.html'
    model = Course
    context_object_name = 'course'
class CourseDetailView(DetailView):
    """Show one Course (as 'course') together with its assignments."""
    template_name = 'course/course_detail.html'
    model = Course
    context_object_name = 'course'
    def get(self, request, *args, **kwargs):
        # Remember the requested course id so get_context_data can filter by it.
        self.pk = kwargs["pk"]
        return super().get(request, *args, **kwargs)
    def get_context_data(self, **kwargs):
        # Expose the course's assignments to the template alongside the course.
        kwargs["assignment"] = Assignment.objects.filter(course__id=self.pk)
        return super().get_context_data(**kwargs)
| [
"assignment.models.Assignment.objects.filter"
] | [((709, 754), 'assignment.models.Assignment.objects.filter', 'Assignment.objects.filter', ([], {'course__id': 'self.pk'}), '(course__id=self.pk)\n', (734, 754), False, 'from assignment.models import Assignment\n')] |
# Copyright (c) 2010, <NAME>, <NAME>, and <NAME>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials
# provided with the distribution.
#
# * Neither the name of Sun Microsystems nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SUN
# MICROSYSTEMS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
# OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
import sys, os.path
rootpath = os.path.split(os.path.dirname(os.path.abspath(__file__)))[0]
sys.path.append(rootpath)
import unittest
import replicant
class TestPosition(unittest.TestCase):
    "Test case for binlog positions class."

    def _checkPos(self, p, s):
        """Check that a position is valid, have the expected
        representation, and can be converted from string
        representation to class and back."""
        from replicant import Position
        self.assertEqual(repr(p), s)
        self.assertEqual(p, eval(repr(p)))
        self.assertEqual(s, repr(eval(s)))

    def testSimple(self):
        from replicant import Position
        positions = [Position('master-bin.00001', 4711),
                     Position('master-bin.00001', 9393),
                     Position('master-bin.00002', 102)]
        strings = ["Position('master-bin.00001', 4711)",
                   "Position('master-bin.00001', 9393)",
                   "Position('master-bin.00002', 102)"]
        # BUG FIX: the original loops iterated over range(0, len(...) - 1)
        # and therefore never exercised the final position in any check.
        for i in range(len(positions)):
            self._checkPos(positions[i], strings[i])

        # Check that comparison works as expected for every ordered pair.
        for i in range(len(positions)):
            for j in range(len(positions)):
                if i < j:
                    self.assertTrue(positions[i] < positions[j])
                elif i == j:
                    self.assertEqual(positions[i], positions[j])
                else:
                    self.assertTrue(positions[i] > positions[j])
def suite():
    """Return a TestSuite containing all TestPosition tests.

    ``unittest.makeSuite`` is deprecated and was removed in Python 3.13;
    the TestLoader API below is the documented, behavior-identical
    replacement.
    """
    return unittest.TestLoader().loadTestsFromTestCase(TestPosition)
if __name__ == '__main__':
    # When executed directly, run the test collection returned by suite().
    unittest.main(defaultTest='suite')
| [
"unittest.main",
"unittest.makeSuite",
"sys.path.append",
"replicant.Position"
] | [((1676, 1701), 'sys.path.append', 'sys.path.append', (['rootpath'], {}), '(rootpath)\n', (1691, 1701), False, 'import sys, os.path\n'), ((3133, 3165), 'unittest.makeSuite', 'unittest.makeSuite', (['TestPosition'], {}), '(TestPosition)\n', (3151, 3165), False, 'import unittest\n'), ((3198, 3232), 'unittest.main', 'unittest.main', ([], {'defaultTest': '"""suite"""'}), "(defaultTest='suite')\n", (3211, 3232), False, 'import unittest\n'), ((2272, 2306), 'replicant.Position', 'Position', (['"""master-bin.00001"""', '(4711)'], {}), "('master-bin.00001', 4711)\n", (2280, 2306), False, 'from replicant import Position\n'), ((2329, 2363), 'replicant.Position', 'Position', (['"""master-bin.00001"""', '(9393)'], {}), "('master-bin.00001', 9393)\n", (2337, 2363), False, 'from replicant import Position\n'), ((2386, 2419), 'replicant.Position', 'Position', (['"""master-bin.00002"""', '(102)'], {}), "('master-bin.00002', 102)\n", (2394, 2419), False, 'from replicant import Position\n')] |
#!/usr/bin/env python
import sys, time
from backend import daemon
import itchat
import time
from ipcqueue import posixmq
import logging
import datetime as dt
import threading
import time
# Directory and per-run file name for this daemon's log output; a fresh
# log file is created for every start (timestamped to the second).
logFileDir = "/opt/crontab/IpcToItchat/"
nowDateTime = dt.datetime.now().strftime('%Y%m%d%H%M%S')
# Script base name without path or extension, used in the log file name.
pyFilename = sys.argv[0].split('/')[-1].split('.')[0]
logFileName = '{1}_{0}.log'.format(nowDateTime , pyFilename)
logging.basicConfig(level=logging.DEBUG, format='[%(levelname)-8s] [%(asctime)s]: %(message)s',\
                    datefmt='%Y-%m-%d %H:%M:%S', filename=logFileDir + logFileName, filemode='w')
class MyDaemon(daemon):
    """Daemon that forwards messages from a POSIX IPC queue to WeChat."""

    def run(self):
        """Consume (type, text) messages from the '/ipcmsg' queue forever.

        Observed message types: 1 = heartbeat ("beatheart"), 2 = spider
        result.  Every message body is echoed to the WeChat file helper.
        """
        logging.info('run begin...')
        q = posixmq.Queue('/ipcmsg')
        # Reuse the persisted login session instead of prompting for a scan.
        itchat.load_login_status(fileDir='/opt/crontab/IpcToItchat/itchat.pkl')
        while True:
            rcvMsg = q.get()  # blocks until a message arrives
            logging.debug('Get msg: {}'.format(rcvMsg))
            itchat.send(rcvMsg[1], 'filehelper')
            # NOTE(review): heartbeat messages are sent to the file helper a
            # second time here (in addition to the unconditional send above)
            # -- presumably intentional, but worth confirming.
            if int(rcvMsg[0]) == 1: # beatheart
                itchat.send(rcvMsg[1], 'filehelper')
            if int(rcvMsg[0]) == 2: # spider
                for room in itchat.get_chatrooms():
                    if room['NickName'] == "liuyi":
                        author = itchat.search_chatrooms(userName=room['UserName'])
                        author.send(rcvMsg[1])
                        logging.debug('Send msg: {}'.format(rcvMsg[1]))
        # NOTE(review): unreachable -- the loop above never terminates.
        logging.info('run exit')
if __name__ == "__main__":
    logging.info('Game start...')
    # Log in to WeChat (terminal QR code if the cached session expired).
    itchat.auto_login(enableCmdQR=2, hotReload=True)
    # NOTE(review): this rebinding shadows the `daemon` name imported from
    # `backend` at the top of the file.
    daemon = MyDaemon('/tmp/daemon-example.pid')
    if len(sys.argv) == 2:
        # start|stop|restart dispatch for the daemon process.
        if 'start' == sys.argv[1]:
            daemon.start()
        elif 'stop' == sys.argv[1]:
            daemon.stop()
        elif 'restart' == sys.argv[1]:
            daemon.restart()
        else:
            print("Unknown command")
            sys.exit(2)
        sys.exit(0)
    else:
        print("usage: %s start|stop|restart" % sys.argv[0])
        sys.exit(2)
    # NOTE(review): unreachable -- every branch above calls sys.exit().
    logging.info('Game over.')
| [
"logging.basicConfig",
"itchat.send",
"itchat.get_chatrooms",
"itchat.auto_login",
"backend.daemon.restart",
"itchat.load_login_status",
"itchat.search_chatrooms",
"datetime.datetime.now",
"backend.daemon.start",
"sys.exit",
"backend.daemon.stop",
"logging.info",
"ipcqueue.posixmq.Queue"
] | [((402, 585), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG', 'format': '"""[%(levelname)-8s] [%(asctime)s]: %(message)s"""', 'datefmt': '"""%Y-%m-%d %H:%M:%S"""', 'filename': '(logFileDir + logFileName)', 'filemode': '"""w"""'}), "(level=logging.DEBUG, format=\n '[%(levelname)-8s] [%(asctime)s]: %(message)s', datefmt=\n '%Y-%m-%d %H:%M:%S', filename=logFileDir + logFileName, filemode='w')\n", (421, 585), False, 'import logging\n'), ((1510, 1539), 'logging.info', 'logging.info', (['"""Game start..."""'], {}), "('Game start...')\n", (1522, 1539), False, 'import logging\n'), ((1544, 1592), 'itchat.auto_login', 'itchat.auto_login', ([], {'enableCmdQR': '(2)', 'hotReload': '(True)'}), '(enableCmdQR=2, hotReload=True)\n', (1561, 1592), False, 'import itchat\n'), ((2050, 2076), 'logging.info', 'logging.info', (['"""Game over."""'], {}), "('Game over.')\n", (2062, 2076), False, 'import logging\n'), ((244, 261), 'datetime.datetime.now', 'dt.datetime.now', ([], {}), '()\n', (259, 261), True, 'import datetime as dt\n'), ((633, 661), 'logging.info', 'logging.info', (['"""run begin..."""'], {}), "('run begin...')\n", (645, 661), False, 'import logging\n'), ((674, 698), 'ipcqueue.posixmq.Queue', 'posixmq.Queue', (['"""/ipcmsg"""'], {}), "('/ipcmsg')\n", (687, 698), False, 'from ipcqueue import posixmq\n'), ((707, 778), 'itchat.load_login_status', 'itchat.load_login_status', ([], {'fileDir': '"""/opt/crontab/IpcToItchat/itchat.pkl"""'}), "(fileDir='/opt/crontab/IpcToItchat/itchat.pkl')\n", (731, 778), False, 'import itchat\n'), ((1444, 1468), 'logging.info', 'logging.info', (['"""run exit"""'], {}), "('run exit')\n", (1456, 1468), False, 'import logging\n'), ((1944, 1955), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (1952, 1955), False, 'import sys, time\n'), ((2034, 2045), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (2042, 2045), False, 'import sys, time\n'), ((897, 933), 'itchat.send', 'itchat.send', (['rcvMsg[1]', '"""filehelper"""'], 
{}), "(rcvMsg[1], 'filehelper')\n", (908, 933), False, 'import itchat\n'), ((1716, 1730), 'backend.daemon.start', 'daemon.start', ([], {}), '()\n', (1728, 1730), False, 'from backend import daemon\n'), ((998, 1034), 'itchat.send', 'itchat.send', (['rcvMsg[1]', '"""filehelper"""'], {}), "(rcvMsg[1], 'filehelper')\n", (1009, 1034), False, 'import itchat\n'), ((1109, 1131), 'itchat.get_chatrooms', 'itchat.get_chatrooms', ([], {}), '()\n', (1129, 1131), False, 'import itchat\n'), ((1779, 1792), 'backend.daemon.stop', 'daemon.stop', ([], {}), '()\n', (1790, 1792), False, 'from backend import daemon\n'), ((1844, 1860), 'backend.daemon.restart', 'daemon.restart', ([], {}), '()\n', (1858, 1860), False, 'from backend import daemon\n'), ((1924, 1935), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (1932, 1935), False, 'import sys, time\n'), ((1266, 1316), 'itchat.search_chatrooms', 'itchat.search_chatrooms', ([], {'userName': "room['UserName']"}), "(userName=room['UserName'])\n", (1289, 1316), False, 'import itchat\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""mlbgame functions for the people API endpoints.
This module's functions gets the JSON payloads for the mlb.com games API
endpoints.
.. _Google Python Style Guide:
http://google.github.io/styleguide/pyguide.html
"""
from mlbgame.data import request
def get_person(person_id, params=None):
    """Fetch the people-API payload for a single player.

    Args:
        person_id (int): Unique player identifier (path parameter).
        params (dict, optional): Extra query parameters forwarded verbatim
            to the endpoint.  Parameters the API understands include:

            * ``person_ids`` -- comma delimited list of person IDs,
              e.g. ``"1234, 2345"`` (array[integer]).
            * ``season`` -- season of play (string).
            * ``group`` -- statistic categories; ``0`` hitting, ``1``
              pitching, ``2`` fielding, ``3`` running (array[string];
              may not yet do anything server-side).
            * ``fields`` -- comma delimited list of fields to return,
              ``"topLevelNode, childNode, attribute"`` (array[string]).

    Returns:
        json: the decoded API response.
    """
    return request(7, primary_key=person_id, params=params)
def get_current_game_stats(person_id, params=None):
    """Fetch a player's stats for their current (live) game.

    Args:
        person_id (int): Unique player identifier (path parameter).
        params (dict, optional): Extra query parameters forwarded verbatim
            to the endpoint.  Parameters the API understands include:

            * ``group`` -- statistic categories; ``0`` hitting, ``1``
              pitching, ``2`` fielding, ``3`` running (array[string];
              may not yet do anything server-side).
            * ``timecode`` -- return a snapshot of the data at the given
              time, formatted ``YYYYMMDD_HHMMSS``.
            * ``fields`` -- comma delimited list of fields to return,
              ``"topLevelNode, childNode, attribute"`` (array[string]).

    Returns:
        json: the decoded API response.
    """
    return request(7, 'stats/game/current', primary_key=person_id,
                   params=params)
def get_game_stats(person_id, game_pk, params=None):
    """Fetch a player's stats for one specific game.

    Args:
        person_id (int): Unique player identifier (path parameter).
        game_pk (int): Unique primary key of the game (path parameter).
        params (dict, optional): Extra query parameters forwarded verbatim
            to the endpoint.  Parameters the API understands include:

            * ``group`` -- statistic categories; ``0`` hitting, ``1``
              pitching, ``2`` fielding, ``3`` running (array[string];
              may not yet do anything server-side).
            * ``fields`` -- comma delimited list of fields to return,
              ``"topLevelNode, childNode, attribute"`` (array[string]).

    Returns:
        json: the decoded API response.
    """
    return request(7, 'stats/game', primary_key=person_id,
                   secondary_key=game_pk, params=params)
| [
"mlbgame.data.request"
] | [((1494, 1542), 'mlbgame.data.request', 'request', (['(7)'], {'primary_key': 'person_id', 'params': 'params'}), '(7, primary_key=person_id, params=params)\n', (1501, 1542), False, 'from mlbgame.data import request\n'), ((2648, 2718), 'mlbgame.data.request', 'request', (['(7)', '"""stats/game/current"""'], {'primary_key': 'person_id', 'params': 'params'}), "(7, 'stats/game/current', primary_key=person_id, params=params)\n", (2655, 2718), False, 'from mlbgame.data import request\n'), ((3874, 3963), 'mlbgame.data.request', 'request', (['(7)', '"""stats/game"""'], {'primary_key': 'person_id', 'secondary_key': 'game_pk', 'params': 'params'}), "(7, 'stats/game', primary_key=person_id, secondary_key=game_pk,\n params=params)\n", (3881, 3963), False, 'from mlbgame.data import request\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import csv
import getopt
import io
import itertools
import logging
import numbers
import os
import re
import sqlite3
import string
import sys
import textwrap
import time
try:
import readline
except ImportError:
pass
try:
import wcwidth
WCWIDTH_SUPPORT = True
except ImportError:
WCWIDTH_SUPPORT = False
# True when the running interpreter is Python 3 (the module supports 2 and 3).
PYTHON_3 = sys.version_info >= (3, )
# Process exit codes used by main().
EXIT_GENERAL_FAILURE = 1
EXIT_DATABASE_ERROR = 2
# Public API of this module.
__all__ = ["PYTHON_3", "EXIT_GENERAL_FAILURE", "EXIT_DATABASE_ERROR",
           "SQLite3CSVImporter", "pretty_print_table", "query_split",
           "metaquery_conversion", "sqlite3_repl", "WCWIDTH_SUPPORT"]
__license__ = "BSD 2-Clause"
class SQLite3CSVImporter:
    """
    Imports CSV files into SQLite3 tables, auto-detecting the CSV dialect,
    the presence of a header row, and a suitable SQL type for each column.
    """
    # True on Python 3.  Kept as a class attribute so the importer does not
    # depend on module-level globals and can be used standalone.
    _PY3 = sys.version_info >= (3, )
    # Shared dialect sniffer used by loadfile().
    sniffer = csv.Sniffer()
    # Candidate SQL affinities, most specific first; a column receives the
    # first type whose caster accepts every sampled value.
    typemap = [
        ("INTEGER", int),
        ("REAL", float),
        ("TEXT", (lambda v: v.encode("utf-8")) if _PY3 else unicode),
        ("BLOB", (lambda v: v.encode("utf-8", errors="surrogateescape"))
                 if _PY3 else str),
    ]

    def __init__(self, dbc, ignore_errors=True, log_warnings=True):
        """
        Setup SQLite3CSVImporter. When `ignore_errors` is set, any SQL errors
        encountered while inserting rows into the database will be ignored and,
        if `log_warnings` is set, a warning containing information about the
        failed INSERT will be logged.
        """
        self.dbc = dbc
        self.ignore_errors = ignore_errors
        self.log_warnings = log_warnings

    @classmethod
    def detect_types(cls, table):
        """
        Return list of SQL type definition clauses that can safely be applied
        to each of the columns in the `table`.
        """
        typedefs = list()
        for column in zip(*table):
            rows_with_content = [value for value in column if value]
            # Columns empty in every sampled row are typed from one empty
            # string (which ends up as TEXT).
            scanned_columns = rows_with_content or ("", )
            for typedef, caster in cls.typemap:
                try:
                    for value in scanned_columns:
                        caster(value)
                    break
                except Exception:
                    pass
            else:
                raise ValueError("Could not detect type of %r" % (column,))
            typedefs.append(typedef)
        return typedefs

    @staticmethod
    def quote_identifier(identifier):
        """
        Return ANSI-quoted SQL identifier.
        """
        return '"' + identifier.replace('"', '""') + '"'

    def create_table(self, tablename, types, columns=None, if_not_exists=True):
        """
        Create a table named `tablename` with a column named after each element
        in `columns` with corresponding type defintions in the `types` list. If
        `columns` is not specified, the column names will be generated
        automatically. When `if_not_exists` is set, the "IF NOT EXISTS" infix
        will be added to the "CREATE TABLE" query.
        """
        if not types:
            raise ValueError("Must specify types.")
        if not columns:
            # Auto-generate names from the first alphabetic character of the
            # table name ("n" as a fallback): e.g. t1, t2, t3, ...
            for char in tablename:
                if char.isalpha():
                    char = char.lower()
                    break
            else:
                char = "n"
            columns = (char + str(n) for n in itertools.count(1))
        else:
            # Restrict column identifiers to "word" characters and make
            # duplicates unique by appending "_2", "_3", ...
            _columns = list()
            for column in columns:
                # BUG FIX: re.M | re.U used to be passed positionally where
                # re.sub() expects its `count` argument; it is now passed as
                # the intended `flags` keyword.
                word_column = re.sub(r"\W+", "_", column,
                                     flags=re.M | re.U).strip("_")
                column = word_column
                base = 1
                while column in _columns:
                    base += 1
                    column = word_column + "_" + str(base)
                _columns.append(column)
            columns = _columns
        columns = (self.quote_identifier(column) for column in columns)
        table = self.quote_identifier(tablename)
        body = ",\n  ".join(("%s %s" % (c, t) for c, t in zip(columns, types)))
        infix = "IF NOT EXISTS " if if_not_exists else ""
        cursor = self.dbc.cursor()
        cursor.execute("CREATE TABLE %s%s (\n  %s\n)" % (infix, table, body))

    def loadfile(self, filename, tablename, create_table=True):
        """
        Load a CSV file into the specified database table. When `create_table`
        is set, this method will auto-detect the CSV schema and create the
        `tablename` if it does not already exist. Please note that this method
        **will not** work on un-seekable files in Python 3.
        """
        py3 = self._PY3

        def csv_open(path):
            """
            Open `path` in a manner best suited for use with csv module.
            """
            if py3:
                # https://docs.python.org/3/library/csv.html#csv.reader
                return open(path, newline="", errors="surrogateescape")
            else:
                return open(path, mode="rbU")

        with csv_open(filename) as iostream:
            # Use first 20 lines to determine CSV dialect.
            sample_lines = "".join(itertools.islice(iostream, 20))
            dialect = self.sniffer.sniff(sample_lines)
            # In Python 2, unseekable files are supported by buffering the
            # sampled data into a BytesIO object; in Python 3 the file itself
            # is reused and must therefore be seekable.
            if py3:
                sample_reader_io = iostream
            else:
                sample_reader_io = io.BytesIO(sample_lines)
            # Read the first 20 CSV records.
            sample_reader_io.seek(0)
            sample_reader = csv.reader(sample_reader_io, dialect)
            sample_rows = list(itertools.islice(sample_reader, 20))
            # Figure out the table schema using the sniffed records.  If the
            # detected types change when the first row is dropped, that row
            # is assumed to be a header.
            sample_reader_io.seek(0)
            types_with_row_one = self.detect_types(sample_rows)
            types_sans_row_one = self.detect_types(sample_rows[1:])
            has_header = types_sans_row_one != types_with_row_one
            types = types_sans_row_one or types_with_row_one
            if has_header:
                try:
                    next(sample_reader)
                except StopIteration:
                    pass
                first_line_number = 2
                columns = sample_rows[0]
            else:
                first_line_number = 1
                columns = None
            with self.dbc:
                cursor = self.dbc.cursor()
                if create_table:
                    self.create_table(tablename, columns=columns, types=types)
                stream_reader = csv.reader(iostream, dialect)
                rowgen = itertools.chain(sample_reader, stream_reader)
                table = self.quote_identifier(tablename)
                binds = ", ".join("?" * len(sample_rows[0]))
                # BUG FIX: the quoted identifier was computed but the raw,
                # unquoted table name was interpolated into the INSERT.
                query = "INSERT INTO %s VALUES (%s)" % (table, binds)
                try:
                    original_text_factory = self.dbc.text_factory
                    if not py3:
                        self.dbc.text_factory = str
                    for lineno, row in enumerate(rowgen, first_line_number):
                        # Empty CSV fields are stored as SQL NULL.
                        parameters = [val if val else None for val in row]
                        logging.debug("Inserting row: %r", parameters)
                        try:
                            cursor.execute(query, parameters)
                        except Exception as e:
                            if not self.ignore_errors or self.log_warnings:
                                if not e.args:
                                    e.args = ("", )
                                # Append file name and row number so the
                                # failing record can be located.
                                suffix = " (%s, row %d) " % (filename, lineno)
                                e.args = e.args[:-1] + (e.args[-1] + suffix,)
                            if not self.ignore_errors:
                                self.dbc.text_factory = original_text_factory
                                raise
                            elif self.log_warnings:
                                logging.warning("%s", e)
                finally:
                    self.dbc.text_factory = original_text_factory
def pretty_print_table(table, breakafter=(0, ), dest=None, tabsize=8):
    """
    Pretty-print data from a table in a style similar to MySQL CLI. The
    `breakafter` option is used to determine where row-breaks should be
    inserted. When set to `False`, no breaks will be inserted after any
    rows. When set to `True`, a break is set after everywhere. The
    `breakafter` option can also be an iterable containing row numbers
    after which a break should be inserted. Assuming the first entry in
    `table` is the tabular data's header, the function can be executed as
    follows to insert a break just after the header:

    >>> table = [
    ...     ["Name", "Age", "Favorite Color"],
    ...     ["Bob", 10, "Blue"],
    ...     ["Rob", 25, "Red"],
    ...     ["Penny", 70, "Purple"]]
    >>> pretty_print_table(table)
    +-------+-----+----------------+
    | Name  | Age | Favorite Color |
    +-------+-----+----------------+
    | Bob   |  10 | Blue           |
    | Rob   |  25 | Red            |
    | Penny |  70 | Purple         |
    +-------+-----+----------------+

    By default, the table is printed to stdout, but this can be changed by
    providing a file-like object as the `dest` parameter.

    The `tabsize` parameter controls how many spaces tabs are expanded to.
    """
    # BUG FIX: the default for `breakafter` was the mutable list [0]; it is
    # now the equivalent immutable tuple (0, ) to avoid the shared mutable
    # default-argument pitfall (membership tests behave identically).
    #
    # The textwidth function returns the number of printed columns the given
    # text will span in a monospaced terminal. When the wcwidth module is not
    # available, this falls back to the len builtin which will be inaccurate
    # for many non-Latin characters.
    if not WCWIDTH_SUPPORT:
        textwidth = len
    elif PYTHON_3:
        def textwidth(text):
            length = wcwidth.wcswidth(text)
            return len(text) if length == -1 else length
    else:
        def textwidth(text):
            if isinstance(text, unicode):
                length = wcwidth.wcswidth(text)
                return len(text) if length == -1 else length
            else:
                text = text.decode("utf-8", "replace")
                length = wcwidth.wcswidth(text)
                return len(text) if length == -1 else length

    table = list(table)
    last = len(table) - 1
    colwidths = list()
    table_lines = list()
    # Columns become right-aligned as soon as a numeric cell is seen.
    left_aligned = [True] * len(table[0]) if table else []
    for rowindex, row in enumerate(table):
        # Split each cell into lines
        cells = list()
        for colindex, column in enumerate(row):
            if column is None:
                column = "NULL"
            else:
                if isinstance(column, numbers.Number):
                    left_aligned[colindex] = False
                if PYTHON_3 or not isinstance(column, unicode):
                    column = str(column)
            column = column.expandtabs(tabsize)
            cells.append(column.split("\n"))
        # Check if row-break should be inserted after row
        separate = ((breakafter is True) or
                    (rowindex == last) or
                    (breakafter and rowindex in breakafter))
        # Find tallest cell in the row
        row_height = max(map(len, cells))
        # Update the column widths if any of the cells are wider than the
        # widest, previously encountered cell in each column.
        initialize = not table_lines
        for index, contents in enumerate(cells):
            width = max(map(textwidth, contents))
            if initialize:
                colwidths.append(width)
            else:
                colwidths[index] = max(width, colwidths[index])
            if initialize:
                table_lines.append([None])
            # Pad line count of each cell in the row to match the row_height
            cells[index] += [""] * (row_height - len(contents))
            # Add lines to line table and insert a break if needed
            table_lines[index].extend(cells[index] + [None] * separate)
    # Transpose the table and print each row. Rows containing `None` indicate a
    # row break should be inserted.
    for row in zip(*table_lines):
        printcols = list()
        if row[0] is None:
            print("+-", end="", file=dest)
            for index, column in enumerate(row):
                printcols.append("-" * colwidths[index])
            print(*printcols, sep="-+-", end="-+\n", file=dest)
        else:
            print("| ", end="", file=dest)
            for index, column in enumerate(row):
                if not PYTHON_3 and isinstance(column, unicode):
                    column = column.encode("utf-8", "replace")
                padding = " " * (colwidths[index] - textwidth(column))
                if left_aligned[index]:
                    printcols.append(column + padding)
                else:
                    printcols.append(padding + column)
            print(*printcols, sep=" | ", end=" |\n", file=dest)
def query_split(text):
    """
    Yield the individual SQLite3 statements contained in `text`.

    Statements are accumulated across ";" boundaries until
    sqlite3.complete_statement accepts them.  Any trailing, non-blank
    fragment is yielded as-is (unstripped); use
    sqlite3.complete_statement to decide whether it is complete.
    """
    pieces = re.split("(;)", text)
    total = len(pieces)
    start = 0
    for end in range(total + 1):
        candidate = "".join(pieces[start:end]).strip()
        if candidate and sqlite3.complete_statement(candidate):
            yield candidate
            start = end
    if start != total:
        remainder = "".join(pieces[start:])
        if remainder.strip():
            yield remainder
def metaquery_conversion(original_query, original_params=tuple()):
    """
    Convert queries matching various, normally unsupported grammars to queries
    SQLite3 understands. The currently supported grammars are as follows:

    - {DESC | DESCRIBE} table_name
    - SHOW CREATE TABLE table_name
    - SHOW TABLES

    Returns a (query, params) tuple; queries that match none of the
    grammars are returned unchanged along with `original_params`.
    """
    flags = re.IGNORECASE | re.MULTILINE
    # Strip trailing semicolons/whitespace so the anchored patterns below can
    # match.  BUG FIX: `flags` used to be passed positionally, where re.sub()
    # expects its `count` argument; it is now passed as the flags keyword.
    original_query = re.sub(r"[;\s]+$", "", original_query, flags=flags)

    # {DESC | DESCRIBE} table_name  ->  PRAGMA table_info(...)
    match = re.match(r"DESC(?:RIBE)?\s+(\S+)$", original_query, flags)
    if match:
        query = "PRAGMA table_info(" + match.group(1) + ")"
        return query, original_params

    # SHOW CREATE TABLE table_name  ->  lookup in sqlite_master
    match = re.match(r"SHOW\s+CREATE\s+TABLE\s+(\S+)$", original_query, flags)
    if match:
        table = match.group(1)
        # Drop surrounding backticks or double quotes, if present.
        if table[0] in "`\"":
            table = table[1:-1]
        query = (
            "SELECT sql || ';' AS `SHOW CREATE TABLE` "
            "FROM sqlite_master WHERE tbl_name = ? "
            "COLLATE NOCASE"
        )
        if table == "?":
            # Literal "?" placeholder: keep the caller-supplied binding.
            params = original_params
        else:
            params = (table, )
        return query, params

    # SHOW TABLES  ->  list user tables from sqlite_master
    match = re.match(r"SHOW\s+TABLES$", original_query, flags)
    if match:
        query = (
            "SELECT tbl_name AS `Tables` "
            "FROM sqlite_master "
            "WHERE type = 'table'"
        )
        return query, original_params

    return original_query, original_params
def sqlite3_repl(connection, input_function=None, dest=None):
    """
    Interactive REPL loop for SQLite3 designed to emulate the MySQL CLI
    REPL. Ctrl+C clears the current line buffer, and Ctrl+D exits the loop.
    When an incomplete query spans multiple lines, the prompt will change
    to provide a hint to the user about what token is missing to terminate
    the query. This function accepts a SQLite3 connection instance.
    """
    # Prefer a monotonic clock for query timing when available (Python 3.3+).
    try:
        clock = time.monotonic
    except AttributeError:
        clock = time.time
    if not input_function:
        input_function = input if PYTHON_3 else raw_input
    linebuffer = ""
    # Autocommit mode for the duration of the REPL; restored on exit.
    original_connection_isolation_level = connection.isolation_level
    connection.isolation_level = None
    cursor = connection.cursor()
    while True:
        prompt = "sqlite> "
        if linebuffer.strip():
            for query in query_split(linebuffer):
                params = tuple()
                if sqlite3.complete_statement(query):
                    try:
                        # Translate meta-queries (DESC, SHOW ...) first.
                        query, params = metaquery_conversion(query, params)
                        start = clock()
                        results = cursor.execute(query, params)
                        duration = clock() - start
                        if cursor.rowcount > -1:
                            # Data-modifying statement: report affected rows.
                            n = cursor.rowcount
                            s = "" if n == 1 else "s"
                            prefix = "Query OK, %d row%s affected" % (n, s)
                        elif cursor.description:
                            # Row-returning statement: render a result table.
                            results = list(results)
                            n = len(results)
                            s = "" if n == 1 else "s"
                            prefix = "%d row%s in set" % (n, s)
                            headers = [d[0] for d in cursor.description]
                            tbl = [headers] + results
                            pretty_print_table(tbl, dest=dest)
                        else:
                            prefix = "Query OK, but no data returned"
                        if duration >= 0:
                            text = "%s (%0.2f sec)" % (prefix, duration)
                        else:
                            # Non-monotonic clock can go backwards.
                            text = "%s (execution time unknown)" % (prefix,)
                    except sqlite3.Error as exc:
                        text = "%s" % exc
                    print(text, end="\n\n", file=dest)
                    linebuffer = ""
                elif query:
                    linebuffer = query
                    # Figure out what token is needed to complete the query and
                    # adjust the prompt accordingly.
                    terminators = (";", '"', "'", "`", '\\"', "\\'", "\\`")
                    for chars in terminators:
                        if sqlite3.complete_statement(query + chars + ";"):
                            prompt = "    " + chars[-1] + "> "
                            break
                    else:
                        prompt = "    -> "
        try:
            linebuffer += input_function(prompt) + "\n"
        except EOFError:
            # ^D to exit
            print("\n", end="", file=dest)
            connection.isolation_level = original_connection_isolation_level
            return
        except KeyboardInterrupt:
            # ^C to reset the line buffer
            linebuffer = ""
            print("\n", end="", file=dest)
def cli(argv, dest=None):
    """
    Command line interface for __file__

    Usage: __file__ [OPTIONS...] [QUERIES...]

    Any trailing, non-option arguments will be executed as SQLite3 queries
    after the data has been imported.

    Options:
      --help, -h                Show this documentation and exit.
      -A FILE, ..., -Z FILE     All capital, single-letter options are used to load
                                the specified file into the SQLite3 database. If no
                                "--table" option has been specified immediately
                                preceding the option, the letter name will be used
                                as the table name; loading a file with "-A" will
                                populate the table "A". Similarly, the table schema
                                will be auto-detected when no "--schema" option
                                immediately precedes this option.
      --table=TABLE             Name of table used to store the contents of the
                                next specified CSV file.
      --invalid=METHOD          Determines how rows of invalid data handled. The
                                METHOD can be "warn", "ignore", or "fail" which
                                will cause the script to emit a warning and skip
                                the record, silently skip the record or terminate
                                script execution respectively. When unspecified,
                                defaults to "warn."
      --loglevel=LEVEL          Set logging verbosity level. In order from the
                                highest verbosity to the lowest verbosity, can be
                                one of "DEBUG", "INFO", "WARNING", "ERROR",
                                "CRITICAL". The default value is "WARNING."
      --pretty                  Pretty-print results of queries passed as command
                                line arguments instead of tab-separating the
                                results.
      --database=FILE           Path of the SQLite3 database the queries should be
                                executed on. When unspecified, the data is stored
                                volatile memory and becomes inaccessible after the
                                program stops running.
      -i                        Enter interactive mode after importing data. When
                                the "--database" flag is not specified, this is
                                implied.
      -v                        Increase logging verbosity. Can be used repeatedly
                                to further increase verbosity.
      -q                        Decrease logging verbosity. Can be used repeatedly
                                to further decrease verbosity.
    """
    if PYTHON_3:
        letters = string.ascii_uppercase
    else:
        letters = string.uppercase
    # Every capital letter is a short option taking a file-name argument.
    colopts = ":".join(letters) + ":hvqi"
    # BUG FIX: "loglevel=" was missing from this list, so the documented
    # --loglevel option always raised GetoptError before reaching the
    # handler below.
    longopts = ["table=", "invalid=", "loglevel=", "help", "pretty",
                "database="]
    options, arguments = getopt.gnu_getopt(argv[1:], colopts, longopts)
    if not argv[1:] or ("--help", "") in options or ("-h", "") in options:
        me = os.path.basename(argv[0] or __file__)
        docstring = cli.__doc__.replace("__file__", me)
        print(textwrap.dedent(docstring).strip(), file=dest)
        sys.exit(0 if argv[1:] else 1)
    loglevels = ("DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL")
    loglevel = loglevels.index("WARNING")
    database = None
    prettify = False
    interact = False
    table = None
    loadfile_args = list()
    importer_kwargs = dict()
    for option, value in options:
        # Long options
        if option.startswith("--"):
            if option == "--invalid":
                if value not in ("ignore", "warn", "fail"):
                    raise getopt.GetoptError("Invalid value for --invalid")
                importer_kwargs["ignore_errors"] = value in ("ignore", "warn")
                importer_kwargs["log_warnings"] = value == "warn"
            elif option == "--table":
                table = value
            elif option == "--loglevel":
                try:
                    loglevel = loglevels.index(value.upper())
                except ValueError:
                    raise getopt.GetoptError("Invalid log level '%s'" % value)
            elif option == "--pretty":
                prettify = True
            elif option == "--database":
                database = value
        # Logging verbosity modifiers and Interactivity
        elif option in ("-v", "-q", "-i"):
            if option == "-v":
                loglevel -= loglevel > 0
            elif option == "-q":
                loglevel += loglevel < (len(loglevels) - 1)
            elif option == "-i":
                interact = True
        # All of the short options that accept arguments are just used for
        # table aliases
        else:
            loadfile_args.append((value, table or option[1]))
            table = None
    # With no on-disk database the data would be lost on exit, so default to
    # interactive mode in that case.
    if not interact and database is None:
        interact = True
    loglevel = loglevels[loglevel]
    logging.getLogger().setLevel(getattr(logging, loglevel))
    logging.debug("Log level set to %s.", loglevel)
    connection = sqlite3.connect(database or ":memory:")
    importer = SQLite3CSVImporter(dbc=connection, **importer_kwargs)
    for args in loadfile_args:
        importer.loadfile(*args)
    cursor = connection.cursor()
    for query in arguments:
        if len(arguments) > 1:
            logging.info("Executing '%s'", query)
        else:
            logging.debug("Executing '%s'", query)
        results = cursor.execute(query)
        if prettify:
            results = list(results)
            if results:
                headers = [d[0] for d in cursor.description]
                pretty_print_table([headers] + results, dest=dest)
        else:
            def printable(var):
                """
                Return print function-friendly variable.
                """
                if not PYTHON_3 and isinstance(var, unicode):
                    return var.encode("utf-8", "replace")
                else:
                    return var
            # Tab-separated output; NULL values print as empty strings.
            for r in results:
                columns = ("" if c is None else printable(c) for c in r)
                print(*columns, sep="\t", file=dest)
    if interact:
        sqlite3_repl(connection, dest=dest)
def main():
    """
    Command-line entry point.

    Configures plain-message logging, runs the CLI, and maps each known
    failure category onto its process exit code.
    """
    logging.basicConfig(format="%(message)s")
    try:
        cli(sys.argv)
    except getopt.GetoptError as error:
        # Bad command-line usage.
        logging.fatal("Could not parse command line options: %s", error)
        sys.exit(EXIT_GENERAL_FAILURE)
    except sqlite3.DatabaseError as error:
        # Anything SQLite rejected while importing or querying.
        logging.fatal("Error updating database: %s", error)
        sys.exit(EXIT_DATABASE_ERROR)
    except EnvironmentError as error:
        # I/O problems (missing files, permissions, ...).
        logging.fatal("%s", error)
        sys.exit(EXIT_GENERAL_FAILURE)


if __name__ == "__main__":
    main()
| [
"logging.getLogger",
"itertools.chain",
"logging.debug",
"io.BytesIO",
"csv.Sniffer",
"sys.exit",
"getopt.gnu_getopt",
"wcwidth.wcswidth",
"logging.info",
"re.split",
"textwrap.dedent",
"csv.reader",
"re.match",
"sqlite3.complete_statement",
"logging.warning",
"logging.fatal",
"getop... | [((766, 779), 'csv.Sniffer', 'csv.Sniffer', ([], {}), '()\n', (777, 779), False, 'import csv\n'), ((13520, 13541), 're.split', 're.split', (['"""(;)"""', 'text'], {}), "('(;)', text)\n", (13528, 13541), False, 'import re\n'), ((14251, 14296), 're.sub', 're.sub', (['"""[;\\\\s]+$"""', '""""""', 'original_query', 'flags'], {}), "('[;\\\\s]+$', '', original_query, flags)\n", (14257, 14296), False, 'import re\n'), ((14309, 14368), 're.match', 're.match', (['"""DESC(?:RIBE)?\\\\s+(\\\\S+)$"""', 'original_query', 'flags'], {}), "('DESC(?:RIBE)?\\\\s+(\\\\S+)$', original_query, flags)\n", (14317, 14368), False, 'import re\n'), ((14492, 14561), 're.match', 're.match', (['"""SHOW\\\\s+CREATE\\\\s+TABLE\\\\s+(\\\\S+)$"""', 'original_query', 'flags'], {}), "('SHOW\\\\s+CREATE\\\\s+TABLE\\\\s+(\\\\S+)$', original_query, flags)\n", (14500, 14561), False, 'import re\n'), ((14982, 15032), 're.match', 're.match', (['"""SHOW\\\\s+TABLES$"""', 'original_query', 'flags'], {}), "('SHOW\\\\s+TABLES$', original_query, flags)\n", (14990, 15032), False, 'import re\n'), ((21669, 21715), 'getopt.gnu_getopt', 'getopt.gnu_getopt', (['argv[1:]', 'colopts', 'longopts'], {}), '(argv[1:], colopts, longopts)\n', (21686, 21715), False, 'import getopt\n'), ((23812, 23859), 'logging.debug', 'logging.debug', (['"""Log level set to %s."""', 'loglevel'], {}), "('Log level set to %s.', loglevel)\n", (23825, 23859), False, 'import logging\n'), ((23878, 23917), 'sqlite3.connect', 'sqlite3.connect', (["(database or ':memory:')"], {}), "(database or ':memory:')\n", (23893, 23917), False, 'import sqlite3\n'), ((25064, 25105), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(message)s"""'}), "(format='%(message)s')\n", (25083, 25105), False, 'import logging\n'), ((21805, 21842), 'os.path.basename', 'os.path.basename', (['(argv[0] or __file__)'], {}), '(argv[0] or __file__)\n', (21821, 21842), False, 'import os\n'), ((21968, 21998), 'sys.exit', 'sys.exit', (['(0 if argv[1:] else 
1)'], {}), '(0 if argv[1:] else 1)\n', (21976, 21998), False, 'import sys\n'), ((5861, 5898), 'csv.reader', 'csv.reader', (['sample_reader_io', 'dialect'], {}), '(sample_reader_io, dialect)\n', (5871, 5898), False, 'import csv\n'), ((13679, 13712), 'sqlite3.complete_statement', 'sqlite3.complete_statement', (['query'], {}), '(query)\n', (13705, 13712), False, 'import sqlite3\n'), ((23751, 23770), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (23768, 23770), False, 'import logging\n'), ((24157, 24194), 'logging.info', 'logging.info', (['"""Executing \'%s\'"""', 'query'], {}), '("Executing \'%s\'", query)\n', (24169, 24194), False, 'import logging\n'), ((24221, 24259), 'logging.debug', 'logging.debug', (['"""Executing \'%s\'"""', 'query'], {}), '("Executing \'%s\'", query)\n', (24234, 24259), False, 'import logging\n'), ((25184, 25246), 'logging.fatal', 'logging.fatal', (['"""Could not parse command line options: %s"""', 'exc'], {}), "('Could not parse command line options: %s', exc)\n", (25197, 25246), False, 'import logging\n'), ((25255, 25285), 'sys.exit', 'sys.exit', (['EXIT_GENERAL_FAILURE'], {}), '(EXIT_GENERAL_FAILURE)\n', (25263, 25285), False, 'import sys\n'), ((25335, 25384), 'logging.fatal', 'logging.fatal', (['"""Error updating database: %s"""', 'exc'], {}), "('Error updating database: %s', exc)\n", (25348, 25384), False, 'import logging\n'), ((25393, 25422), 'sys.exit', 'sys.exit', (['EXIT_DATABASE_ERROR'], {}), '(EXIT_DATABASE_ERROR)\n', (25401, 25422), False, 'import sys\n'), ((25467, 25491), 'logging.fatal', 'logging.fatal', (['"""%s"""', 'exc'], {}), "('%s', exc)\n", (25480, 25491), False, 'import logging\n'), ((25500, 25530), 'sys.exit', 'sys.exit', (['EXIT_GENERAL_FAILURE'], {}), '(EXIT_GENERAL_FAILURE)\n', (25508, 25530), False, 'import sys\n'), ((5179, 5209), 'itertools.islice', 'itertools.islice', (['iostream', '(20)'], {}), '(iostream, 20)\n', (5195, 5209), False, 'import itertools\n'), ((5725, 5749), 'io.BytesIO', 'io.BytesIO', 
(['sample_lines'], {}), '(sample_lines)\n', (5735, 5749), False, 'import io\n'), ((5930, 5965), 'itertools.islice', 'itertools.islice', (['sample_reader', '(20)'], {}), '(sample_reader, 20)\n', (5946, 5965), False, 'import itertools\n'), ((6868, 6897), 'csv.reader', 'csv.reader', (['iostream', 'dialect'], {}), '(iostream, dialect)\n', (6878, 6897), False, 'import csv\n'), ((6923, 6968), 'itertools.chain', 'itertools.chain', (['sample_reader', 'stream_reader'], {}), '(sample_reader, stream_reader)\n', (6938, 6968), False, 'import itertools\n'), ((10093, 10115), 'wcwidth.wcswidth', 'wcwidth.wcswidth', (['text'], {}), '(text)\n', (10109, 10115), False, 'import wcwidth\n'), ((16230, 16263), 'sqlite3.complete_statement', 'sqlite3.complete_statement', (['query'], {}), '(query)\n', (16256, 16263), False, 'import sqlite3\n'), ((3407, 3425), 'itertools.count', 'itertools.count', (['(1)'], {}), '(1)\n', (3422, 3425), False, 'import itertools\n'), ((10280, 10302), 'wcwidth.wcswidth', 'wcwidth.wcswidth', (['text'], {}), '(text)\n', (10296, 10302), False, 'import wcwidth\n'), ((10462, 10484), 'wcwidth.wcswidth', 'wcwidth.wcswidth', (['text'], {}), '(text)\n', (10478, 10484), False, 'import wcwidth\n'), ((21913, 21939), 'textwrap.dedent', 'textwrap.dedent', (['docstring'], {}), '(docstring)\n', (21928, 21939), False, 'import textwrap\n'), ((22462, 22511), 'getopt.GetoptError', 'getopt.GetoptError', (['"""Invalid value for --invalid"""'], {}), "('Invalid value for --invalid')\n", (22480, 22511), False, 'import getopt\n'), ((3601, 3641), 're.sub', 're.sub', (['"""\\\\W+"""', '"""_"""', 'column', '(re.M | re.U)'], {}), "('\\\\W+', '_', column, re.M | re.U)\n", (3607, 3641), False, 'import re\n'), ((7515, 7561), 'logging.debug', 'logging.debug', (['"""Inserting row: %r"""', 'parameters'], {}), "('Inserting row: %r', parameters)\n", (7528, 7561), False, 'import logging\n'), ((18064, 18111), 'sqlite3.complete_statement', 'sqlite3.complete_statement', (["(query + chars + ';')"], {}), 
"(query + chars + ';')\n", (18090, 18111), False, 'import sqlite3\n'), ((22913, 22965), 'getopt.GetoptError', 'getopt.GetoptError', (['("Invalid log level \'%s\'" % value)'], {}), '("Invalid log level \'%s\'" % value)\n', (22931, 22965), False, 'import getopt\n'), ((8290, 8314), 'logging.warning', 'logging.warning', (['"""%s"""', 'e'], {}), "('%s', e)\n", (8305, 8314), False, 'import logging\n')] |
"""
Definition of an
:class:`~django_analyses.filters.output.output_definition.OutputDefinitionFilter`
for the :class:`~django_analyses.models.output.definitions.OutputDefinition`
model.
"""
from django_analyses.models.output.definitions.output_definition import \
OutputDefinition
from django_filters import rest_framework as filters
class OutputDefinitionFilter(filters.FilterSet):
    """
    Filtering options for the
    :class:`~django_analyses.models.output.definitions.output_definition.OutputDefinition`
    model: by *key* and by the associated output specification.
    """

    # Allows filtering by any existing value of the related specification set.
    output_specification = filters.AllValuesFilter("specification_set")

    class Meta:
        model = OutputDefinition
        fields = ("key", "output_specification")
| [
"django_filters.rest_framework.AllValuesFilter"
] | [((584, 628), 'django_filters.rest_framework.AllValuesFilter', 'filters.AllValuesFilter', (['"""specification_set"""'], {}), "('specification_set')\n", (607, 628), True, 'from django_filters import rest_framework as filters\n')] |
"""
Created on Mar 12, 2018
@author: SirIsaacNeutron
"""
import tkinter
import tkinter.messagebox
import hanoi
DEFAULT_FONT = ('Helvetica', 14)
class DiskDialog:
    """A dialog window meant to get the number of Disks per Tower for the
    Tower of Hanoi puzzle.

    After :meth:`show` returns, check ``exited_intentionally``; when it is
    False, ``num_disks_per_tower`` holds the validated positive integer the
    user entered.
    """
    def __init__(self):
        # A Toplevel gives the dialog its own window above the main one.
        self._dialog_window = tkinter.Toplevel()
        how_many_disks_label = tkinter.Label(master=self._dialog_window,
                                text='How many Disks per Tower do you want?',
                                font=DEFAULT_FONT)
        how_many_disks_label.grid(row=0, column=0, columnspan=2,
                                  padx=10, pady=10)
        # Entry the user types the disk count into; read in _on_set_up_button.
        self.disk_entry = tkinter.Entry(master=self._dialog_window, width=20,
                                  font=DEFAULT_FONT)
        self.disk_entry.grid(row=1, column=0, columnspan=2,
                             padx=10, pady=1)
        button_frame = tkinter.Frame(master=self._dialog_window)
        button_frame.grid(row=2, column=0, padx=10, pady=10)
        set_up_button = tkinter.Button(master=button_frame, text='Set Up Game',
                                       font=DEFAULT_FONT,
                                       command=self._on_set_up_button)
        set_up_button.grid(row=0, column=0, padx=10, pady=10)
        exit_button = tkinter.Button(master=button_frame, text='Exit Game',
                                     font=DEFAULT_FONT,
                                     command=self._on_exit_button)
        exit_button.grid(row=0, column=1, padx=10, pady=10)
        self.exited_intentionally = False # Did the user click the exit button?
        # Shown when user input is invalid
        # NOTE(review): message says "greater than or equal to 0" but the
        # validation below rejects 0 as well — confirm intended wording.
        self._error_message = 'You have to enter an integer greater than or equal to 0'
    def show(self) -> None:
        """Display the dialog modally; blocks until the window is closed."""
        self._dialog_window.grab_set()
        self._dialog_window.wait_window()
    def _on_set_up_button(self) -> None:
        # Validate the entry; only destroy the dialog on a positive integer.
        self.num_disks_per_tower = self.disk_entry.get()
        try:
            self.num_disks_per_tower = int(self.num_disks_per_tower)
            if self.num_disks_per_tower <= 0:
                tkinter.messagebox.showerror('Error', self._error_message + '.')
                # We have to return None in order to prevent the self._dialog_window
                # from being destroyed.
                return None
        except ValueError: # Entry was a string or a decimal, not an integer
            tkinter.messagebox.showerror('Error', self._error_message + ', not text or decimals.')
            return None
        self._dialog_window.destroy()
    def _on_exit_button(self):
        # Close the dialog and record that the user opted out of starting a game.
        self._dialog_window.destroy()
        self.exited_intentionally = True
class HanoiWindow:
    """Main window for the Tower of Hanoi game.

    Builds the tkinter UI (buttons, status label, canvas), draws the three
    Towers and their Disks, and translates button clicks into moves on the
    underlying :mod:`hanoi` game object.
    """
    _BACKGROUND_COLOR = '#FFF3E6' # Light beige
    def __init__(self):
        self._running = True
        self._root_window = tkinter.Tk()
        self._root_window.title('Tower of Hanoi')
        self._set_up_buttons()
        self._hanoi_canvas = tkinter.Canvas(master=self._root_window, width=500, height=400,
                                            background=HanoiWindow._BACKGROUND_COLOR)
        # Status line shown under the buttons; updated on every action.
        self._move_string = tkinter.StringVar()
        self._move_string.set('No move selected.')
        move_label = tkinter.Label(master=self._root_window, textvariable=self._move_string,
                                  font=DEFAULT_FONT)
        move_label.grid(row=2, column=0, padx=5, pady=5)
        # Note: row here depends on the tower_button_frame's row
        self._hanoi_canvas.grid(row=3, column=0, padx=10, pady=10)
        self._draw_towers()
        # Were the Disks already drawn? (Used to ensure Disk sizes are printed correctly)
        self._disks_already_drawn = False
    def _set_up_buttons(self) -> None:
        """Add buttons to the top of the window."""
        button_frame = tkinter.Frame(master=self._root_window)
        button_frame.grid(row=0, column=0, padx=10, pady=10)
        help_button = tkinter.Button(master=button_frame, text='Help', font=DEFAULT_FONT,
                                    command=self._on_help_button)
        help_button.pack(side=tkinter.LEFT)
        restart_button = tkinter.Button(master=button_frame, text='Restart', font=DEFAULT_FONT,
                                       command=self._on_restart_button)
        restart_button.pack(side=tkinter.LEFT)
        tower_one_button = tkinter.Button(master=button_frame, text='Tower 1',
                                         font=DEFAULT_FONT, command=self._on_tower_one)
        tower_one_button.pack(side=tkinter.LEFT)
        tower_two_button = tkinter.Button(master=button_frame, text='Tower 2',
                                         font=DEFAULT_FONT, command=self._on_tower_two)
        tower_two_button.pack(side=tkinter.LEFT)
        tower_three_button = tkinter.Button(master=button_frame, text='Tower 3',
                                           font=DEFAULT_FONT, command=self._on_tower_three)
        tower_three_button.pack(side=tkinter.LEFT)
        # Empty string means "no tower selected yet" for the two-click move flow.
        self._origin = ''
        self._destination = ''
    def _on_tower_one(self) -> None:
        self._set_origin_and_or_destination('Tower 1')
    def _set_origin_and_or_destination(self, tower_str: str) -> None:
        """Set self._origin and/or self._destination to be some tower_str.

        First click selects the origin; second click selects the destination
        and triggers the move. Clicking the origin again cancels the move.
        """
        TOWER_DICT = {'Tower 1': self._game.tower_one, 'Tower 2': self._game.tower_two,
                      'Tower 3': self._game.tower_three}
        if self._origin == '':
            self._origin = tower_str
            self._move_string.set('Moving from ' + self._origin + ' into... ')
        else:
            self._destination = tower_str
            if self._origin != self._destination and self._destination != '':
                self._make_move(TOWER_DICT)
            else:
                self._move_string.set('Move canceled.')
            self._origin = ''
            self._destination = ''
    def _make_move(self, tower_dict: dict) -> None:
        """Attempt the selected move, report errors or success, and redraw."""
        try:
            tower_dict[self._origin].move_disk_to(tower_dict[self._destination])
        except hanoi.InvalidMoveError:
            self._move_string.set("Invalid move! You can't put a bigger Disk on top of a "
                                  + 'smaller Disk.')
            return None
        except hanoi.InvalidFirstMoveError:
            self._move_string.set('Error: you have to make your first move from Tower 1!')
            return None
        except hanoi.NoDisksError:
            self._move_string.set('Error: ' + self._origin + ' has no Disks!')
            return None
        self._move_string.set('Moved from ' + self._origin + ' to ' + self._destination
                              + '.')
        self._game.num_moves_made += 1
        if self._game.is_over():
            self._move_string.set('Congratulations! You solved Tower of Hanoi!\n'
                                  + 'Moves Taken: ' + str(self._game.num_moves_made) + '\n'
                                  + 'Min. # of Moves Required: '
                                  + str(self._game.min_moves_required))
        self._draw_disks()
    def _draw_disks(self) -> None:
        """Render each Disk as its size number on the canvas, per Tower column."""
        tower_tower_xs = {self._tower_one_x: self._game.tower_one,
                          self._tower_two_x: self._game.tower_two,
                          self._tower_three_x: self._game.tower_three}
        # 'Disk_1', 'Disk_2', 'Disk_3', and so on.
        # We need underscores here because tags cannot contain whitespace.
        # These tags are used to prevent the Disk size text from overwriting itself
        # when the player makes moves.
        disk_tags = ['Disk_' + str(disk.size) for tower in tower_tower_xs.values()
                     for disk in tower if disk != hanoi.EMPTY]
        if self._disks_already_drawn:
            for tag in disk_tags:
                self._hanoi_canvas.delete(tag)
        current_tag_index = 0
        for tower_x, tower in tower_tower_xs.items():
            topmost_y = 35
            for disk in tower:
                if disk == hanoi.EMPTY:
                    # We have to increment topmost_y here in order to represent the Disks
                    # falling down as far as possible.
                    topmost_y += 15
                    continue
                else:
                    # We need to do 'tower_x + 5' here because tower_x is the x-coordinate
                    # of tower's upper-left corner. If we did not add 5 to tower_x, the text
                    # would be in the wrong place.
                    self._hanoi_canvas.create_text(tower_x + 5, topmost_y, anchor=tkinter.W,
                                                   font=DEFAULT_FONT,
                                                   text=str(disk.size),
                                                   tag=disk_tags[current_tag_index])
                    topmost_y += 15
                    current_tag_index += 1
        self._disks_already_drawn = True
    def _on_tower_two(self) -> None:
        self._set_origin_and_or_destination('Tower 2')
    def _on_tower_three(self) -> None:
        self._set_origin_and_or_destination('Tower 3')
    def _draw_towers(self) -> None:
        """Note: the width of each Tower is 25. The upper-left corner
        of each Tower has a y-coordinate of 25, and the upper-right corner
        a y-coordinate of 400.
        """
        TOWER_COLOR = 'white'
        self._tower_one_x = 50
        self._hanoi_canvas.create_rectangle(self._tower_one_x, 25, 75, 400, fill=TOWER_COLOR,
                                            tags='Tower 1')
        self._tower_two_x = 240
        self._hanoi_canvas.create_rectangle(self._tower_two_x, 25, 265, 400, fill=TOWER_COLOR,
                                            tags='Tower 2')
        self._tower_three_x = 425
        self._hanoi_canvas.create_rectangle(self._tower_three_x, 25, 450, 400, fill=TOWER_COLOR,
                                            tags='Tower 3')
    def run(self) -> None:
        """Run a session of Tower of Hanoi."""
        disk_dialog = DiskDialog()
        disk_dialog.show()
        if not disk_dialog.exited_intentionally:
            self._num_disks_per_tower = disk_dialog.num_disks_per_tower
            self._game = hanoi.Game(self._num_disks_per_tower)
            self._draw_disks()
        # NOTE(review): mainloop() also runs when the dialog was exited
        # intentionally, leaving self._game / self._num_disks_per_tower unset;
        # the Restart and Tower buttons would then raise AttributeError.
        # Confirm whether the window should be destroyed instead.
        self._root_window.mainloop()
    def _on_help_button(self) -> None:
        """Show the game rules and UI instructions in a message box."""
        help_message = (hanoi.HELP_MESSAGE + '\n\nThe Towers are white rectangles, and the Disks are '
                        + "numbers that represent the Disks' sizes.\n\n"
                        + "To select a Tower to move from, click on one of the 'Tower' buttons. "
                        + "Then, to select the Tower to move to, click on another one of the 'Tower' buttons."
                        + " In short, the first Tower button you click is the Tower you're moving from,"
                        + " and the second is the one you're moving to. \n\nTo cancel a move from a Tower,"
                        + " click on the button of the Tower you're moving from again.")
        tkinter.messagebox.showinfo('Welcome to the Tower of Hanoi!',
                                    help_message)
    def _on_restart_button(self) -> None:
        # Start a fresh game with the same disk count and redraw.
        self._game = hanoi.Game(self._num_disks_per_tower)
        self._move_string.set('Restarted the game.')
        self._draw_disks()
if __name__ == '__main__':
    # Launch the GUI only when run as a script, not on import.
    HanoiWindow().run()
| [
"tkinter.messagebox.showerror",
"tkinter.Entry",
"tkinter.Toplevel",
"tkinter.Button",
"hanoi.Game",
"tkinter.Canvas",
"tkinter.StringVar",
"tkinter.Tk",
"tkinter.Label",
"tkinter.messagebox.showinfo",
"tkinter.Frame"
] | [((330, 348), 'tkinter.Toplevel', 'tkinter.Toplevel', ([], {}), '()\n', (346, 348), False, 'import tkinter\n'), ((389, 500), 'tkinter.Label', 'tkinter.Label', ([], {'master': 'self._dialog_window', 'text': '"""How many Disks per Tower do you want?"""', 'font': 'DEFAULT_FONT'}), "(master=self._dialog_window, text=\n 'How many Disks per Tower do you want?', font=DEFAULT_FONT)\n", (402, 500), False, 'import tkinter\n'), ((747, 817), 'tkinter.Entry', 'tkinter.Entry', ([], {'master': 'self._dialog_window', 'width': '(20)', 'font': 'DEFAULT_FONT'}), '(master=self._dialog_window, width=20, font=DEFAULT_FONT)\n', (760, 817), False, 'import tkinter\n'), ((996, 1037), 'tkinter.Frame', 'tkinter.Frame', ([], {'master': 'self._dialog_window'}), '(master=self._dialog_window)\n', (1009, 1037), False, 'import tkinter\n'), ((1132, 1242), 'tkinter.Button', 'tkinter.Button', ([], {'master': 'button_frame', 'text': '"""Set Up Game"""', 'font': 'DEFAULT_FONT', 'command': 'self._on_set_up_button'}), "(master=button_frame, text='Set Up Game', font=DEFAULT_FONT,\n command=self._on_set_up_button)\n", (1146, 1242), False, 'import tkinter\n'), ((1410, 1516), 'tkinter.Button', 'tkinter.Button', ([], {'master': 'button_frame', 'text': '"""Exit Game"""', 'font': 'DEFAULT_FONT', 'command': 'self._on_exit_button'}), "(master=button_frame, text='Exit Game', font=DEFAULT_FONT,\n command=self._on_exit_button)\n", (1424, 1516), False, 'import tkinter\n'), ((3030, 3042), 'tkinter.Tk', 'tkinter.Tk', ([], {}), '()\n', (3040, 3042), False, 'import tkinter\n'), ((3171, 3281), 'tkinter.Canvas', 'tkinter.Canvas', ([], {'master': 'self._root_window', 'width': '(500)', 'height': '(400)', 'background': 'HanoiWindow._BACKGROUND_COLOR'}), '(master=self._root_window, width=500, height=400, background=\n HanoiWindow._BACKGROUND_COLOR)\n', (3185, 3281), False, 'import tkinter\n'), ((3358, 3377), 'tkinter.StringVar', 'tkinter.StringVar', ([], {}), '()\n', (3375, 3377), False, 'import tkinter\n'), ((3450, 3544), 
'tkinter.Label', 'tkinter.Label', ([], {'master': 'self._root_window', 'textvariable': 'self._move_string', 'font': 'DEFAULT_FONT'}), '(master=self._root_window, textvariable=self._move_string,\n font=DEFAULT_FONT)\n', (3463, 3544), False, 'import tkinter\n'), ((4075, 4114), 'tkinter.Frame', 'tkinter.Frame', ([], {'master': 'self._root_window'}), '(master=self._root_window)\n', (4088, 4114), False, 'import tkinter\n'), ((4207, 4309), 'tkinter.Button', 'tkinter.Button', ([], {'master': 'button_frame', 'text': '"""Help"""', 'font': 'DEFAULT_FONT', 'command': 'self._on_help_button'}), "(master=button_frame, text='Help', font=DEFAULT_FONT, command\n =self._on_help_button)\n", (4221, 4309), False, 'import tkinter\n'), ((4420, 4527), 'tkinter.Button', 'tkinter.Button', ([], {'master': 'button_frame', 'text': '"""Restart"""', 'font': 'DEFAULT_FONT', 'command': 'self._on_restart_button'}), "(master=button_frame, text='Restart', font=DEFAULT_FONT,\n command=self._on_restart_button)\n", (4434, 4527), False, 'import tkinter\n'), ((4647, 4749), 'tkinter.Button', 'tkinter.Button', ([], {'master': 'button_frame', 'text': '"""Tower 1"""', 'font': 'DEFAULT_FONT', 'command': 'self._on_tower_one'}), "(master=button_frame, text='Tower 1', font=DEFAULT_FONT,\n command=self._on_tower_one)\n", (4661, 4749), False, 'import tkinter\n'), ((4873, 4975), 'tkinter.Button', 'tkinter.Button', ([], {'master': 'button_frame', 'text': '"""Tower 2"""', 'font': 'DEFAULT_FONT', 'command': 'self._on_tower_two'}), "(master=button_frame, text='Tower 2', font=DEFAULT_FONT,\n command=self._on_tower_two)\n", (4887, 4975), False, 'import tkinter\n'), ((5101, 5205), 'tkinter.Button', 'tkinter.Button', ([], {'master': 'button_frame', 'text': '"""Tower 3"""', 'font': 'DEFAULT_FONT', 'command': 'self._on_tower_three'}), "(master=button_frame, text='Tower 3', font=DEFAULT_FONT,\n command=self._on_tower_three)\n", (5115, 5205), False, 'import tkinter\n'), ((11645, 11720), 'tkinter.messagebox.showinfo', 
'tkinter.messagebox.showinfo', (['"""Welcome to the Tower of Hanoi!"""', 'help_message'], {}), "('Welcome to the Tower of Hanoi!', help_message)\n", (11672, 11720), False, 'import tkinter\n'), ((11825, 11862), 'hanoi.Game', 'hanoi.Game', (['self._num_disks_per_tower'], {}), '(self._num_disks_per_tower)\n', (11835, 11862), False, 'import hanoi\n'), ((10778, 10815), 'hanoi.Game', 'hanoi.Game', (['self._num_disks_per_tower'], {}), '(self._num_disks_per_tower)\n', (10788, 10815), False, 'import hanoi\n'), ((2255, 2319), 'tkinter.messagebox.showerror', 'tkinter.messagebox.showerror', (['"""Error"""', "(self._error_message + '.')"], {}), "('Error', self._error_message + '.')\n", (2283, 2319), False, 'import tkinter\n'), ((2576, 2666), 'tkinter.messagebox.showerror', 'tkinter.messagebox.showerror', (['"""Error"""', "(self._error_message + ', not text or decimals.')"], {}), "('Error', self._error_message +\n ', not text or decimals.')\n", (2604, 2666), False, 'import tkinter\n')] |
import os
import pickle
from Utils import DirectoryUtils, IOUtils, LoggingUtils
# Write to the disk the structure such that will be
# persistent.
from Utils.FilesUtils import readConfigFile
def writePersistentStructure(filename, structure):
    """
    Serialize *structure* to *filename* with :mod:`pickle` so it survives
    process restarts.

    Fixes in this revision:
    - The original left ``writeSerializer`` unbound when ``open`` failed, so
      the unconditional ``close()`` raised NameError; a ``with`` block now
      handles closing on every path.
    - The bare ``except`` is narrowed to the errors ``open``/``pickle.dump``
      actually raise.

    :param filename: destination path (written in binary mode)
    :param structure: any picklable Python object
    :return: None; failures are reported on stdout, not raised
    """
    try:
        with open(filename, "wb") as write_serializer:
            pickle.dump(structure, write_serializer)
    except (OSError, pickle.PicklingError):
        print("Unable to write persistent data structure in the following location: {}".format(filename))
# Load a persistent data structure into memory.
def readPersistentStructure(filename):
    """
    Load a pickled structure from the ``Indexes`` directory next to this
    package.

    Fixes in this revision:
    - On failure the original fell through to ``return inMemoryStructure``
      with the name unbound, raising NameError; we now return None instead.
    - The file handle was never closed; a ``with`` block closes it.
    - The bare ``except`` is narrowed to the errors ``open``/``pickle.load``
      actually raise.

    :param filename: file name inside the ``Indexes`` directory
    :return: the unpickled object, or None when the file cannot be read
    """
    dirname = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'Indexes'))
    path = os.path.join(dirname, filename)
    try:
        with open(path, "rb") as read_serializer:
            return pickle.load(read_serializer)
    except (OSError, pickle.UnpicklingError):
        print("Unable to read persistent data structure in the following location: {}".format(path))
        return None
# Developer toggle: True indexes the directory tree and persists it,
# False loads the previously persisted index back into memory.
shouldRead = True
if shouldRead:
    # Read source/destination paths from the project config file.
    conf = readConfigFile()
    readPath = conf['copyPath']
    destPath = conf['destPath']
    LoggingUtils.log('Start reading the following directory: {}'.format(readPath))
    # NOTE(review): `list` shadows the builtin name; rename when refactoring.
    list = DirectoryUtils.readDirectoryMetadataObj(readPath)
    print(len(list))
    # Retrieve the current path.
    dirname = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'Indexes'))
    path = os.path.join(dirname, 'directoriesIdx.idx')
    # The metadata list is pickled here AND writePersistentStructure pickles
    # again; the read branch mirrors this with a matching double-unpickle.
    entry = pickle.dumps(list)
    writePersistentStructure(path, entry)
else:
    # memoryStructure holds the pickled bytes; loads() restores the list.
    memoryStructure = readPersistentStructure('directoriesIdx.idx')
    newObj = pickle.loads(memoryStructure)
    print(len(newObj))
| [
"Utils.FilesUtils.readConfigFile",
"pickle.dump",
"pickle.dumps",
"Utils.DirectoryUtils.readDirectoryMetadataObj",
"os.path.join",
"pickle.load",
"os.path.dirname",
"pickle.loads"
] | [((695, 726), 'os.path.join', 'os.path.join', (['dirname', 'filename'], {}), '(dirname, filename)\n', (707, 726), False, 'import os\n'), ((1025, 1041), 'Utils.FilesUtils.readConfigFile', 'readConfigFile', ([], {}), '()\n', (1039, 1041), False, 'from Utils.FilesUtils import readConfigFile\n'), ((1201, 1250), 'Utils.DirectoryUtils.readDirectoryMetadataObj', 'DirectoryUtils.readDirectoryMetadataObj', (['readPath'], {}), '(readPath)\n', (1240, 1250), False, 'from Utils import DirectoryUtils, IOUtils, LoggingUtils\n'), ((1405, 1448), 'os.path.join', 'os.path.join', (['dirname', '"""directoriesIdx.idx"""'], {}), "(dirname, 'directoriesIdx.idx')\n", (1417, 1448), False, 'import os\n'), ((1462, 1480), 'pickle.dumps', 'pickle.dumps', (['list'], {}), '(list)\n', (1474, 1480), False, 'import pickle\n'), ((1611, 1640), 'pickle.loads', 'pickle.loads', (['memoryStructure'], {}), '(memoryStructure)\n', (1623, 1640), False, 'import pickle\n'), ((310, 349), 'pickle.dump', 'pickle.dump', (['structure', 'writeSerializer'], {}), '(structure, writeSerializer)\n', (321, 349), False, 'import pickle\n'), ((807, 834), 'pickle.load', 'pickle.load', (['readSerializer'], {}), '(readSerializer)\n', (818, 834), False, 'import pickle\n'), ((639, 664), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (654, 664), False, 'import os\n'), ((1349, 1374), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1364, 1374), False, 'import os\n')] |
import matplotlib.pyplot as plt
import numpy as np
from flatland.core.grid.grid4_utils import get_new_position
from flatland.envs.agent_utils import TrainState
from flatland.utils.rendertools import RenderTool, AgentRenderVariant
from utils.fast_methods import fast_count_nonzero, fast_argmax
class AgentCanChooseHelper:
    def __init__(self):
        # When True, helpers may produce extra debug rendering output.
        self.render_debug_information = False
def reset(self, env):
self.env = env
if self.env is not None:
self.env.dev_obs_dict = {}
self.switches = {}
self.switches_neighbours = {}
self.switch_cluster = {}
self.switch_cluster_occupied = {}
self.switch_cluster_lock = {}
self.switch_cluster_grid = None
self.agent_positions = None
self.reset_swicht_cluster_lock()
self.reset_switch_cluster_occupied()
if self.env is not None:
self.find_all_cell_where_agent_can_choose()
self.calculate_agent_positions()
    def get_agent_positions(self):
        # Cached (height, width) grid built by calculate_agent_positions;
        # each cell holds an agent handle or -1 when unoccupied.
        return self.agent_positions
def calculate_agent_positions(self):
self.agent_positions: np.ndarray = np.full((self.env.height, self.env.width), -1)
for agent_handle in self.env.get_agent_handles():
agent = self.env.agents[agent_handle]
if agent.state in [TrainState.MOVING, TrainState.STOPPED, TrainState.MALFUNCTION]:
position = agent.position
if position is None:
position = agent.initial_position
self.agent_positions[position] = agent_handle
def clear_switch_cluster_lock(self):
'''
clean up switch cluster lock
'''
self.switch_cluster_lock = {}
def clear_switch_cluster_occupied(self):
'''
clean up switch cluster occupied
'''
self.switch_cluster_occupied = {}
def lock_switch_cluster(self, handle, agent_pos, agent_dir):
'''
Lock the switch cluster if possible
:param handle: Agent handle
:param agent_pos: position to lock
:param agent_dir: direction
:return: True if lock is successfully done otherwise false (it might still have a lock)
'''
cluster_id, grid_cell_members = self.get_switch_cluster(agent_pos)
if cluster_id < 1:
return True
lock_handle = self.switch_cluster_lock.get(cluster_id, None)
if lock_handle is None:
self.switch_cluster_lock.update({cluster_id: handle})
return True
if lock_handle == handle:
return True
return False
def unlock_switch_cluster(self, handle, agent_pos, agent_dir):
'''
Lock the switch cluster if possible
:param handle: Agent handle
:param agent_pos: position to lock
:param agent_dir: direction
:return: True if unlock is successfully done otherwise false (it might still have a lock own by another agent)
'''
cluster_id, grid_cell_members = self.get_switch_cluster(agent_pos)
if cluster_id < 1:
return True
lock_handle = self.switch_cluster_lock.get(cluster_id, None)
if lock_handle == handle:
self.switch_cluster_lock.update({cluster_id, None})
return True
return False
def get_agent_position_and_direction(self, handle):
'''
Returns the agent position - if not yet started (active) it returns the initial position
:param handle: agent reference (handle)
:return: agent_pos, agent_dir, agent_state
'''
agent = self.env.agents[handle]
agent_pos = agent.position
agent_dir = agent.direction
if agent_pos is None:
agent_pos = agent.initial_position
agent_dir = agent.initial_direction
return agent_pos, agent_dir, agent.state, agent.target
def has_agent_switch_cluster_lock(self, handle, agent_pos=None, agent_dir=None):
'''
Checks if the agent passed by the handle has the switch cluster lock
:param handle: agent reference (handle)
:param agent_pos: position to check
:param agent_dir: direction property
:return: True if handle owns the lock otherwise false
'''
if agent_pos is None or agent_dir is None:
agent_pos, agent_dir, agent_state, agent_target = self.get_agent_position_and_direction(handle)
cluster_id, grid_cell_members = self.get_switch_cluster(agent_pos)
if cluster_id < 1:
return False
lock_handle = self.switch_cluster_lock.get(cluster_id, None)
return lock_handle == handle
def get_switch_cluster_occupiers_next_cell(self, handle, agent_pos, agent_dir):
'''
Returns all occupiers for the next cell
:param handle: agent reference (handle)
:param agent_pos: position to check
:param agent_dir: direction property
:return: a list of all agents (handles) which occupied the next cell switch cluster
'''
possible_transitions = self.env.rail.get_transitions(*agent_pos, agent_dir)
occupiers = []
for new_direction in range(4):
if possible_transitions[new_direction] == 1:
new_position = get_new_position(agent_pos, new_direction)
occupiers += self.get_switch_cluster_occupiers(handle,
new_position,
new_direction)
return occupiers
def mark_switch_next_cluster_occupied(self, handle):
agent_position, agent_direciton, agent_state, agent_target = \
self.get_agent_position_and_direction(handle)
possible_transitions = self.env.rail.get_transitions(*agent_position, agent_direciton)
for new_direction in range(4):
if possible_transitions[new_direction] == 1:
new_position = get_new_position(agent_position, new_direction)
self.mark_switch_cluster_occupied(handle, new_position, new_direction)
def can_agent_enter_next_cluster(self, handle):
agent_position, agent_direciton, agent_state, agent_target = \
self.get_agent_position_and_direction(handle)
occupiers = self.get_switch_cluster_occupiers_next_cell(handle,
agent_position,
agent_direciton)
if len(occupiers) > 0 and handle not in occupiers:
return False
return True
def get_switch_cluster_occupiers(self, handle, agent_pos, agent_dir):
'''
:param handle: agent reference (handle)
:param agent_pos: position to check
:param agent_dir: direction property
:return: a list of all agents (handles) which occupied the switch cluster
'''
cluster_id, grid_cell_members = self.get_switch_cluster(agent_pos)
if cluster_id < 1:
return []
return self.switch_cluster_occupied.get(cluster_id, [])
def mark_switch_cluster_occupied(self, handle, agent_pos, agent_dir):
'''
Add the agent handle to the switch cluster occupied data. Set the agent (handle) as occupier
:param handle: agent reference (handle)
:param agent_pos: position to check
:param agent_dir: direction property
:return:
'''
cluster_id, grid_cell_members = self.get_switch_cluster(agent_pos)
if cluster_id < 1:
return
agent_handles = self.switch_cluster_occupied.get(cluster_id, [])
agent_handles.append(handle)
self.switch_cluster_occupied.update({cluster_id: agent_handles})
def reset_swicht_cluster_lock(self):
'''
Reset the explicit lock data switch_cluster_lock
'''
self.clear_switch_cluster_lock()
def reset_switch_cluster_occupied(self, handle_only_active_agents=False):
'''
Reset the occupied flag by recomputing the switch_cluster_occupied map
:param handle_only_active_agents: if true only agent with status ACTIVE will be mapped
'''
self.clear_switch_cluster_occupied()
for handle in range(self.env.get_num_agents()):
agent_pos, agent_dir, agent_state, agent_target = self.get_agent_position_and_direction(handle)
if handle_only_active_agents:
if agent_state in [TrainState.MOVING, TrainState.STOPPED, TrainState.MALFUNCTION]:
self.mark_switch_cluster_occupied(handle, agent_pos, agent_dir)
else:
if agent_state < TrainState.DONE:
self.mark_switch_cluster_occupied(handle, agent_pos, agent_dir)
def get_switch_cluster(self, pos):
'''
Returns the switch cluster at position pos
:param pos: the position for which the switch cluster must be returned
:return: if the position is not None and the switch cluster are computed it returns the cluster_id and the
grid cell members otherwise -1 and an empty list
'''
if pos is None:
return -1, []
if self.switch_cluster_grid is None:
return -1, []
cluster_id = self.switch_cluster_grid[pos]
grid_cell_members = self.switch_cluster.get(cluster_id, [])
return cluster_id, grid_cell_members
def find_all_switches(self):
'''
Search the environment (rail grid) for all switch cells. A switch is a cell where more than one tranisation
exists and collect all direction where the switch is a switch.
'''
self.switches = {}
for h in range(self.env.height):
for w in range(self.env.width):
pos = (h, w)
for dir in range(4):
possible_transitions = self.env.rail.get_transitions(*pos, dir)
num_transitions = fast_count_nonzero(possible_transitions)
if num_transitions > 1:
directions = self.switches.get(pos, [])
directions.append(dir)
self.switches.update({pos: directions})
def find_all_switch_neighbours(self):
'''
Collect all cells where is a neighbour to a switch cell. All cells are neighbour where the agent can make
just one step and he stands on a switch. A switch is a cell where the agents has more than one transition.
'''
self.switches_neighbours = {}
for h in range(self.env.height):
for w in range(self.env.width):
# look one step forward
for dir in range(4):
pos = (h, w)
possible_transitions = self.env.rail.get_transitions(*pos, dir)
for d in range(4):
if possible_transitions[d] == 1:
new_cell = get_new_position(pos, d)
if new_cell in self.switches.keys():
directions = self.switches_neighbours.get(pos, [])
directions.append(dir)
self.switches_neighbours.update({pos: directions})
def find_cluster_label(self, in_label) -> int:
label = int(in_label)
while 0 != self.label_dict[label]:
label = self.label_dict[label]
return label
def union_cluster_label(self, root, slave) -> None:
root_label = self.find_cluster_label(root)
slave_label = self.find_cluster_label(slave)
if slave_label != root_label:
self.label_dict[slave_label] = root_label
    def find_connected_clusters_and_label(self, binary_image):
        '''
        Two-pass connected-component labelling of binary_image, refined by actual rail
        connectivity: two adjacent non-zero pixels are only merged when a rail transition
        connects the corresponding cells. Fills self.switch_cluster_grid (cell -> cluster
        label, 0 = no cluster) and self.switch_cluster (label -> list of positions).
        :param binary_image: 2D array, non-zero marks cells to be clustered
        '''
        # Pad one row/column of zeros on top and left so the (row-1, col-1) lookups
        # below never leave the array.
        padded_binary_image = np.pad(binary_image, ((1, 0), (1, 0)), 'constant', constant_values=(0, 0))
        w = np.size(binary_image, 1)
        h = np.size(binary_image, 0)
        # Union-find parent table; 0 means "is a root".
        self.label_dict = [int(i) for i in np.zeros(w * h)]
        label = 1
        # first pass
        for cow in range(1, h + 1):
            for col in range(1, w + 1):
                working_position = (cow, col)
                working_pixel = padded_binary_image[working_position]
                if working_pixel != 0:
                    # Labels of the already-visited left and upper neighbours.
                    left_pixel_pos = (cow, col - 1)
                    up_pixel_pos = (cow - 1, col)
                    left_pixel = padded_binary_image[left_pixel_pos]
                    up_pixel = padded_binary_image[up_pixel_pos]
                    # Use connections (rails) for clustering (only real connected pixels builds a real cluster)
                    if (cow < self.env.height) and (col < self.env.width):
                        left_ok = 0
                        up_ok = 0
                        # correct padded image position (railenv)
                        t_working_position = (working_position[0] - 1, working_position[1] - 1)
                        t_left_pixel_pos = (left_pixel_pos[0] - 1, left_pixel_pos[1] - 1)
                        t_up_pixel_pos = (up_pixel_pos[0] - 1, up_pixel_pos[1] - 1)
                        # Check all orientations: does any rail transition actually lead
                        # from this cell to the left/upper neighbour?
                        for direction_loop in range(4):
                            possible_transitions = self.env.rail.get_transitions(*t_working_position, direction_loop)
                            orientation = direction_loop
                            if fast_count_nonzero(possible_transitions) == 1:
                                orientation = fast_argmax(possible_transitions)
                            for dir_loop, new_direction in enumerate(
                                    [(orientation + dir_loop) % 4 for dir_loop in range(-1, 3)]):
                                if possible_transitions[new_direction] == 1:
                                    new_pos = get_new_position(t_working_position, new_direction)
                                    if new_pos == t_left_pixel_pos:
                                        left_ok = 1
                                    if new_pos == t_up_pixel_pos:
                                        up_ok = 1
                        # Zero out neighbour labels that are not rail-connected.
                        left_pixel *= left_ok
                        up_pixel *= up_ok
                    # build clusters
                    if left_pixel == 0 and up_pixel == 0:
                        # No connected neighbour: start a new provisional label.
                        padded_binary_image[working_position] = label
                        label += 1
                    if left_pixel != 0 and up_pixel != 0:
                        # Both neighbours labelled: keep the smaller label and merge.
                        smaller = left_pixel if left_pixel < up_pixel else up_pixel
                        bigger = left_pixel if left_pixel > up_pixel else up_pixel
                        padded_binary_image[working_position] = smaller
                        self.union_cluster_label(smaller, bigger)
                    if up_pixel != 0 and left_pixel == 0:
                        padded_binary_image[working_position] = up_pixel
                    if up_pixel == 0 and left_pixel != 0:
                        padded_binary_image[working_position] = left_pixel
        # Second pass: replace every provisional label by its union-find root.
        for cow in range(1, h + 1):
            for col in range(1, w + 1):
                root = self.find_cluster_label(padded_binary_image[cow][col])
                padded_binary_image[cow][col] = root
        # Drop the padding and publish the cluster grid and position lists.
        self.switch_cluster_grid = padded_binary_image[1:, 1:]
        for h in range(self.env.height):
            for w in range(self.env.width):
                working_position = (h, w)
                root = self.switch_cluster_grid[working_position]
                if root > 0:
                    pos_data = self.switch_cluster.get(root, [])
                    pos_data.append(working_position)
                    self.switch_cluster.update({root: pos_data})
    def cluster_all_switches(self):
        '''
        Build the switch clusters: mark all switch cells in a binary image and run the
        connected-component labelling over it. When render_debug_information is set,
        additionally render the environment and show the images (blocking UI side effect).
        '''
        info_image = np.zeros((self.env.height, self.env.width))
        # for h in range(self.env.height):
        #     for w in range(self.env.width):
        #         # look one step forward
        #         if self.env.rail.grid[h][w] > 0:
        #             info_image[(h,w)] = -1
        # Mark every switch cell found by find_all_switches.
        for key in self.switches.keys():
            info_image[key] = 1
        # build clusters
        self.find_connected_clusters_and_label(info_image)
        if self.render_debug_information:
            # Setup renderer
            env_renderer = RenderTool(self.env, gl="PGL",
                                      agent_render_variant=AgentRenderVariant.AGENT_SHOWS_OPTIONS_AND_BOX)
            env_renderer.set_new_rail()
            env_renderer.render_env(
                show=True,
                frames=False,
                show_observations=True,
                show_predictions=False
            )
            # Show the raw switch image next to the computed cluster grid.
            plt.subplot(1, 2, 1)
            plt.imshow(info_image)
            plt.subplot(1, 2, 2)
            plt.imshow(self.switch_cluster_grid)
            plt.show()
            plt.pause(0.01)
def find_all_cell_where_agent_can_choose(self):
'''
prepare the memory - collect all cells where the agent can choose more than FORWARD/STOP.
'''
self.find_all_switches()
self.find_all_switch_neighbours()
self.cluster_all_switches()
def check_agent_decision(self, position, direction):
'''
Decide whether the agent is
- on a switch
- at a switch neighbour (near to switch). The switch must be a switch where the agent has more option than
FORWARD/STOP
- all switch : doesn't matter whether the agent has more options than FORWARD/STOP
- all switch neightbors : doesn't matter the agent has more then one options (transistion) when he reach the
switch
:param position: (x,y) cell coordinate
:param direction: Flatland direction
:return: agents_on_switch, agents_near_to_switch, agents_near_to_switch_all, agents_on_switch_all
'''
agents_on_switch = False
agents_on_switch_all = False
agents_near_to_switch = False
agents_near_to_switch_all = False
if position in self.switches.keys():
agents_on_switch = direction in self.switches[position]
agents_on_switch_all = True
if position in self.switches_neighbours.keys():
new_cell = get_new_position(position, direction)
if new_cell in self.switches.keys():
if not direction in self.switches[new_cell]:
agents_near_to_switch = direction in self.switches_neighbours[position]
else:
agents_near_to_switch = direction in self.switches_neighbours[position]
agents_near_to_switch_all = direction in self.switches_neighbours[position]
return agents_on_switch, agents_near_to_switch, agents_near_to_switch_all, agents_on_switch_all
def requires_agent_decision(self):
'''
Returns for all agents its check_agent_decision values
:return: dicts with check_agent_decision values stored (each agents)
'''
agents_can_choose = {}
agents_on_switch = {}
agents_on_switch_all = {}
agents_near_to_switch = {}
agents_near_to_switch_all = {}
for a in range(self.env.get_num_agents()):
ret_agents_on_switch, ret_agents_near_to_switch, ret_agents_near_to_switch_all, ret_agents_on_switch_all = \
self.check_agent_decision(
self.env.agents[a].position,
self.env.agents[a].direction)
agents_on_switch.update({a: ret_agents_on_switch})
agents_on_switch_all.update({a: ret_agents_on_switch_all})
ready_to_depart = self.env.agents[a].state == TrainState.READY_TO_DEPART
agents_near_to_switch.update({a: (ret_agents_near_to_switch and not ready_to_depart)})
agents_can_choose.update({a: agents_on_switch[a] or agents_near_to_switch[a]})
agents_near_to_switch_all.update({a: (ret_agents_near_to_switch_all and not ready_to_depart)})
return agents_can_choose, agents_on_switch, agents_near_to_switch, agents_near_to_switch_all, agents_on_switch_all
| [
"matplotlib.pyplot.imshow",
"numpy.size",
"utils.fast_methods.fast_count_nonzero",
"numpy.zeros",
"flatland.utils.rendertools.RenderTool",
"utils.fast_methods.fast_argmax",
"matplotlib.pyplot.pause",
"numpy.full",
"numpy.pad",
"flatland.core.grid.grid4_utils.get_new_position",
"matplotlib.pyplot... | [((1145, 1191), 'numpy.full', 'np.full', (['(self.env.height, self.env.width)', '(-1)'], {}), '((self.env.height, self.env.width), -1)\n', (1152, 1191), True, 'import numpy as np\n'), ((11908, 11982), 'numpy.pad', 'np.pad', (['binary_image', '((1, 0), (1, 0))', '"""constant"""'], {'constant_values': '(0, 0)'}), "(binary_image, ((1, 0), (1, 0)), 'constant', constant_values=(0, 0))\n", (11914, 11982), True, 'import numpy as np\n'), ((11995, 12019), 'numpy.size', 'np.size', (['binary_image', '(1)'], {}), '(binary_image, 1)\n', (12002, 12019), True, 'import numpy as np\n'), ((12032, 12056), 'numpy.size', 'np.size', (['binary_image', '(0)'], {}), '(binary_image, 0)\n', (12039, 12056), True, 'import numpy as np\n'), ((15842, 15885), 'numpy.zeros', 'np.zeros', (['(self.env.height, self.env.width)'], {}), '((self.env.height, self.env.width))\n', (15850, 15885), True, 'import numpy as np\n'), ((16370, 16474), 'flatland.utils.rendertools.RenderTool', 'RenderTool', (['self.env'], {'gl': '"""PGL"""', 'agent_render_variant': 'AgentRenderVariant.AGENT_SHOWS_OPTIONS_AND_BOX'}), "(self.env, gl='PGL', agent_render_variant=AgentRenderVariant.\n AGENT_SHOWS_OPTIONS_AND_BOX)\n", (16380, 16474), False, 'from flatland.utils.rendertools import RenderTool, AgentRenderVariant\n'), ((16748, 16768), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(1)'], {}), '(1, 2, 1)\n', (16759, 16768), True, 'import matplotlib.pyplot as plt\n'), ((16781, 16803), 'matplotlib.pyplot.imshow', 'plt.imshow', (['info_image'], {}), '(info_image)\n', (16791, 16803), True, 'import matplotlib.pyplot as plt\n'), ((16816, 16836), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(2)'], {}), '(1, 2, 2)\n', (16827, 16836), True, 'import matplotlib.pyplot as plt\n'), ((16849, 16885), 'matplotlib.pyplot.imshow', 'plt.imshow', (['self.switch_cluster_grid'], {}), '(self.switch_cluster_grid)\n', (16859, 16885), True, 'import matplotlib.pyplot as plt\n'), ((16898, 16908), 
'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (16906, 16908), True, 'import matplotlib.pyplot as plt\n'), ((16921, 16936), 'matplotlib.pyplot.pause', 'plt.pause', (['(0.01)'], {}), '(0.01)\n', (16930, 16936), True, 'import matplotlib.pyplot as plt\n'), ((18314, 18351), 'flatland.core.grid.grid4_utils.get_new_position', 'get_new_position', (['position', 'direction'], {}), '(position, direction)\n', (18330, 18351), False, 'from flatland.core.grid.grid4_utils import get_new_position\n'), ((5307, 5349), 'flatland.core.grid.grid4_utils.get_new_position', 'get_new_position', (['agent_pos', 'new_direction'], {}), '(agent_pos, new_direction)\n', (5323, 5349), False, 'from flatland.core.grid.grid4_utils import get_new_position\n'), ((6011, 6058), 'flatland.core.grid.grid4_utils.get_new_position', 'get_new_position', (['agent_position', 'new_direction'], {}), '(agent_position, new_direction)\n', (6027, 6058), False, 'from flatland.core.grid.grid4_utils import get_new_position\n'), ((12100, 12115), 'numpy.zeros', 'np.zeros', (['(w * h)'], {}), '(w * h)\n', (12108, 12115), True, 'import numpy as np\n'), ((10053, 10093), 'utils.fast_methods.fast_count_nonzero', 'fast_count_nonzero', (['possible_transitions'], {}), '(possible_transitions)\n', (10071, 10093), False, 'from utils.fast_methods import fast_count_nonzero, fast_argmax\n'), ((11061, 11085), 'flatland.core.grid.grid4_utils.get_new_position', 'get_new_position', (['pos', 'd'], {}), '(pos, d)\n', (11077, 11085), False, 'from flatland.core.grid.grid4_utils import get_new_position\n'), ((13481, 13521), 'utils.fast_methods.fast_count_nonzero', 'fast_count_nonzero', (['possible_transitions'], {}), '(possible_transitions)\n', (13499, 13521), False, 'from utils.fast_methods import fast_count_nonzero, fast_argmax\n'), ((13574, 13607), 'utils.fast_methods.fast_argmax', 'fast_argmax', (['possible_transitions'], {}), '(possible_transitions)\n', (13585, 13607), False, 'from utils.fast_methods import fast_count_nonzero, 
fast_argmax\n'), ((13899, 13950), 'flatland.core.grid.grid4_utils.get_new_position', 'get_new_position', (['t_working_position', 'new_direction'], {}), '(t_working_position, new_direction)\n', (13915, 13950), False, 'from flatland.core.grid.grid4_utils import get_new_position\n')] |
#!/usr/bin/env python3
import tuxedo as t
if __name__ == '__main__':
    # Round-trip a typed buffer through tpexport/tpimport, in binary and string modes.
    buf = {'TA_CLASS': ['T_SVCGRP'], 'TA_OPERATION': ['GET']}
    assert t.tpimport(t.tpexport(buf)) == buf
    assert t.tpimport(t.tpexport(buf, t.TPEX_STRING), t.TPEX_STRING) == buf
    # Field-identifier helpers: id <-> name plus type/number extraction.
    assert t.Fname32(t.Fldid32('TA_OPERATION')) == 'TA_OPERATION'
    assert t.Fldtype32(t.Fmkfldid32(t.FLD_STRING, 10)) == t.FLD_STRING
    assert t.Fldno32(t.Fmkfldid32(t.FLD_STRING, 10)) == 10
    # Non-UTF-8 bytes survive export/import via surrogateescape decoding.
    raw_bytes = b'\xc1 hello'
    first_pass = t.tpimport(t.tpexport({'TA_OPERATION': raw_bytes}))['TA_OPERATION'][0]
    assert first_pass.encode(errors='surrogateescape') == raw_bytes
    t.tpexport({'TA_OPERATION': first_pass})
    second_pass = t.tpimport(t.tpexport({'TA_OPERATION': first_pass}))['TA_OPERATION'][0]
    assert second_pass.encode(errors='surrogateescape') == raw_bytes
    # UTF-8 byte strings and unicode strings round-trip unchanged.
    utf8_bytes = b'gl\xc4\x81\xc5\xbe\xc5\xa1\xc4\xb7\xc5\xab\xc5\x86r\xc5\xab\xc4\xb7\xc4\xabtis'
    decoded = t.tpimport(t.tpexport({'TA_OPERATION': utf8_bytes}))['TA_OPERATION'][0]
    assert decoded.encode('utf8') == utf8_bytes
    unicode_text = 'gl\u0101\u017e\u0161\u0137\u016b\u0146r\u016b\u0137\u012btis'
    decoded = t.tpimport(t.tpexport({'TA_OPERATION': unicode_text}))['TA_OPERATION'][0]
    assert decoded == unicode_text
    # Boolean expression evaluation: equality and regex match/no-match operators.
    assert t.Fboolev32({'TA_OPERATION': '123456789'}, "TA_OPERATION=='123456789'")
    assert not t.Fboolev32({'TA_OPERATION': '123456789'}, "TA_OPERATION=='1234567890'")
    assert t.Fboolev32({'TA_OPERATION': '123456789'}, "TA_OPERATION%%'.234.*'")
    assert not t.Fboolev32({'TA_OPERATION': '123456789'}, "TA_OPERATION%%'.123.*'")
    assert t.Fboolev32({'TA_OPERATION': '123456789'}, "TA_OPERATION!%'.123.*'")
    # Print fields to stdout and read a field table back from stdin.
    import sys
    t.Ffprint32({'TA_OPERATION': '123456789'}, sys.stdout)
    t.Ffprint32({t.Fmkfldid32(t.FLD_STRING, 10): 'Dynamic field'}, sys.stdout)
    print(t.Fextread32(sys.stdin))
| [
"tuxedo.Fmkfldid32",
"tuxedo.Fldid32",
"tuxedo.Fboolev32",
"tuxedo.Fextread32",
"tuxedo.tpexport",
"tuxedo.Ffprint32"
] | [((631, 668), 'tuxedo.tpexport', 't.tpexport', (["{'TA_OPERATION': binstr2}"], {}), "({'TA_OPERATION': binstr2})\n", (641, 668), True, 'import tuxedo as t\n'), ((1197, 1268), 'tuxedo.Fboolev32', 't.Fboolev32', (["{'TA_OPERATION': '123456789'}", '"""TA_OPERATION==\'123456789\'"""'], {}), '({\'TA_OPERATION\': \'123456789\'}, "TA_OPERATION==\'123456789\'")\n', (1208, 1268), True, 'import tuxedo as t\n'), ((1368, 1436), 'tuxedo.Fboolev32', 't.Fboolev32', (["{'TA_OPERATION': '123456789'}", '"""TA_OPERATION%%\'.234.*\'"""'], {}), '({\'TA_OPERATION\': \'123456789\'}, "TA_OPERATION%%\'.234.*\'")\n', (1379, 1436), True, 'import tuxedo as t\n'), ((1532, 1600), 'tuxedo.Fboolev32', 't.Fboolev32', (["{'TA_OPERATION': '123456789'}", '"""TA_OPERATION!%\'.123.*\'"""'], {}), '({\'TA_OPERATION\': \'123456789\'}, "TA_OPERATION!%\'.123.*\'")\n', (1543, 1600), True, 'import tuxedo as t\n'), ((1621, 1675), 'tuxedo.Ffprint32', 't.Ffprint32', (["{'TA_OPERATION': '123456789'}", 'sys.stdout'], {}), "({'TA_OPERATION': '123456789'}, sys.stdout)\n", (1632, 1675), True, 'import tuxedo as t\n'), ((1284, 1356), 'tuxedo.Fboolev32', 't.Fboolev32', (["{'TA_OPERATION': '123456789'}", '"""TA_OPERATION==\'1234567890\'"""'], {}), '({\'TA_OPERATION\': \'123456789\'}, "TA_OPERATION==\'1234567890\'")\n', (1295, 1356), True, 'import tuxedo as t\n'), ((1452, 1520), 'tuxedo.Fboolev32', 't.Fboolev32', (["{'TA_OPERATION': '123456789'}", '"""TA_OPERATION%%\'.123.*\'"""'], {}), '({\'TA_OPERATION\': \'123456789\'}, "TA_OPERATION%%\'.123.*\'")\n', (1463, 1520), True, 'import tuxedo as t\n'), ((1767, 1790), 'tuxedo.Fextread32', 't.Fextread32', (['sys.stdin'], {}), '(sys.stdin)\n', (1779, 1790), True, 'import tuxedo as t\n'), ((156, 171), 'tuxedo.tpexport', 't.tpexport', (['buf'], {}), '(buf)\n', (166, 171), True, 'import tuxedo as t\n'), ((202, 232), 'tuxedo.tpexport', 't.tpexport', (['buf', 't.TPEX_STRING'], {}), '(buf, t.TPEX_STRING)\n', (212, 232), True, 'import tuxedo as t\n'), ((278, 303), 'tuxedo.Fldid32', 
't.Fldid32', (['"""TA_OPERATION"""'], {}), "('TA_OPERATION')\n", (287, 303), True, 'import tuxedo as t\n'), ((347, 377), 'tuxedo.Fmkfldid32', 't.Fmkfldid32', (['t.FLD_STRING', '(10)'], {}), '(t.FLD_STRING, 10)\n', (359, 377), True, 'import tuxedo as t\n'), ((416, 446), 'tuxedo.Fmkfldid32', 't.Fmkfldid32', (['t.FLD_STRING', '(10)'], {}), '(t.FLD_STRING, 10)\n', (428, 446), True, 'import tuxedo as t\n'), ((1694, 1724), 'tuxedo.Fmkfldid32', 't.Fmkfldid32', (['t.FLD_STRING', '(10)'], {}), '(t.FLD_STRING, 10)\n', (1706, 1724), True, 'import tuxedo as t\n'), ((508, 544), 'tuxedo.tpexport', 't.tpexport', (["{'TA_OPERATION': binstr}"], {}), "({'TA_OPERATION': binstr})\n", (518, 544), True, 'import tuxedo as t\n'), ((694, 731), 'tuxedo.tpexport', 't.tpexport', (["{'TA_OPERATION': binstr2}"], {}), "({'TA_OPERATION': binstr2})\n", (704, 731), True, 'import tuxedo as t\n'), ((927, 961), 'tuxedo.tpexport', 't.tpexport', (["{'TA_OPERATION': utf8}"], {}), "({'TA_OPERATION': utf8})\n", (937, 961), True, 'import tuxedo as t\n'), ((1111, 1144), 'tuxedo.tpexport', 't.tpexport', (["{'TA_OPERATION': uni}"], {}), "({'TA_OPERATION': uni})\n", (1121, 1144), True, 'import tuxedo as t\n')] |
"""
Copyright 2012-2019 <NAME>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from typing import cast, Dict, List, Optional, Type, Tuple, TYPE_CHECKING
import json
import re
import datetime
import functools
import logging
from hqlib import utils
from hqlib.typing import MetricParameters, MetricValue, DateTime, Number
from .metric_source import MetricSource
from .target import AdaptedTarget
if TYPE_CHECKING: # pragma: no cover
from ..software_development.project import Project # pylint: disable=unused-import
class ExtraInfo(object):
    """ Represents the extra metric information structure that is serialized to the extra_info json tag. """
    def __init__(self, **kwargs):
        """ Initialize with column keys mapped to their header texts. """
        self.headers = kwargs
        self.title = None
        self.data = []
    def __add__(self, *args):
        """ Append data rows to the table, pairing the arguments positionally with the column keys;
            a trailing partial row (fewer values than columns) is silently dropped. """
        values = args[0] if isinstance(args[0], tuple) else args
        width = len(self.headers)
        keys = self.headers.keys()
        for start in range(0, (len(values) // width) * width, width):
            self.data.append(dict(zip(keys, values[start:start + width])))
        return self
class Metric(object):
    """ Base class for metrics. """
    # Human readable metric name; subclasses must override.
    name: str = 'Subclass responsibility'
    # Report/norm templates (Dutch, formatted with the _parameters() dict).
    template = '{name} heeft {value} {unit}.'
    norm_template: str = 'Subclass responsibility'
    unit: str = 'Subclass responsibility'  # Unit in plural, e.g. "lines of code"
    # Default target/low-target/perfect values; subclasses must override.
    target_value: MetricValue = 'Subclass responsibility'
    low_target_value: MetricValue = 'Subclass responsibility'
    perfect_value: MetricValue = 'Subclass responsibility'
    # Templates used when the metric can't be measured (missing source/ids).
    missing_template: str = 'De {metric} van {name} kon niet gemeten worden omdat niet alle benodigde bronnen ' \
                            'beschikbaar zijn.'
    missing_source_template: str = 'De {metric} van {name} kon niet gemeten worden omdat de bron ' \
                                   '{metric_source_class} niet is geconfigureerd.'
    missing_source_id_template: str = 'De {metric} van {name} kon niet gemeten worden omdat niet alle benodigde ' \
                                      'bron-ids zijn geconfigureerd. Configureer ids voor de bron ' \
                                      '{metric_source_class}.'
    # Template used when the metric value equals perfect_value ('' disables it).
    perfect_template: str = ''
    url_label_text: str = ''
    comment_url_label_text: str = ''
    # The metric source class this metric reads from; None means "no source needed".
    metric_source_class: Type[MetricSource] = None
    # Column key -> header text for the extra-info table; None means "no extra info".
    extra_info_headers: Dict[str, str] = None
    def __init__(self, subject=None, project: 'Project' = None) -> None:
        """ Bind the metric to its subject and project and resolve the first metric source
            (of metric_source_class) for which the subject has a source id configured. """
        self._subject = subject
        self._project = project
        # Pick the first configured metric source the subject knows a source id for.
        for source in self._project.metric_sources(self.metric_source_class):
            try:
                source_id = self._subject.metric_source_id(source)
            except AttributeError:
                continue  # Subject can't provide a source id for this source; try the next.
            if source_id:
                self._metric_source = source
                self._metric_source_id, self._display_url = self.__separate_metric_source_links(source_id)
                break
        else:
            # for/else: no suitable source found -> fall back to "no metric source".
            if self.metric_source_class:
                logging.warning("Couldn't find metric source of class %s for %s", self.metric_source_class.__name__,
                                self.stable_id())
            self._metric_source = None
            self._metric_source_id = None
            self._display_url = None
        self.__id_string = self.stable_id()
        self._extra_info_data = list()
        # Local import, presumably to avoid a circular import at module load time -- TODO confirm.
        from hqlib import metric_source
        history_sources = self._project.metric_sources(metric_source.History) if self._project else []
        self.__history = cast(metric_source.History, history_sources[0]) if history_sources else None
def __separate_metric_source_links(self, values) -> tuple:
if not isinstance(values, list):
return self.__split_source_and_display(values)
else:
source = []
display = []
for val in values:
src, dsp = self.__split_source_and_display(val)
source.append(src)
display.append(dsp)
return source, display
@staticmethod
def __split_source_and_display(val) -> tuple:
return (val['source'], val['display']) if isinstance(val, dict) else (val, val)
def format_text_with_links(self, text: str) -> str:
""" Format a text paragraph with additional url. """
return Metric.format_comment_with_links(text, self.url(), '')
@staticmethod
def format_comment_with_links(text: str, url_dict: Dict[str, str], # pylint: disable=no-self-use
url_label: str) -> str:
""" Format a text paragraph with optional urls and label for the urls. """
comment_text = Metric._format_links_in_comment_text(text)
links = [
str(utils.format_link_object(href, utils.html_escape(anchor))) for (anchor, href) in list(url_dict.items())
]
if links:
if url_label:
url_label += ': '
comment_text = '{0} [{1}{2}]'.format(comment_text, url_label, ', '.join(sorted(links)))
return json.dumps(comment_text)[1:-1] # Strip quotation marks
@staticmethod
def _format_links_in_comment_text(text: str) -> str:
url_pattern = re.compile(r'(?i)\b(http(?:s?://|www\d{0,3}[.]|[a-z0-9.\-]+[.][a-z]{2,4}/)(?:[^\s()<>]|'
r'\(([^\s()<>]+|(\([^\s()<>]+\)))*\))+(?:\(([^\s()<>]+|(\([^\s()<>]+\)))*\)|'
r'[^\s`!()\[\]{};:\'".,<>?\xab\xbb\u201c\u201d\u2018\u2019]))')
return re.sub(url_pattern, r"{'href': '\1', 'text': '\1'}", text.replace('\n', ' '))
@classmethod
def norm_template_default_values(cls) -> MetricParameters:
""" Return the default values for parameters in the norm template. """
return dict(unit=cls.unit, target=cls.target_value, low_target=cls.low_target_value)
    def is_applicable(self) -> bool:  # pylint: disable=no-self-use
        """ Return whether this metric applies to the specified subject.
            Always True here; subclasses override to opt out for specific subjects. """
        return True
@functools.lru_cache(maxsize=1024)
def normalized_stable_id(self):
""" Returns stable_id where non-alphanumerics are substituted by _ and codes of other characters are added. """
return "".join([c if c.isalnum() else "_" for c in self.stable_id()]) + '_' + \
"".join(['' if c.isalnum() else str(ord(c)) for c in self.stable_id()])
@functools.lru_cache(maxsize=1024)
def stable_id(self) -> str:
""" Return an id that doesn't depend on numbering/order of metrics. """
stable_id = self.__class__.__name__
if not isinstance(self._subject, list):
stable_id += self._subject.name() if self._subject else str(self._subject)
return stable_id
    def set_id_string(self, id_string: str) -> None:
        """ Set the identification string. This can be set by a client since the identification of a metric may
            depend on the section the metric is reported in. E.g. A-1. """
        self.__id_string = id_string
    def id_string(self) -> str:
        """ Return the identification string of the metric, as set via set_id_string
            (defaults to the stable id). """
        return self.__id_string
def target(self) -> MetricValue:
""" Return the target value for the metric. If the actual value of the
metric is below the target value, the metric is not green. """
subject_target = self._subject.target(self.__class__) if hasattr(self._subject, 'target') else None
return self.target_value if subject_target is None else subject_target
def low_target(self) -> MetricValue:
""" Return the low target value for the metric. If the actual value is below the low target value, the metric
needs immediate action and its status/color is red. """
subject_low_target = self._subject.low_target(self.__class__) if hasattr(self._subject, 'low_target') else None
return self.low_target_value if subject_low_target is None else subject_low_target
    def __technical_debt_target(self):
        """ Return the reduced target due to technical debt for the subject, or None when the
            subject has none configured. If the subject has technical debt and the actual value
            of the metric is below the technical debt target, the metric is red, else grey. """
        try:
            return self._subject.technical_debt_target(self.__class__)
        except AttributeError:
            # Subject doesn't support technical debt targets.
            return None
@functools.lru_cache(maxsize=8 * 1024)
def status(self) -> str:
""" Return the status/color of the metric. """
for status_string, has_status in [('missing_source', self.__missing_source_configuration),
('missing', self._missing),
('grey', self.__has_accepted_technical_debt),
('red', self._needs_immediate_action),
('yellow', self._is_below_target),
('perfect', self.__is_perfect)]:
if has_status():
return status_string
return 'green'
def status_start_date(self) -> DateTime:
""" Return since when the metric has the current status. """
return self.__history.status_start_date(self.stable_id(), self.status()) \
if self.__history else datetime.datetime.min
def __has_accepted_technical_debt(self) -> bool:
""" Return whether the metric is below target but above the accepted technical debt level. """
technical_debt_target = self.__technical_debt_target()
if technical_debt_target:
return self._is_below_target() and self._is_value_better_than(technical_debt_target.target_value())
return False
def _missing(self) -> bool:
""" Return whether the metric source is missing. """
return self.value() == -1
def __missing_source_configuration(self) -> bool:
""" Return whether the metric sources have been completely configured. """
return self.__missing_source_class() or self.__missing_source_ids()
def __missing_source_class(self) -> bool:
""" Return whether a metric source class that needs to be configured for the metric to be measurable is
available from the project. """
return not self._project.metric_sources(self.metric_source_class) if self.metric_source_class else False
def __missing_source_ids(self) -> bool:
""" Return whether the metric source ids have been configured for the metric source class. """
return bool(self.metric_source_class) and not self._get_metric_source_ids()
def _needs_immediate_action(self) -> bool:
""" Return whether the metric needs immediate action, i.e. its actual value is below its low target value. """
return not self._is_value_better_than(self.low_target())
def _is_below_target(self) -> bool:
""" Return whether the actual value of the metric is below its target value. """
return not self._is_value_better_than(self.target())
def __is_perfect(self) -> bool:
""" Return whether the actual value of the metric equals its perfect value,
i.e. no further improvement is possible. """
return self.value() == self.perfect_value
    def value(self) -> MetricValue:
        """ Return the actual value of the metric. Abstract; every concrete metric must
            override this (by convention -1 signals a missing source, see _missing). """
        raise NotImplementedError
    def _is_value_better_than(self, target: MetricValue) -> bool:
        """ Return whether the actual value of the metric is better than the specified
            target value. Abstract; subclasses define the comparison direction. """
        raise NotImplementedError
def report(self, max_subject_length: int = 200) -> str:
""" Return the actual value of the metric in the form of a short, mostly one sentence, report. """
name = self.__subject_name()
if len(name) > max_subject_length:
name = name[:max_subject_length] + '...'
logging.info('Reporting %s on %s', self.__class__.__name__, name)
return self._get_template().format(**self._parameters())
def _get_template(self) -> str:
""" Return the template for the metric report. """
if self.__missing_source_class():
return self.missing_source_template
if self.__missing_source_ids():
return self.missing_source_id_template
if self._missing():
return self.missing_template
if self.__is_perfect() and self.perfect_template:
return self.perfect_template
return self.template
def _parameters(self) -> MetricParameters:
""" Return the parameters for the metric report template and for the metric norm template. """
return dict(name=self.__subject_name(),
metric=self.name[0].lower() + self.name[1:],
unit=self.unit,
target=self.target(),
low_target=self.low_target(),
value=self.value(),
metric_source_class=self.metric_source_class.__name__ if self.metric_source_class
else '<metric has no metric source defined>')
def norm(self) -> str:
""" Return a description of the norm for the metric. """
try:
return self.norm_template.format(**self._parameters())
except KeyError as reason:
class_name = self.__class__.__name__
logging.critical('Key missing in %s parameters (%s) for norm template "%s": %s', class_name,
self._parameters(), self.norm_template, reason)
raise
def url(self) -> Dict[str, str]:
""" Return a dictionary of urls for the metric. The key is the anchor, the value the url. """
label = self._metric_source.metric_source_name if self._metric_source else 'Unknown metric source'
urls = [url for url in self._metric_source_urls() if url] # Weed out urls that are empty or None
if len(urls) == 1:
return {label: urls[0]}
return {'{label} ({index}/{count})'.format(label=label, index=index, count=len(urls)): url
for index, url in enumerate(urls, start=1)}
def _metric_source_urls(self) -> List[str]:
""" Return a list of metric source urls to be used to create the url dict. """
if self._metric_source:
if self._get_display_urls():
return self._metric_source.metric_source_urls(*self._get_display_urls())
return [self._metric_source.url()]
return []
def _get_display_urls(self) -> List[str]:
ids = self._display_url if isinstance(self._display_url, list) else [self._display_url]
return [id_ for id_ in ids if id_]
def _get_metric_source_ids(self) -> List[str]:
""" Allow for subclasses to override what the metric source id is. """
ids = self._metric_source_id if isinstance(self._metric_source_id, list) else [self._metric_source_id]
return [id_ for id_ in ids if id_]
def comment(self) -> str:
""" Return a comment on the metric. The comment is retrieved from either the technical debt or the subject. """
comments = [comment for comment in (self.__non_default_target_comment(), self.__technical_debt_comment(),
self.__subject_comment()) if comment]
return ' '.join(comments)
    def __subject_comment(self) -> str:
        """ Return the comment of the subject about this metric, or '' when the subject
            has no metric options, no options for this metric, or no comment key. """
        try:
            return self._subject.metric_options(self.__class__)['comment']
        except (AttributeError, TypeError, KeyError):
            # No metric_options method, options not a dict, or no 'comment' key.
            return ''
def __technical_debt_comment(self) -> str:
""" Return the comment of the accepted technical debt, if any. """
td_target = self.__technical_debt_target()
return td_target.explanation(self.unit) if td_target else ''
def __non_default_target_comment(self) -> str:
""" Return a comment about a non-default target, if relevant. """
return AdaptedTarget(self.low_target(), self.low_target_value).explanation(self.unit)
def comment_urls(self) -> Dict[str, str]: # pylint: disable=no-self-use
""" Return the source for the comment on the metric. """
return dict()
def __history_records(self, method: callable) -> List[int]:
history = method(self.stable_id()) if self.__history else []
return [int(round(float(value))) if value is not None else None for value in history]
def recent_history(self) -> List[int]:
""" Return a list of recent values of the metric, to be used in e.g. a spark line graph. """
return self.__history_records(self.__history.recent_history) if self.__history else []
def long_history(self) -> List[int]:
""" Return a long list of values of the metric, to be used in e.g. a spark line graph. """
return self.__history_records(self.__history.long_history) if self.__history else []
def get_recent_history_dates(self) -> str:
""" Return a list of recent dates when report was generated. """
return self.__history.get_dates() if self.__history else ""
def get_long_history_dates(self) -> str:
""" Return a long list of dates when report was generated. """
return self.__history.get_dates(long_history=True) if self.__history else ""
def y_axis_range(self) -> Tuple[int, int]:
""" Return a two-tuple (min, max) for use in graphs. """
history = [d for d in self.recent_history() if d is not None]
if not history:
return 0, 100
minimum, maximum = min(history), max(history)
return (minimum - 1, maximum + 1) if minimum == maximum else (minimum, maximum)
def numerical_value(self) -> Number:
""" Return a numerical version of the metric value for use in graphs. By default this simply returns the
regular value, assuming it is already numerical. Metrics that don't have a numerical value by default
can override this method to convert the non-numerical value into a numerical value. """
value = self.value()
if isinstance(value, tuple):
value = value[0]
if isinstance(value, (int, float)):
return value
raise NotImplementedError
def extra_info(self) -> Optional[ExtraInfo]:
""" Method can be overridden by concrete metrics that fill extra info. """
extra_info = None
if self._metric_source and self.extra_info_headers:
url_list = self.extra_info_rows()
if url_list:
extra_info = self.__create_extra_info(url_list)
return extra_info if extra_info is not None and extra_info.data else None
def extra_info_rows(self) -> List:
""" Returns rows of extra info table. """
return self._extra_info_data
def __create_extra_info(self, url_list):
extra_info = ExtraInfo(**self.extra_info_headers)
extra_info.title = self.url_label_text
for item in url_list:
extra_info += self.convert_item_to_extra_info(item)
return extra_info
@staticmethod
def convert_item_to_extra_info(item):
""" Method should transform an item to the form used in extra info. Should be overridden. """
return item
def __subject_name(self) -> str:
""" Return the subject name, or a string representation if the subject has no name. """
try:
return self._subject.name()
except AttributeError:
return str(self._subject)
| [
"re.compile",
"json.dumps",
"hqlib.utils.html_escape",
"functools.lru_cache",
"logging.info",
"typing.cast"
] | [((6688, 6721), 'functools.lru_cache', 'functools.lru_cache', ([], {'maxsize': '(1024)'}), '(maxsize=1024)\n', (6707, 6721), False, 'import functools\n'), ((7059, 7092), 'functools.lru_cache', 'functools.lru_cache', ([], {'maxsize': '(1024)'}), '(maxsize=1024)\n', (7078, 7092), False, 'import functools\n'), ((9053, 9090), 'functools.lru_cache', 'functools.lru_cache', ([], {'maxsize': '(8 * 1024)'}), '(maxsize=8 * 1024)\n', (9072, 9090), False, 'import functools\n'), ((5873, 6131), 're.compile', 're.compile', (['"""(?i)\\\\b(http(?:s?://|www\\\\d{0,3}[.]|[a-z0-9.\\\\-]+[.][a-z]{2,4}/)(?:[^\\\\s()<>]|\\\\(([^\\\\s()<>]+|(\\\\([^\\\\s()<>]+\\\\)))*\\\\))+(?:\\\\(([^\\\\s()<>]+|(\\\\([^\\\\s()<>]+\\\\)))*\\\\)|[^\\\\s`!()\\\\[\\\\]{};:\\\\\'".,<>?\\\\xab\\\\xbb\\\\u201c\\\\u201d\\\\u2018\\\\u2019]))"""'], {}), '(\n \'(?i)\\\\b(http(?:s?://|www\\\\d{0,3}[.]|[a-z0-9.\\\\-]+[.][a-z]{2,4}/)(?:[^\\\\s()<>]|\\\\(([^\\\\s()<>]+|(\\\\([^\\\\s()<>]+\\\\)))*\\\\))+(?:\\\\(([^\\\\s()<>]+|(\\\\([^\\\\s()<>]+\\\\)))*\\\\)|[^\\\\s`!()\\\\[\\\\]{};:\\\\\\\'".,<>?\\\\xab\\\\xbb\\\\u201c\\\\u201d\\\\u2018\\\\u2019]))\'\n )\n', (5883, 6131), False, 'import re\n'), ((12578, 12643), 'logging.info', 'logging.info', (['"""Reporting %s on %s"""', 'self.__class__.__name__', 'name'], {}), "('Reporting %s on %s', self.__class__.__name__, name)\n", (12590, 12643), False, 'import logging\n'), ((4203, 4250), 'typing.cast', 'cast', (['metric_source.History', 'history_sources[0]'], {}), '(metric_source.History, history_sources[0])\n', (4207, 4250), False, 'from typing import cast, Dict, List, Optional, Type, Tuple, TYPE_CHECKING\n'), ((5719, 5743), 'json.dumps', 'json.dumps', (['comment_text'], {}), '(comment_text)\n', (5729, 5743), False, 'import json\n'), ((5442, 5467), 'hqlib.utils.html_escape', 'utils.html_escape', (['anchor'], {}), '(anchor)\n', (5459, 5467), False, 'from hqlib import utils\n')] |
from __future__ import absolute_import
from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from . import views
urlpatterns = [
url(r'^swagger.json/$',views.swagger_json_api),
url(r'^swagger/$', login_required(views.swagger)),
url(r'charts/$', login_required(views.ChartsLoaderView.as_view())),
url(r'user/$', login_required(views.UserInformation.as_view())),
url(r'filters/$', login_required(views.FilterLoaderView.as_view())),
url(r'squealy/(?P<chart_url>[-\w]+)', login_required(views.ChartView.as_view())),
url(r'filter-api/(?P<filter_url>[-\w]+)', login_required(views.FilterView.as_view())),
url(r'databases/$', login_required(views.DatabaseView.as_view())),
url(r'^$', login_required(views.squealy_interface)),
url(r'^(?P<chart_name>[\w@%.\Wd]+)/$', login_required(views.squealy_interface)),
url(r'^(?P<chart_name>[\w@%.\Wd]+)/(?P<mode>\w+)$', login_required(views.squealy_interface)),
]
| [
"django.conf.urls.url",
"django.contrib.auth.decorators.login_required"
] | [((173, 219), 'django.conf.urls.url', 'url', (['"""^swagger.json/$"""', 'views.swagger_json_api'], {}), "('^swagger.json/$', views.swagger_json_api)\n", (176, 219), False, 'from django.conf.urls import url\n'), ((243, 272), 'django.contrib.auth.decorators.login_required', 'login_required', (['views.swagger'], {}), '(views.swagger)\n', (257, 272), False, 'from django.contrib.auth.decorators import login_required\n'), ((745, 784), 'django.contrib.auth.decorators.login_required', 'login_required', (['views.squealy_interface'], {}), '(views.squealy_interface)\n', (759, 784), False, 'from django.contrib.auth.decorators import login_required\n'), ((829, 868), 'django.contrib.auth.decorators.login_required', 'login_required', (['views.squealy_interface'], {}), '(views.squealy_interface)\n', (843, 868), False, 'from django.contrib.auth.decorators import login_required\n'), ((926, 965), 'django.contrib.auth.decorators.login_required', 'login_required', (['views.squealy_interface'], {}), '(views.squealy_interface)\n', (940, 965), False, 'from django.contrib.auth.decorators import login_required\n')] |
import logging
import numpy as np
from gunpowder.nodes.batch_filter import BatchFilter
logger = logging.getLogger(__name__)
class TanhSaturate(BatchFilter):
'''Saturate the values of an array to be floats between -1 and 1 by applying the tanh function.
Args:
array (:class:`ArrayKey`):
The key of the array to modify.
factor (scalar, optional):
The factor to divide by before applying the tanh, controls how quickly the values saturate to -1, 1.
'''
def __init__(self, array, scale=None):
self.array = array
if scale is not None:
self.scale = scale
else:
self.scale = 1.
def process(self, batch, request):
if self.array not in batch.arrays:
return
array = batch.arrays[self.array]
array.data = np.tanh(array.data/self.scale)
| [
"logging.getLogger",
"numpy.tanh"
] | [((98, 125), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (115, 125), False, 'import logging\n'), ((852, 884), 'numpy.tanh', 'np.tanh', (['(array.data / self.scale)'], {}), '(array.data / self.scale)\n', (859, 884), True, 'import numpy as np\n')] |
import requests, json, time, random, datetime, threading, pickle
from termcolor import colored
sitekey = "<KEY>"
def log(event):
d = datetime.datetime.now().strftime("%H:%M:%S")
print("Raffle OFF-S by Azerpas :: " + str(d) + " :: " + event)
class Raffle(object):
def __init__(self):
self.s = requests.session()
self.shoes = [
{"shoe_id":"8","shoe_name":"ZOOM VAPORFLY"},
{"shoe_id":"7","shoe_name":"VAPOR MAX"}]
self.url = "https://www.offspring.co.uk/view/component/entercompetition"
def register(self,identity):
# register to each shoes.
for dshoes in self.shoes:
print("Signin for: " + dshoes['shoe_name'])
d = datetime.datetime.now().strftime('%H:%M')
log("Getting Captcha")
flag = False
while flag != True:
d = datetime.datetime.now().strftime('%H:%M')
try:
file = open(str(d)+'.txt','r') #r as reading only
flag = True
except IOError:
time.sleep(2)
log("No captcha available(1)")
flag = False
try:
FileList = pickle.load(file) #FileList the list where i want to pick out the captcharep
except:
log("Can't open file")
while len(FileList) == 0: #if len(FileList) it will wait for captcha scraper
d = datetime.datetime.now().strftime('%H:%M')
try:
file = open(str(d)+'.txt','r')
FileList = pickle.load(file)
if FileList == []:
log("No captcha available(2)")
time.sleep(3)
except IOError as e:
log("No file, waiting...")
print(e)
time.sleep(3)
captchaREP = random.choice(FileList)
FileList.remove(captchaREP)
file = open(str(d)+'.txt','w')
pickle.dump(FileList,file)
log("Captcha retrieved")
# captcha
headers = {
"authority":"www.offspring.co.uk",
"method":"POST",
"path":"/view/component/entercompetition",
"scheme":"https",
"accept":"*/*",
"accept-encoding":"gzip, deflate, br",
"accept-language":"fr-FR,fr;q=0.8,en-US;q=0.6,en;q=0.4",
"content-length":"624",
"content-type":"application/x-www-form-urlencoded; charset=UTF-8",
"origin":"https://www.offspring.co.uk",
"referer":"https://www.offspring.co.uk/view/content/nikecompetition",
"user-agent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.100 Safari/537.36",
"x-requested-with":"XMLHttpRequest",}
payload = {"firstName":identity['fname'],
"lastName":identity['lname'],
"competitionIDEntered":dshoes['shoe_id'],
"competitionNameEntered":dshoes['shoe_name'],
"emailAddress":identity['mail'],
"phoneNumber":identity['phone'],
"optIn":"false",
"size":identity['shoesize'],
"grecaptcharesponse":captchaREP,
}
req = self.s.post(self.url,headers=headers,data=payload)
print(req)
jsonn = json.loads(req.text)
if req.status_code == 200:
if jsonn['statusCode'] == "success":
print(colored('Successfully entered','red', attrs=['bold']))
else:
log("Failed to register for: " + identity['mail'])
sleep = random.uniform(2.3,2.9)
log("Sleeping: " + str(sleep) + " seconds")
time.sleep(sleep)
self.s.cookies.clear()
if __name__ == "__main__":
ra = Raffle()
accounts = [
# ENTER YOUR ACCOUNTS HERE
{"fname":"pete","lname":"james","mail":"<EMAIL>","phone":"+33612334455","city":"London","zip":"HEC 178","shoesize":"10",},
]
# catpcha
for i in accounts:
ra.register(i)
| [
"requests.session",
"random.choice",
"pickle.dump",
"json.loads",
"random.uniform",
"termcolor.colored",
"pickle.load",
"time.sleep",
"datetime.datetime.now"
] | [((303, 321), 'requests.session', 'requests.session', ([], {}), '()\n', (319, 321), False, 'import requests, json, time, random, datetime, threading, pickle\n'), ((137, 160), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (158, 160), False, 'import requests, json, time, random, datetime, threading, pickle\n'), ((1556, 1579), 'random.choice', 'random.choice', (['FileList'], {}), '(FileList)\n', (1569, 1579), False, 'import requests, json, time, random, datetime, threading, pickle\n'), ((1653, 1680), 'pickle.dump', 'pickle.dump', (['FileList', 'file'], {}), '(FileList, file)\n', (1664, 1680), False, 'import requests, json, time, random, datetime, threading, pickle\n'), ((2870, 2890), 'json.loads', 'json.loads', (['req.text'], {}), '(req.text)\n', (2880, 2890), False, 'import requests, json, time, random, datetime, threading, pickle\n'), ((3110, 3134), 'random.uniform', 'random.uniform', (['(2.3)', '(2.9)'], {}), '(2.3, 2.9)\n', (3124, 3134), False, 'import requests, json, time, random, datetime, threading, pickle\n'), ((3186, 3203), 'time.sleep', 'time.sleep', (['sleep'], {}), '(sleep)\n', (3196, 3203), False, 'import requests, json, time, random, datetime, threading, pickle\n'), ((1018, 1035), 'pickle.load', 'pickle.load', (['file'], {}), '(file)\n', (1029, 1035), False, 'import requests, json, time, random, datetime, threading, pickle\n'), ((651, 674), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (672, 674), False, 'import requests, json, time, random, datetime, threading, pickle\n'), ((1336, 1353), 'pickle.load', 'pickle.load', (['file'], {}), '(file)\n', (1347, 1353), False, 'import requests, json, time, random, datetime, threading, pickle\n'), ((770, 793), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (791, 793), False, 'import requests, json, time, random, datetime, threading, pickle\n'), ((923, 936), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (933, 936), False, 'import requests, 
json, time, random, datetime, threading, pickle\n'), ((1227, 1250), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1248, 1250), False, 'import requests, json, time, random, datetime, threading, pickle\n'), ((1427, 1440), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (1437, 1440), False, 'import requests, json, time, random, datetime, threading, pickle\n'), ((1525, 1538), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (1535, 1538), False, 'import requests, json, time, random, datetime, threading, pickle\n'), ((2976, 3030), 'termcolor.colored', 'colored', (['"""Successfully entered"""', '"""red"""'], {'attrs': "['bold']"}), "('Successfully entered', 'red', attrs=['bold'])\n", (2983, 3030), False, 'from termcolor import colored\n')] |
"""
Core functionality for feature computation
<NAME>
Copyright (c) 2021. Pfizer Inc. All rights reserved.
"""
from abc import ABC, abstractmethod
from collections.abc import Iterator, Sequence
import json
from warnings import warn
from pandas import DataFrame
from numpy import float_, asarray, zeros, sum, moveaxis
__all__ = ["Bank"]
class ArrayConversionError(Exception):
pass
def get_n_feats(size, index):
if isinstance(index, int):
return 1
elif isinstance(index, (Iterator, Sequence)):
return len(index)
elif isinstance(index, slice):
return len(range(*index.indices(size)))
elif isinstance(index, type(Ellipsis)):
return size
def partial_index_check(index):
if index is None:
index = ...
if not isinstance(index, (int, Iterator, Sequence, type(...), slice)):
raise IndexError(f"Index type ({type(index)}) not understood.")
if isinstance(index, str):
raise IndexError("Index type (str) not understood.")
return index
def normalize_indices(nfeat, index):
if index is None:
return [...] * nfeat
elif not isinstance(index, (Iterator, Sequence)): # slice, single integer, etc
return [partial_index_check(index)] * nfeat
elif all([isinstance(i, int) for i in index]): # iterable of ints
return [index] * nfeat
elif isinstance(index, Sequence): # able to be indexed
return [partial_index_check(i) for i in index]
else: # pragma: no cover
return IndexError(f"Index type ({type(index)}) not understood.")
def normalize_axes(ndim, axis, ind_axis):
"""
Normalize input axes to be positive/correct for how the swapping has to work
"""
if axis == ind_axis:
raise ValueError("axis and index_axis cannot be the same")
if ndim == 1:
return 0, None
elif ndim >= 2:
"""
| shape | ax | ia | move1 | ax | ia | res | ax | ia | res move |
|--------|----|----|--------|----|----|-------|----|----|----------|
| (a, b) | 0 | 1 | (b, a) | 0 | 0 | (bf,) | | | |
| (a, b) | 0 | N | (b, a) | 0 | N | (f, b)| | | |
| (a, b) | 1 | 0 | | | | (3a,) | | | |
| (a, b) | 1 | N | | | | (f, a)| | | |
| shape | ax| ia | move1 | ax| ia| move2 | res | | ia| res move |
|----------|---|------|----------|---|---|----------|----------|----|---|----------|
| (a, b, c)| 0 | 1(0) | (b, c, a)| | | | (bf, c) | 0 | 0 | |
| (a, b, c)| 0 | 2(1) | (b, c, a)| | 1 | (c, b, a)| (cf, b) | 0 | 1 | (b, cf) |
| (a, b, c)| 0 | N | (b, c, a)| | | | (f, b, c)| | | |
| (a, b, c)| 1 | 0 | (a, c, b)| | | | (af, c) | 0 | 0 | |
| (a, b, c)| 1 | 2(1) | (a, c, b)| | 1 | (c, a, b)| (cf, a) | 0 | 1 | (a, cf) |
| (a, b, c)| 1 | N | (a, c, b)| | | | (f, a, c)| | | |
| (a, b, c)| 2 | 0 | (a, b, c)| | | | (af, b) | 0 | 0 | |
| (a, b, c)| 2 | 1 | (a, b, c)| | 1 | (b, a, c)| (bf, a) | 0 | 1 | (a, bf) |
| (a, b, c)| 2 | N | (a, b, c)| | | | (f, a, b)| | | |
| shape | ax| ia | move1 | ia| move2 | res | | ia| res move |
|------------|---|------|-------------|---|-------------|-------------|---|---|-----------|
|(a, b, c, d)| 0 | 1(0) | (b, c, d, a)| | | (bf, c, d) | 0 | 0 | |
|(a, b, c, d)| 0 | 2(1) | (b, c, d, a)| 1 | (c, b, d, a)| (cf, b, d) | 0 | 1 | (b, cf, d)|
|(a, b, c, d)| 0 | 3(2) | (b, c, d, a)| 2 | (d, b, c, a)| (df, b, c) | 0 | 2 | (d, c, df)|
|(a, b, c, d)| 0 | N | (b, c, d, a)| | | (f, b, c, d)| | | |
|(a, b, c, d)| 1 | 0 | (a, c, d, b)| | | (af, c, d) | | | |
|(a, b, c, d)| 1 | 2(1) | (a, c, d, b)| 1 | (c, a, d, b)| (cf, a, d) | 0 | 1 | (a, cf, d)|
|(a, b, c, d)| 1 | 3(2) | (a, c, d, b)| 2 | (d, a, c, b)| (df, a, c) | 0 | 2 | (a, c, df)|
|(a, b, c, d)| 1 | N | (a, c, d, b)| | | (f, a, c, d)| | | |
|(a, b, c, d)| 2 | 0 | (a, b, d, c)| | | (af, b, d) | | | |
|(a, b, c, d)| 2 | 1 | (a, b, d, c)| 1 | (b, a, d, c)| (bf, a, d) | 0 | 1 | (a, bf, d)|
|(a, b, c, d)| 2 | 3(2) | (a, b, d, c)| 2 | (d, a, b, c)| (df, a, b) | 0 | 2 | (a, b, df)|
|(a, b, c, d)| 2 | N | (a, b, d, c)| | | (f, a, b, d)| | | |
|(a, b, c, d)| 3 | 0 | (a, b, c, d)| | | (af, b, c) | | | |
|(a, b, c, d)| 3 | 1 | (a, b, c, d)| 1 | (b, a, c, d)| (bf, a, c) | 0 | 1 | (a, bf, c)|
|(a, b, c, d)| 3 | 2 | (a, b, c, d)| 2 | (c, a, b, d)| (cf, a, b) | 0 | 2 | (a, b, cf)|
|(a, b, c, d)| 3 | N | (a, b, c, d)| | | (f, a, b, c)| | | |
"""
ax = axis if axis >= 0 else ndim + axis
if ind_axis is None:
return ax, None
ia = ind_axis if ind_axis >= 0 else ndim + ind_axis
if ia > ax:
ia -= 1
return ax, ia
class Bank:
"""
A feature bank object for ease in creating a table or pipeline of features to be computed.
Parameters
----------
bank_file : {None, path-like}, optional
Path to a saved bank file to load. Optional
Examples
--------
"""
__slots__ = ("_feats", "_indices")
def __str__(self):
return "Bank"
def __repr__(self):
s = "Bank["
for f in self._feats:
s += f"\n\t{f!r},"
s += "\n]"
return s
def __contains__(self, item):
return item in self._feats
def __len__(self):
return len(self._feats)
def __init__(self, bank_file=None):
# initialize some variables
self._feats = []
self._indices = []
if bank_file is not None:
self.load(bank_file)
def add(self, features, index=None):
"""
Add a feature or features to the pipeline.
Parameters
----------
features : {Feature, list}
Single signal Feature, or list of signal Features to add to the feature Bank
index : {int, slice, list}, optional
Index to be applied to data input to each features. Either a index that will
apply to every feature, or a list of features corresponding to each feature being
added.
"""
if isinstance(features, Feature):
if features in self:
warn(
f"Feature {features!s} already in the Bank, will be duplicated.",
UserWarning,
)
self._indices.append(partial_index_check(index))
self._feats.append(features)
elif all([isinstance(i, Feature) for i in features]):
if any([ft in self for ft in features]):
warn("Feature already in the Bank, will be duplicated.", UserWarning)
self._indices.extend(normalize_indices(len(features), index))
self._feats.extend(features)
def save(self, file):
"""
Save the feature Bank to a file for a persistent object that can be loaded later to create
the same Bank as before
Parameters
----------
file : path-like
File to be saved to. Creates a new file or overwrites an existing file.
"""
out = []
for i, ft in enumerate(self._feats):
idx = "Ellipsis" if self._indices[i] is Ellipsis else self._indices[i]
out.append(
{ft.__class__.__name__: {"Parameters": ft._params, "Index": idx}}
)
with open(file, "w") as f:
json.dump(out, f)
def load(self, file):
"""
Load a previously saved feature Bank from a json file.
Parameters
----------
file : path-like
File to be read to create the feature Bank.
"""
# the import must be here, otherwise a circular import error occurs
from skdh.features import lib
with open(file, "r") as f:
feats = json.load(f)
for ft in feats:
name = list(ft.keys())[0]
params = ft[name]["Parameters"]
index = ft[name]["Index"]
if index == "Ellipsis":
index = Ellipsis
# add it to the feature bank
self.add(getattr(lib, name)(**params), index=index)
def compute(
self, signal, fs=1.0, *, axis=-1, index_axis=None, indices=None, columns=None
):
"""
Compute the specified features for the given signal
Parameters
----------
signal : {array-like}
Array-like signal to have features computed for.
fs : float, optional
Sampling frequency in Hz. Default is 1Hz
axis : int, optional
Axis along which to compute the features. Default is -1.
index_axis : {None, int}, optional
Axis corresponding to the indices specified in `Bank.add` or `indices`. Default is
None, which assumes that this axis is not part of the signal. Note that setting this to
None means values for `indices` or the indices set in `Bank.add` will be ignored.
indices : {None, int, list-like, slice, ellipsis}, optional
Indices to apply to the input signal. Either None, a integer, list-like, slice to apply
to each feature, or a list-like of lists/objects with a 1:1 correspondence to the
features present in the Bank. If provided, takes precedence over any values given in
`Bank.add`. Default is None, which will use indices from `Bank.add`.
columns : {None, list}, optional
Columns to use if providing a dataframe. Default is None (uses all columns).
Returns
-------
feats : numpy.ndarray
Computed features.
"""
# standardize the input signal
if isinstance(signal, DataFrame):
columns = columns if columns is not None else signal.columns
x = signal[columns].values.astype(float_)
else:
try:
x = asarray(signal, dtype=float_)
except ValueError as e:
raise ArrayConversionError("Error converting signal to ndarray") from e
axis, index_axis = normalize_axes(x.ndim, axis, index_axis)
if index_axis is None:
indices = [...] * len(self)
else:
if indices is None:
indices = self._indices
else:
indices = normalize_indices(len(self), indices)
# get the number of features that will results. Needed to allocate the feature array
if index_axis is None:
# don't have to move any other axes than the computation axis
x = moveaxis(x, axis, -1)
# number of feats is 1 per
n_feats = [1] * len(self)
feats = zeros((sum(n_feats),) + x.shape[:-1], dtype=float_)
else:
# move both the computation and index axis. do this in two steps to allow for undoing
# just the index axis swap later. The index_axis has been adjusted appropriately
# to match this axis move in 2 steps
x = moveaxis(x, axis, -1)
x = moveaxis(x, index_axis, 0)
n_feats = []
for ind in indices:
n_feats.append(get_n_feats(x.shape[0], ind))
feats = zeros((sum(n_feats),) + x.shape[1:-1], dtype=float_)
feat_i = 0 # keep track of where in the feature array we are
for i, ft in enumerate(self._feats):
feats[feat_i : feat_i + n_feats[i]] = ft.compute(
x[indices[i]], fs=fs, axis=-1
)
feat_i += n_feats[i]
# Move the shape back to the correct one.
# only have to do this if there is an index axis, because otherwise the array is still in
# the same order as originally
if index_axis is not None:
feats = moveaxis(feats, 0, index_axis) # undo the previous swap/move
return feats
class Feature(ABC):
"""
Base feature class
"""
def __str__(self):
return self.__class__.__name__
def __repr__(self):
s = self.__class__.__name__ + "("
for p in self._params:
s += f"{p}={self._params[p]!r}, "
if len(self._params) > 0:
s = s[:-2]
return s + ")"
def __eq__(self, other):
if isinstance(other, type(self)):
# double check the name
eq = str(other) == str(self)
# check the parameters
eq &= other._params == self._params
return eq
else:
return False
__slots__ = ("_params",)
def __init__(self, **params):
self._params = params
@abstractmethod
def compute(self, signal, fs=1.0, *, axis=-1):
"""
Compute the signal feature.
Parameters
----------
signal : array-like
Signal to compute the feature over.
fs : float, optional
Sampling frequency in Hz. Default is 1.0
axis : int, optional
Axis over which to compute the feature. Default is -1 (last dimension)
Returns
-------
feat : numpy.ndarray
ndarray of the computed feature
"""
# move the computation axis to the end
return moveaxis(asarray(signal, dtype=float_), axis, -1)
| [
"numpy.asarray",
"numpy.moveaxis",
"numpy.sum",
"json.load",
"warnings.warn",
"json.dump"
] | [((8060, 8077), 'json.dump', 'json.dump', (['out', 'f'], {}), '(out, f)\n', (8069, 8077), False, 'import json\n'), ((8482, 8494), 'json.load', 'json.load', (['f'], {}), '(f)\n', (8491, 8494), False, 'import json\n'), ((11254, 11275), 'numpy.moveaxis', 'moveaxis', (['x', 'axis', '(-1)'], {}), '(x, axis, -1)\n', (11262, 11275), False, 'from numpy import float_, asarray, zeros, sum, moveaxis\n'), ((11695, 11716), 'numpy.moveaxis', 'moveaxis', (['x', 'axis', '(-1)'], {}), '(x, axis, -1)\n', (11703, 11716), False, 'from numpy import float_, asarray, zeros, sum, moveaxis\n'), ((11733, 11759), 'numpy.moveaxis', 'moveaxis', (['x', 'index_axis', '(0)'], {}), '(x, index_axis, 0)\n', (11741, 11759), False, 'from numpy import float_, asarray, zeros, sum, moveaxis\n'), ((12468, 12498), 'numpy.moveaxis', 'moveaxis', (['feats', '(0)', 'index_axis'], {}), '(feats, 0, index_axis)\n', (12476, 12498), False, 'from numpy import float_, asarray, zeros, sum, moveaxis\n'), ((13906, 13935), 'numpy.asarray', 'asarray', (['signal'], {'dtype': 'float_'}), '(signal, dtype=float_)\n', (13913, 13935), False, 'from numpy import float_, asarray, zeros, sum, moveaxis\n'), ((6856, 6943), 'warnings.warn', 'warn', (['f"""Feature {features!s} already in the Bank, will be duplicated."""', 'UserWarning'], {}), "(f'Feature {features!s} already in the Bank, will be duplicated.',\n UserWarning)\n", (6860, 6943), False, 'from warnings import warn\n'), ((10576, 10605), 'numpy.asarray', 'asarray', (['signal'], {'dtype': 'float_'}), '(signal, dtype=float_)\n', (10583, 10605), False, 'from numpy import float_, asarray, zeros, sum, moveaxis\n'), ((7232, 7301), 'warnings.warn', 'warn', (['"""Feature already in the Bank, will be duplicated."""', 'UserWarning'], {}), "('Feature already in the Bank, will be duplicated.', UserWarning)\n", (7236, 7301), False, 'from warnings import warn\n'), ((11380, 11392), 'numpy.sum', 'sum', (['n_feats'], {}), '(n_feats)\n', (11383, 11392), False, 'from numpy import float_, 
asarray, zeros, sum, moveaxis\n'), ((11907, 11919), 'numpy.sum', 'sum', (['n_feats'], {}), '(n_feats)\n', (11910, 11919), False, 'from numpy import float_, asarray, zeros, sum, moveaxis\n')] |
# Copyright 2017 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest.mock import patch
import pytest
from shipyard_airflow.control.action.actions_validations_id_api import \
ActionsValidationsResource
from shipyard_airflow.errors import ApiError
from tests.unit.control import common
def actions_db(action_id):
"""
replaces the actual db call
"""
if action_id == 'error_it':
return None
else:
return {
'id': '59bb330a-9e64-49be-a586-d253bb67d443',
'name': 'dag_it',
'parameters': None,
'dag_id': 'did2',
'dag_execution_date': '2017-09-06 14:10:08.528402',
'user': 'robot1',
'timestamp': '2017-09-06 14:10:08.528402',
'context_marker': '8-4-4-4-12a'
}
def get_validations(validation_id):
"""
Stub to return validations
"""
if validation_id == '43':
return {
'id': '43',
'action_id': '59bb330a-9e64-49be-a586-d253bb67d443',
'validation_name': 'It has shiny goodness',
'details': 'This was not very shiny.'
}
else:
return None
class TestActionsValidationsResource():
@patch.object(ActionsValidationsResource, 'get_action_validation',
common.str_responder)
def test_on_get(self, api_client):
"""Validate the on_get method returns 200 on success"""
result = api_client.simulate_get(
"/api/v1.0/actions/123456/validations/123456",
headers=common.AUTH_HEADERS)
assert result.status_code == 200
def test_get_action_validation(self):
"""Tests the main response from get all actions"""
action_resource = ActionsValidationsResource()
# stubs for db
action_resource.get_action_db = actions_db
action_resource.get_validation_db = get_validations
validation = action_resource.get_action_validation(
action_id='59bb330a-9e64-49be-a586-d253bb67d443',
validation_id='43')
assert validation[
'action_id'] == '59bb330a-9e64-49be-a586-d253bb67d443'
assert validation['validation_name'] == 'It has shiny goodness'
with pytest.raises(ApiError) as api_error:
action_resource.get_action_validation(
action_id='59bb330a-9e64-49be-a586-d253bb67d443',
validation_id='not a chance')
assert 'Validation not found' in str(api_error)
with pytest.raises(ApiError) as api_error:
validation = action_resource.get_action_validation(
action_id='error_it', validation_id='not a chance')
assert 'Action not found' in str(api_error)
@patch('shipyard_airflow.db.shipyard_db.ShipyardDbAccess.get_action_by_id')
def test_get_action_db(self, mock_get_action_by_id):
action_resource = ActionsValidationsResource()
action_id = '123456789'
action_resource.get_action_db(action_id)
mock_get_action_by_id.assert_called_with(action_id=action_id)
@patch(
'shipyard_airflow.db.shipyard_db.ShipyardDbAccess.get_validation_by_id'
)
def test_get_validation_db(self, mock_get_tasks_by_id):
action_resource = ActionsValidationsResource()
validation_id = '123456'
action_resource.get_validation_db(validation_id)
mock_get_tasks_by_id.assert_called_with(validation_id=validation_id)
| [
"unittest.mock.patch",
"pytest.raises",
"shipyard_airflow.control.action.actions_validations_id_api.ActionsValidationsResource",
"unittest.mock.patch.object"
] | [((1777, 1869), 'unittest.mock.patch.object', 'patch.object', (['ActionsValidationsResource', '"""get_action_validation"""', 'common.str_responder'], {}), "(ActionsValidationsResource, 'get_action_validation', common.\n str_responder)\n", (1789, 1869), False, 'from unittest.mock import patch\n'), ((3294, 3368), 'unittest.mock.patch', 'patch', (['"""shipyard_airflow.db.shipyard_db.ShipyardDbAccess.get_action_by_id"""'], {}), "('shipyard_airflow.db.shipyard_db.ShipyardDbAccess.get_action_by_id')\n", (3299, 3368), False, 'from unittest.mock import patch\n'), ((3638, 3716), 'unittest.mock.patch', 'patch', (['"""shipyard_airflow.db.shipyard_db.ShipyardDbAccess.get_validation_by_id"""'], {}), "('shipyard_airflow.db.shipyard_db.ShipyardDbAccess.get_validation_by_id')\n", (3643, 3716), False, 'from unittest.mock import patch\n'), ((2297, 2325), 'shipyard_airflow.control.action.actions_validations_id_api.ActionsValidationsResource', 'ActionsValidationsResource', ([], {}), '()\n', (2323, 2325), False, 'from shipyard_airflow.control.action.actions_validations_id_api import ActionsValidationsResource\n'), ((3452, 3480), 'shipyard_airflow.control.action.actions_validations_id_api.ActionsValidationsResource', 'ActionsValidationsResource', ([], {}), '()\n', (3478, 3480), False, 'from shipyard_airflow.control.action.actions_validations_id_api import ActionsValidationsResource\n'), ((3817, 3845), 'shipyard_airflow.control.action.actions_validations_id_api.ActionsValidationsResource', 'ActionsValidationsResource', ([], {}), '()\n', (3843, 3845), False, 'from shipyard_airflow.control.action.actions_validations_id_api import ActionsValidationsResource\n'), ((2795, 2818), 'pytest.raises', 'pytest.raises', (['ApiError'], {}), '(ApiError)\n', (2808, 2818), False, 'import pytest\n'), ((3066, 3089), 'pytest.raises', 'pytest.raises', (['ApiError'], {}), '(ApiError)\n', (3079, 3089), False, 'import pytest\n')] |
import random
from grid import Grid
from grid import Cell
class Prim():
    def grid_to_list(self, grid):
        """
        Flatten all cells of the grid matrix into a single list.

        :param grid: a Grid object exposing ``rows``, ``cols`` and a 2-D
            ``grid`` matrix of cells
        :return: all cells contained in the grid, in row-major order
        """
        # Renamed from `list` to avoid shadowing the builtin.
        cells = []
        for r in range(grid.rows):
            for c in range(grid.cols):
                cells.append(grid.grid[r][c])
        return cells

    def generate(self, grid):
        """
        Generate a maze in-place using a randomized Prim-style walk.

        Repeatedly picks a random cell already in the maze, picks a random
        neighbor of it, and links the two whenever the neighbor is not yet
        part of the maze, until every cell has been absorbed.

        :param grid: a Grid object
        :return: None; cells are mutated via ``cell.link``
        """
        remaining = self.grid_to_list(grid)
        first = grid.random_cell()
        path = [first]
        remaining.remove(first)
        while remaining:
            cell = random.choice(path)
            neighbors = grid.get_neighbors(cell)
            neighbor = random.choice(neighbors)
            if neighbor not in path:
                cell.link(neighbor)
                path.append(neighbor)
                remaining.remove(neighbor)
"random.choice"
] | [((819, 838), 'random.choice', 'random.choice', (['path'], {}), '(path)\n', (832, 838), False, 'import random\n'), ((953, 977), 'random.choice', 'random.choice', (['neighbors'], {}), '(neighbors)\n', (966, 977), False, 'import random\n')] |
# This file implements file system operations at the level of inodes.
import time
import secfs.crypto
import secfs.tables
import secfs.access
import secfs.store.tree
import secfs.store.block
from secfs.store.inode import Inode
from secfs.store.tree import Directory
from cryptography.fernet import Fernet
from secfs.types import I, Principal, User, Group
# Module-level share state, populated when a share is mounted.
# usermap contains a map from user ID to their public key according to /.users
usermap = {}
# groupmap contains a map from group ID to the list of members according to /.groups
groupmap = {}
# owner is the user principal that owns the current share
owner = None
# root_i is the i of the root of the current share
root_i = None
def get_inode(i):
    """
    Look up and load the inode for the given i.

    Resolves i to a block hash through the i-tables and deserializes the
    inode stored at that hash. Raises LookupError when i does not resolve.
    """
    ihash = secfs.tables.resolve(i)
    if ihash is not None:
        return Inode.load(ihash)
    raise LookupError("asked to resolve i {}, but i does not exist".format(i))
def init(owner, users, groups):
    """
    init will initialize a new share root as the given user principal. This
    includes setting up . and .. in the root directory, as well as adding the
    .users and .groups files that list trusted user public keys and group
    memberships respectively. This function will only allocate the share's
    root, but not map it to any particular share at the server. The new root's
    i is returned so that this can be done by the caller.
    """
    if not isinstance(owner, User):
        raise TypeError("{} is not a User, is a {}".format(owner, type(owner)))
    # Allocate the root directory inode: kind 0 = directory, executable.
    node = Inode()
    node.kind = 0
    node.ex = True
    node.ctime = time.time()
    node.mtime = node.ctime
    ihash = secfs.store.block.store(node.bytes(), None) # inodes not encrypted
    # Map the root inode to a fresh i owned by `owner`.
    root_i = secfs.tables.modmap(owner, I(owner), ihash)
    if root_i == None:
        raise RuntimeError
    # Wire up "." and ".." directory entries, both pointing at the root.
    new_ihash = secfs.store.tree.add(root_i, b'.', root_i)
    secfs.tables.modmap(owner, root_i, new_ihash)
    new_ihash = secfs.store.tree.add(root_i, b'..', root_i) # TODO(eforde): why would .. be mapped to root_i?
    secfs.tables.modmap(owner, root_i, new_ihash)
    print("CREATED ROOT AT", new_ihash)
    # Bootstrap files holding the trusted-user keys and group memberships.
    init = {
        b".users": users,
        b".groups": groups,
    }
    import pickle
    for fn, c in init.items():
        bts = pickle.dumps(c)
        # Regular file inode (kind 1) for each bootstrap file.
        node = Inode()
        node.kind = 1
        node.size = len(bts)
        # NOTE(review): mtime is copied from ctime *before* ctime is
        # refreshed below — the two assignments look reversed; confirm.
        node.mtime = node.ctime
        node.ctime = time.time()
        node.blocks = [secfs.store.block.store(bts, None)] # don't encrypt init
        ihash = secfs.store.block.store(node.bytes(), None) # inodes not encrypted
        i = secfs.tables.modmap(owner, I(owner), ihash)
        link(owner, i, root_i, fn)
    return root_i
def _create(parent_i, name, create_as, create_for, isdir, encrypt):
    """
    _create allocates a new file, and links it into the directory at parent_i
    with the given name. The new file is owned by create_for, but is created
    using the credentials of create_as. This distinction is necessary as a user
    principal is needed for the final i when creating a file as a group.

    :param parent_i: I of the directory the new entry is linked into
    :param name: entry name (bytes)
    :param create_as: User whose credentials perform the creation
    :param create_for: Principal (User or Group) that will own the file
    :param isdir: create a directory (with . and ..) instead of a plain file
    :param encrypt: mark the new inode's blocks as encrypted
    :return: the I of the newly created file/directory
    :raises PermissionError: when create_as may not write into parent_i, or
        create_for is an unknown group
    """
    if not isinstance(parent_i, I):
        raise TypeError("{} is not an I, is a {}".format(parent_i, type(parent_i)))
    if not isinstance(create_as, User):
        raise TypeError("{} is not a User, is a {}".format(create_as, type(create_as)))
    if not isinstance(create_for, Principal):
        raise TypeError("{} is not a Principal, is a {}".format(create_for, type(create_for)))
    assert create_as.is_user() # only users can create
    assert create_as == create_for or create_for.is_group() # create for yourself or for a group
    if create_for.is_group() and create_for not in groupmap:
        raise PermissionError("cannot create for unknown group {}".format(create_for))
    # This check is performed by link() below, but better to fail fast
    if not secfs.access.can_write(create_as, parent_i):
        if parent_i.p.is_group():
            raise PermissionError("cannot create in group-writeable directory {0} as {1}; user is not in group".format(parent_i, create_as))
        else:
            raise PermissionError("cannot create in user-writeable directory {0} as {1}".format(parent_i, create_as))
    # TODO(eforde): encrypt if parent directory is encrypted
    # encrypt = encrypt or parent_i.encrypted
    node = Inode()
    node.encrypted = 1 if encrypt else 0
    node.ctime = time.time()
    node.mtime = node.ctime
    node.kind = 0 if isdir else 1
    node.ex = isdir
    # store the newly created inode on the server
    new_hash = secfs.store.block.store(node.bytes(), None) # inodes not encrypted
    # map the block to an i owned by create_for, created with credentials of create_as
    new_i = secfs.tables.modmap(create_as, I(create_for), new_hash)
    if isdir:
        # create . and .. if this is a directory
        table_key = secfs.tables.get_itable_key(create_for, create_as)
        new_ihash = secfs.store.tree.add(new_i, b'.', new_i, table_key)
        secfs.tables.modmap(create_as, new_i, new_ihash)
        new_ihash = secfs.store.tree.add(new_i, b'..', parent_i, table_key)
        secfs.tables.modmap(create_as, new_i, new_ihash)
    # link the new i into the directory at parent_i with the given name
    link(create_as, new_i, parent_i, name)
    return new_i
def create(parent_i, name, create_as, create_for, encrypt):
    """
    Create a new regular (non-directory) file under parent_i.

    Thin wrapper around secfs.fs._create with isdir=False; see _create for
    the full parameter semantics.
    """
    return _create(parent_i, name, create_as, create_for, False, encrypt)
def mkdir(parent_i, name, create_as, create_for, encrypt):
    """
    Create a new directory (including its . and .. entries) under parent_i.

    Thin wrapper around secfs.fs._create with isdir=True; see _create for
    the full parameter semantics.
    """
    return _create(parent_i, name, create_as, create_for, True, encrypt)
def read(read_as, i, off, size):
    """
    Return the byte range [off:off+size] of the file at i.

    read_as must be a User with read permission on i; otherwise a
    PermissionError is raised.
    """
    if not isinstance(i, I):
        raise TypeError("{} is not an I, is a {}".format(i, type(i)))
    if not isinstance(read_as, User):
        raise TypeError("{} is not a User, is a {}".format(read_as, type(read_as)))
    if not secfs.access.can_read(read_as, i):
        if i.p.is_group():
            msg = "cannot read from group-readable file {0} as {1}; user is not in group".format(i, read_as)
        else:
            msg = "cannot read from user-readable file {0} as {1}".format(i, read_as)
        raise PermissionError(msg)
    node = get_inode(i)
    table_key = secfs.tables.get_itable_key(i.p, read_as)
    return node.read(table_key)[off:off + size]
def write(write_as, i, off, buf):
    """
    Write writes the given bytes into the file at i at the given offset.

    Extends the file when off + len(buf) passes the current end. Returns
    the number of bytes written (always len(buf)). Raises PermissionError
    when write_as may not write to i.
    """
    if not isinstance(i, I):
        raise TypeError("{} is not an I, is a {}".format(i, type(i)))
    if not isinstance(write_as, User):
        raise TypeError("{} is not a User, is a {}".format(write_as, type(write_as)))
    if not secfs.access.can_write(write_as, i):
        if i.p.is_group():
            raise PermissionError("cannot write to group-owned file {0} as {1}; user is not in group".format(i, write_as))
        else:
            raise PermissionError("cannot write to user-owned file {0} as {1}".format(i, write_as))
    node = get_inode(i)
    table_key = secfs.tables.get_itable_key(i.p, write_as)
    # TODO: this is obviously stupid -- should not get rid of blocks that haven't changed
    bts = node.read(table_key)
    # write also allows us to extend a file
    # NOTE(review): when off > len(bts) the gap is not zero-filled — the
    # result is simply bts[:off] + buf, shorter than off + len(buf); confirm
    # callers never write past EOF with a hole.
    if off + len(buf) > len(bts):
        bts = bts[:off] + buf
    else:
        bts = bts[:off] + buf + bts[off+len(buf):]
    # update the inode
    node.blocks = [secfs.store.block.store(bts, table_key if node.encrypted else None)]
    node.mtime = time.time()
    node.size = len(bts)
    # put new hash in tree
    new_hash = secfs.store.block.store(node.bytes(), None) # inodes not encrypted
    secfs.tables.modmap(write_as, i, new_hash)
    return len(buf)
def rename(parent_i_old, name_old, parent_i_new, name_new, rename_as):
    """
    Rename renames the given file in parent_i_old into parent_i_new as name_new

    Returns the I of the moved entry. Raises PermissionError when rename_as
    may not write to the destination directory.
    """
    if not isinstance(parent_i_old, I):
        raise TypeError("{} is not an I, is a {}".format(parent_i_old, type(parent_i_old)))
    if not isinstance(parent_i_new, I):
        raise TypeError("{} is not an I, is a {}".format(parent_i_new, type(parent_i_new)))
    if not isinstance(rename_as, User):
        raise TypeError("{} is not a User, is a {}".format(rename_as, type(rename_as)))
    # NOTE(review): only write access on the *new* parent is checked; the
    # old parent is modified below without a can_write check — confirm.
    if not secfs.access.can_write(rename_as, parent_i_new):
        raise PermissionError("no permission to rename {} to {} in new directory {}".format(name_old, name_new, parent_i_new))
    # Fetch i we're moving
    i = secfs.store.tree.find_under(parent_i_old, name_old, rename_as)
    # Remove i from old directory
    table_key = secfs.tables.get_itable_key(parent_i_old.p, rename_as)
    new_ihash = secfs.store.tree.remove(parent_i_old, name_old, table_key)
    secfs.tables.modmap(rename_as, parent_i_old, new_ihash)
    # Add i to new directory
    table_key = secfs.tables.get_itable_key(parent_i_new.p, rename_as)
    new_ihash = secfs.store.tree.add(parent_i_new, name_new, i, table_key)
    secfs.tables.modmap(rename_as, parent_i_new, new_ihash)
    return i
def unlink(parent_i, i, name, remove_as):
    """
    Unlink removes the given file from the parent_inode

    Removes the directory entry `name` from parent_i and drops i from the
    i-tables. Raises PermissionError when remove_as may not write to i.
    """
    if not isinstance(parent_i, I):
        raise TypeError("{} is not an I, is a {}".format(parent_i, type(parent_i)))
    if not isinstance(remove_as, User):
        raise TypeError("{} is not a User, is a {}".format(remove_as, type(remove_as)))
    assert remove_as.is_user() # only users can remove
    if not secfs.access.can_write(remove_as, i):
        if i.p.is_group():
            raise PermissionError("cannot remove group-owned file {0} as {1}; user is not in group".format(i, remove_as))
        else:
            raise PermissionError("cannot remove user-owned file {0} as {1}".format(i, remove_as))
    # NOTE(review): the itable key is derived from i.p, but tree.remove
    # operates on parent_i — rename() uses the parent's key instead; confirm
    # which principal's key the parent directory is encrypted under.
    table_key = secfs.tables.get_itable_key(i.p, remove_as)
    new_ihash = secfs.store.tree.remove(parent_i, name, table_key)
    secfs.tables.modmap(remove_as, parent_i, new_ihash)
    # TODO(magendanz) remove file and inode from server using secfs.store.blocks
    secfs.tables.remove(i)
def rmdir(parent_i, i, name, remove_as):
    """
    rmdir removes the given directory from the parent_inode as well as all subfiles

    Recursively removes every child entry (except . and ..) before removing
    the directory itself. Permission to delete every child is verified up
    front so the recursion never fails halfway through.

    NOTE(review): the return type is inconsistent — a list of removed I's
    for directories, but a single I for plain files (the else branch); the
    recursive `sub_is += rmdir(...)` call relies on the list form. Confirm
    callers and consider returning [i] in the file branch.
    """
    if not isinstance(parent_i, I):
        raise TypeError("{} is not an I, is a {}".format(parent_i, type(parent_i)))
    if not isinstance(remove_as, User):
        raise TypeError("{} is not a User, is a {}".format(remove_as, type(remove_as)))
    assert remove_as.is_user() # only users can remove
    if not secfs.access.can_write(remove_as, i):
        if i.p.is_group():
            raise PermissionError("cannot remove group-owned file {0} as {1}; user is not in group".format(i, remove_as))
        else:
            raise PermissionError("cannot remove user-owned file {0} as {1}".format(i, remove_as))
    print("Permissions: {} can edit {} owned file".format(remove_as, i))
    table_key = secfs.tables.get_itable_key(i.p, remove_as)
    # recursive rm of all subfiles/subdirs
    inode = get_inode(i)
    sub_is = []
    # pass to unlink if not dir
    if inode.kind == 0:
        dr = Directory(i, table_key)
        subfiles = [(sub_name, sub_i) for sub_name, sub_i in dr.children if ((sub_name != b'.') and (sub_name != b'..'))]
        print("Subfiles to try and rm {}".format(subfiles))
        # confirm that can delete all subfiles/subdirs before starting to delete
        for child_name, child_i in subfiles:
            print("Checking permissions. {} can edit {}".format(remove_as, child_i))
            if not secfs.access.can_write(remove_as, child_i):
                raise PermissionError("cannot remove group-owned file {0} as {1}; user is not in group".format(child_i, remove_as))
        for child_name, child_i in subfiles:
            print("Recusing to delete child {}".format(child_name))
            sub_is += rmdir(i, child_i, child_name, remove_as)
        # TODO(magendanz) do we need to delete . and ..?
        new_ihash = secfs.store.tree.remove(parent_i, name, table_key)
        #if parent_i.p != remove_as:
        #    p_i = Group.(ctx.gid)
        secfs.tables.modmap(remove_as, parent_i, new_ihash)
        #TODO(magendanz) remove file and inode from server using secfs.store.blocks
        secfs.tables.remove(i)
        sub_is.append(i)
        return sub_is
    else:
        unlink(parent_i, i, name, remove_as)
        return i
def readdir(i, off, read_as):
    """
    List the entries of the directory at i, starting at offset off.

    Each returned item is a tuple (entry_i, index); the index can be passed
    back later as off to resume the listing from that point.
    """
    table_key = secfs.tables.get_itable_key(i.p, read_as)
    directory = Directory(i, table_key)
    if directory is None:
        return None
    entries = []
    for index, child in enumerate(directory.children):
        if index >= off:
            entries.append((child, index + 1))
    return entries
def link(link_as, i, parent_i, name):
    """
    Adds the given i into the given parent directory under the given name.

    Raises PermissionError when link_as may not write to parent_i.
    """
    if not isinstance(parent_i, I):
        raise TypeError("{} is not an I, is a {}".format(parent_i, type(parent_i)))
    if not isinstance(i, I):
        raise TypeError("{} is not an I, is a {}".format(i, type(i)))
    if not isinstance(link_as, User):
        raise TypeError("{} is not a User, is a {}".format(link_as, type(link_as)))
    if not secfs.access.can_write(link_as, parent_i):
        if parent_i.p.is_group():
            raise PermissionError("cannot create in group-writeable directory {0} as {1}; user is not in group".format(parent_i, link_as))
        else:
            raise PermissionError("cannot create in user-writeable directory {0} as {1}".format(parent_i, link_as))
    # Add the entry to the parent's directory tree, then record the parent's
    # new inode hash in the i-tables.
    table_key = secfs.tables.get_itable_key(parent_i.p, link_as)
    parent_ihash = secfs.store.tree.add(parent_i, name, i, table_key)
    secfs.tables.modmap(link_as, parent_i, parent_ihash)
| [
"secfs.types.I",
"pickle.dumps",
"secfs.store.tree.Directory",
"secfs.store.inode.Inode.load",
"secfs.store.inode.Inode",
"time.time"
] | [((922, 939), 'secfs.store.inode.Inode.load', 'Inode.load', (['ihash'], {}), '(ihash)\n', (932, 939), False, 'from secfs.store.inode import Inode\n'), ((1557, 1564), 'secfs.store.inode.Inode', 'Inode', ([], {}), '()\n', (1562, 1564), False, 'from secfs.store.inode import Inode\n'), ((1619, 1630), 'time.time', 'time.time', ([], {}), '()\n', (1628, 1630), False, 'import time\n'), ((4365, 4372), 'secfs.store.inode.Inode', 'Inode', ([], {}), '()\n', (4370, 4372), False, 'from secfs.store.inode import Inode\n'), ((4431, 4442), 'time.time', 'time.time', ([], {}), '()\n', (4440, 4442), False, 'import time\n'), ((7687, 7698), 'time.time', 'time.time', ([], {}), '()\n', (7696, 7698), False, 'import time\n'), ((12899, 12922), 'secfs.store.tree.Directory', 'Directory', (['i', 'table_key'], {}), '(i, table_key)\n', (12908, 12922), False, 'from secfs.store.tree import Directory\n'), ((1780, 1788), 'secfs.types.I', 'I', (['owner'], {}), '(owner)\n', (1781, 1788), False, 'from secfs.types import I, Principal, User, Group\n'), ((2295, 2310), 'pickle.dumps', 'pickle.dumps', (['c'], {}), '(c)\n', (2307, 2310), False, 'import pickle\n'), ((2327, 2334), 'secfs.store.inode.Inode', 'Inode', ([], {}), '()\n', (2332, 2334), False, 'from secfs.store.inode import Inode\n'), ((2439, 2450), 'time.time', 'time.time', ([], {}), '()\n', (2448, 2450), False, 'import time\n'), ((4789, 4802), 'secfs.types.I', 'I', (['create_for'], {}), '(create_for)\n', (4790, 4802), False, 'from secfs.types import I, Principal, User, Group\n'), ((11311, 11334), 'secfs.store.tree.Directory', 'Directory', (['i', 'table_key'], {}), '(i, table_key)\n', (11320, 11334), False, 'from secfs.store.tree import Directory\n'), ((2654, 2662), 'secfs.types.I', 'I', (['owner'], {}), '(owner)\n', (2655, 2662), False, 'from secfs.types import I, Principal, User, Group\n')] |
from __future__ import division
import io
import matplotlib as mpl
import matplotlib.pyplot as plt
import networkx as nx
import numpy
import os
import tensorflow as tf
def figure_to_buff(figure):
    """Render the current matplotlib plot to an in-memory PNG buffer.

    The supplied figure is closed as a side effect (which also keeps it
    from being displayed inline in a notebook), so it is inaccessible
    after this call. Returns a BytesIO rewound to position 0.
    """
    buffer = io.BytesIO()
    plt.savefig(buffer, format='png')
    plt.close(figure)
    buffer.seek(0)
    return buffer
def generate_edge_weight_buffer(nodes):
    """Render a directed graph of nodes and their edge weights to a PNG buffer.

    :param nodes: mapping whose values expose ``identity``, ``stake`` and an
        ``edges`` list of {'first': target_identity, 'second': weight} dicts
        (presumably — TODO confirm against the caller).
    :return: a BytesIO containing the rendered PNG, rewound to position 0.
    """
    b_nodes = list(nodes.values())
    print(b_nodes)
    G = nx.DiGraph()
    total_stake = sum([node.stake for node in b_nodes])
    # Build node sizes in proportion to stake held within the graph.
    node_sizes = []
    node_labels = {}
    for node in b_nodes:
        G.add_node(node.identity)
        node_sizes.append(25 + 500 * (node.stake / total_stake))
        node_labels[node.identity] = str(node.identity)
    # Edge colors (alphas and weight) reflect attribution weights of each
    # connection. Self-loops get an empty label.
    edge_colors = {}
    edge_labels = {}
    for node in b_nodes:
        for edge in node.edges:
            if (node.identity, edge['first']) not in edge_labels:
                G.add_edge(node.identity, edge['first'])
                edge_colors[(node.identity,
                             edge['first'])] = float(edge['second'])
                if node.identity != edge['first']:
                    edge_labels[(
                        node.identity,
                        edge['first'])] = "%.3f" % float(edge['second'])
                else:
                    edge_labels[(node.identity, edge['first'])] = ""
    # Set edge weights.
    for u, v, d in G.edges(data=True):
        d['weight'] = edge_colors[(u, v)]
    edges, weights = zip(*nx.get_edge_attributes(G, 'weight').items())
    # Clear Matplotlib buffer and create new figure.
    plt.cla()
    plt.clf()
    figure = plt.figure(figsize=(20, 15))
    pos = nx.layout.circular_layout(G)
    # NOTE: `nodes` is rebound here, shadowing the parameter (no longer
    # needed at this point).
    nodes = nx.draw_networkx_nodes(G,
                                   pos,
                                   node_size=node_sizes,
                                   node_color='blue')
    edges = nx.draw_networkx_edges(G,
                                   pos,
                                   arrowstyle='->',
                                   arrowsize=15,
                                   edge_color=weights,
                                   edge_cmap=plt.cm.Blues,
                                   width=5)
    edge_labels = nx.draw_networkx_edge_labels(G,
                                             pos,
                                             edge_labels=edge_labels,
                                             with_labels=True,
                                             label_pos=0.3)
    # Nudge node labels slightly above the node markers.
    for node in b_nodes:
        pos[node.identity] = pos[node.identity] + numpy.array([0, 0.1])
    labels = nx.draw_networkx_labels(G, pos, node_labels)
    # Save the plot to a PNG in memory.
    buf = io.BytesIO()
    plt.savefig(buf, format='png')
    # Closing the figure prevents it from being displayed directly inside
    # the notebook.
    plt.close(figure)
    buf.seek(0)
    return buf
| [
"networkx.draw_networkx_edges",
"matplotlib.pyplot.savefig",
"networkx.draw_networkx_edge_labels",
"networkx.get_edge_attributes",
"networkx.DiGraph",
"io.BytesIO",
"matplotlib.pyplot.clf",
"matplotlib.pyplot.close",
"networkx.draw_networkx_nodes",
"matplotlib.pyplot.figure",
"networkx.draw_netw... | [((406, 418), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (416, 418), False, 'import io\n'), ((423, 453), 'matplotlib.pyplot.savefig', 'plt.savefig', (['buf'], {'format': '"""png"""'}), "(buf, format='png')\n", (434, 453), True, 'import matplotlib.pyplot as plt\n'), ((552, 569), 'matplotlib.pyplot.close', 'plt.close', (['figure'], {}), '(figure)\n', (561, 569), True, 'import matplotlib.pyplot as plt\n'), ((705, 717), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (715, 717), True, 'import networkx as nx\n'), ((2018, 2027), 'matplotlib.pyplot.cla', 'plt.cla', ([], {}), '()\n', (2025, 2027), True, 'import matplotlib.pyplot as plt\n'), ((2032, 2041), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (2039, 2041), True, 'import matplotlib.pyplot as plt\n'), ((2055, 2083), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(20, 15)'}), '(figsize=(20, 15))\n', (2065, 2083), True, 'import matplotlib.pyplot as plt\n'), ((2095, 2123), 'networkx.layout.circular_layout', 'nx.layout.circular_layout', (['G'], {}), '(G)\n', (2120, 2123), True, 'import networkx as nx\n'), ((2136, 2207), 'networkx.draw_networkx_nodes', 'nx.draw_networkx_nodes', (['G', 'pos'], {'node_size': 'node_sizes', 'node_color': '"""blue"""'}), "(G, pos, node_size=node_sizes, node_color='blue')\n", (2158, 2207), True, 'import networkx as nx\n'), ((2325, 2444), 'networkx.draw_networkx_edges', 'nx.draw_networkx_edges', (['G', 'pos'], {'arrowstyle': '"""->"""', 'arrowsize': '(15)', 'edge_color': 'weights', 'edge_cmap': 'plt.cm.Blues', 'width': '(5)'}), "(G, pos, arrowstyle='->', arrowsize=15, edge_color=\n weights, edge_cmap=plt.cm.Blues, width=5)\n", (2347, 2444), True, 'import networkx as nx\n'), ((2669, 2768), 'networkx.draw_networkx_edge_labels', 'nx.draw_networkx_edge_labels', (['G', 'pos'], {'edge_labels': 'edge_labels', 'with_labels': '(True)', 'label_pos': '(0.3)'}), '(G, pos, edge_labels=edge_labels, with_labels=\n True, label_pos=0.3)\n', (2697, 2768), True, 
'import networkx as nx\n'), ((3063, 3107), 'networkx.draw_networkx_labels', 'nx.draw_networkx_labels', (['G', 'pos', 'node_labels'], {}), '(G, pos, node_labels)\n', (3086, 3107), True, 'import networkx as nx\n'), ((3159, 3171), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (3169, 3171), False, 'import io\n'), ((3176, 3206), 'matplotlib.pyplot.savefig', 'plt.savefig', (['buf'], {'format': '"""png"""'}), "(buf, format='png')\n", (3187, 3206), True, 'import matplotlib.pyplot as plt\n'), ((3305, 3322), 'matplotlib.pyplot.close', 'plt.close', (['figure'], {}), '(figure)\n', (3314, 3322), True, 'import matplotlib.pyplot as plt\n'), ((3028, 3049), 'numpy.array', 'numpy.array', (['[0, 0.1]'], {}), '([0, 0.1])\n', (3039, 3049), False, 'import numpy\n'), ((1914, 1949), 'networkx.get_edge_attributes', 'nx.get_edge_attributes', (['G', '"""weight"""'], {}), "(G, 'weight')\n", (1936, 1949), True, 'import networkx as nx\n')] |
from .GlobalData import global_data
from .utils.oc import oc
import requests
import time
import logging
class App:
    """A single OpenShift application: build, visit, and scale helpers.

    All cluster operations shell out through the project `oc` wrapper and
    return a human-readable status string rather than raising on failure.
    """
    def __init__(self, deployment, project, template, build_config,route=""):
        self.project = project
        self.template = template
        self.deployment = deployment
        self.build_config = build_config
        self.route = route
        self.logger = logging.getLogger('reliability')

    def build(self, kubeconfig):
        """Start an OpenShift build; bump the global build counter on success."""
        (result, rc) = oc("start-build -n " + self.project + " " + self.build_config, kubeconfig)
        if rc != 0:
            self.logger.error("build_app: Failed to create app " + self.deployment + " in project " + self.project)
            return "App build failed for build config : " + self.build_config
        else:
            with global_data.builds_lock:
                global_data.total_build_count += 1
            return "App build succeeded for build config : " + self.build_config

    def visit(self):
        """HTTP GET the app's route; return True only on a 200 response."""
        visit_success = False
        try:
            r = requests.get("http://" + self.route + "/")
            self.logger.info(str(r.status_code) + ": visit: " + self.route)
            if r.status_code == 200:
                visit_success = True
        except Exception as e :
            # Best-effort probe: any network/HTTP error just counts as failure.
            self.logger.error(f"visit: {self.route} Exception {e}")
        return visit_success

    def scale_up(self, kubeconfig):
        """Scale the deployment config to 2 replicas; return a status string."""
        (result, rc) = oc("scale --replicas=2 -n " + self.project + " dc/" + self.deployment, kubeconfig)
        if rc !=0 :
            self.logger.error("scale_up: Failed to scale up " + self.project + "." + self.deployment)
            return "App scale up failed for deployment : " + self.deployment
        else:
            return "App scale up succeeded for deployment : " + self.deployment

    def scale_down(self, kubeconfig):
        """Scale the deployment config back to 1 replica; return a status string."""
        (result, rc) = oc("scale --replicas=1 -n " + self.project + " dc/" + self.deployment, kubeconfig)
        if rc !=0 :
            self.logger.error("scale_down: Failed to scale down " + self.project + "." + self.deployment)
            return "App scale down failed for deployment : " + self.deployment
        else:
            return "App scale down succeeded for deployment : " + self.deployment
class Apps:
    """Registry of App instances keyed by "<project>.<deployment>"."""
    def __init__(self):
        self.failed_apps = 0
        self.apps = {}
        self.logger = logging.getLogger('reliability')

    def add(self, app, kubeconfig):
        """Instantiate the app's template on the cluster and register it.

        Polls the app's route (up to 60 tries, 10s apart) until it answers;
        returns the app on success, None if `oc new-app` itself failed.
        """
        (result, rc) = oc("new-app -n " + app.project + " --template " + app.template, kubeconfig)
        if rc != 0:
            self.logger.error("create_app: Failed to create app " + app.deployment + " in project " + app.project)
            return None
        else:
            self.apps[app.project + "." + app.deployment] = app
            (route,rc) = oc("get route --no-headers -n " + app.project + " | awk {'print $2'} | grep " + app.template, kubeconfig)
            if rc == 0:
                app.route = route.rstrip()
            max_tries = 60
            current_tries = 0
            visit_success = False
            while not visit_success and current_tries <= max_tries:
                self.logger.info(app.template + " route not available yet, sleeping 10 seconds")
                time.sleep(10)
                current_tries += 1
                visit_success = app.visit()
            if not visit_success:
                self.failed_apps += 1
                self.logger.error("add_app: " + app.project + "." + app.deployment + " did not become available" )
            return app

    # removing an app just removes the dictionary entry, actual app removed by project deletion
    def remove(self,app):
        self.apps.pop(app.project + "." + app.deployment)

    def simulate(self):
        """Register a canned app without touching the cluster (for dry runs)."""
        apps = {}
        app1 = App('cakephp-mysql-example','cakephp-mysql-example-0','cakephp-mysql-example','cakephp-mysql-example')
        self.apps[app1.project + "." + app1.deployment] = app1
        # app2 = App('nodejs-mongodb-example','nodejs-mongodb-example-1','nodejs-mongodb-example','nodejs-mongodb-example')
        # self.apps[app2.project + "." + app2.deployment] = app2

    def init(self):
        pass
# Shared module-level registry used by the rest of the reliability suite.
all_apps=Apps()

if __name__ == "__main__":
    # Ad-hoc manual smoke test.
    app = App("cakephp-mysql-example", "t1", "cakephp-mysql-example","cakephp-mysql-example")
    apps = Apps()
    # apps.add(app)
    # time.sleep(180)
    app.visit()
    # NOTE(review): scale_up/scale_down/build all require a kubeconfig
    # argument — these calls raise TypeError as written; confirm and pass one.
    app.scale_up()
    time.sleep(30)
    app.scale_down()
    app.build()
| [
"logging.getLogger",
"requests.get",
"time.sleep"
] | [((4449, 4463), 'time.sleep', 'time.sleep', (['(30)'], {}), '(30)\n', (4459, 4463), False, 'import time\n'), ((387, 419), 'logging.getLogger', 'logging.getLogger', (['"""reliability"""'], {}), "('reliability')\n", (404, 419), False, 'import logging\n'), ((2358, 2390), 'logging.getLogger', 'logging.getLogger', (['"""reliability"""'], {}), "('reliability')\n", (2375, 2390), False, 'import logging\n'), ((1035, 1077), 'requests.get', 'requests.get', (["('http://' + self.route + '/')"], {}), "('http://' + self.route + '/')\n", (1047, 1077), False, 'import requests\n'), ((3259, 3273), 'time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', (3269, 3273), False, 'import time\n')] |
#!/usr/bin/env python
from __future__ import unicode_literals
import os
import sys
import tarfile
import shutil
import tempfile
from contextlib import contextmanager
from pymatgen.io.gaussian import GaussianInput, GaussianOutput
from tinydb import TinyDB
@contextmanager
def cd(run_path, cleanup=lambda: True):
    """
    Temporarily work in another directory, creating it if necessary.

    On exit the original working directory is restored and the optional
    `cleanup` callable is invoked.

    :param run_path: directory to switch into ("~" is expanded)
    :param cleanup: zero-argument callable run after the cwd is restored
    """
    home = os.getcwd()
    path = os.path.expanduser(run_path)
    # The docstring promises "creating it if necessary", but the original
    # code never did — chdir into a missing directory raised FileNotFoundError.
    os.makedirs(path, exist_ok=True)
    os.chdir(path)
    try:
        yield
    finally:
        os.chdir(home)
        cleanup()
@contextmanager
def tempdir():
    """
    Create a temporary directory, switch into it, and delete it afterwards.

    Yields the directory path; the working directory is changed to it for
    the duration of the block and restored on exit, after which the whole
    tree is removed.
    """
    dirpath = tempfile.mkdtemp()
    home = os.getcwd()
    os.chdir(dirpath)
    try:
        yield dirpath
    finally:
        os.chdir(home)
        shutil.rmtree(dirpath)
def extract_data_from_tar_file(tar_file):
    """Unpack one calculation archive and harvest its Gaussian results.

    Expects `tar_file` to be a "<name>.tar.gz" in the current directory
    containing td.log, tda.log, nics_singlet.log and nics_triplet.log.
    Returns a dict of excitation energies and averaged NICS isotropic
    shieldings, or False when either NICS job did not terminate properly.
    """
    with tarfile.open(tar_file, 'r:gz') as tar:
        tar.extractall()
    folder = tar_file.replace('.tar.gz', '')
    with cd(folder):
        # TD-DFT lowest singlet/triplet excitation energies.
        tdout = GaussianOutput('td.log')
        td_exit = tdout.read_excitation_energies()
        td_triplet = [e for e in td_exit if 'triplet' in e[3].lower()][0][0]
        td_singlet = [e for e in td_exit if 'singlet' in e[3].lower()][0][0]
        # Same quantities from the Tamm-Dancoff approximation run.
        tdaout = GaussianOutput('tda.log')
        tda_exit = tdaout.read_excitation_energies()
        tda_triplet = [e for e in tda_exit if 'triplet' in e[3].lower()][0][0]
        tda_singlet = [e for e in tda_exit if 'singlet' in e[3].lower()][0][0]
        nicssout = GaussianOutput('nics_singlet.log')
        # occasionally some jobs fail here
        if not nicssout.properly_terminated:
            return False
        # Average the |isotropic| shieldings of the ghost-atom pairs above
        # and below each ring (indices from the end of the atom list —
        # presumably the last 8 entries are the NICS probes; TODO confirm).
        nicss_mag = nicssout.read_magnetic_shielding()
        nicss_six_ring_above = (abs(nicss_mag[-8]['isotropic']) +
                                abs(nicss_mag[-6]['isotropic']))/2
        nicss_six_ring_below = (abs(nicss_mag[-7]['isotropic']) +
                                abs(nicss_mag[-5]['isotropic']))/2
        nicss_five_ring_above = (abs(nicss_mag[-4]['isotropic']) +
                                 abs(nicss_mag[-2]['isotropic']))/2
        nicss_five_ring_below = (abs(nicss_mag[-3]['isotropic']) +
                                 abs(nicss_mag[-1]['isotropic']))/2
        nicst_out = None  # (no-op placeholder removed)
        nicstout = GaussianOutput('nics_triplet.log')
        if not nicstout.properly_terminated:
            return False
        nicst_mag = nicstout.read_magnetic_shielding()
        nicst_six_ring_above = (abs(nicst_mag[-8]['isotropic']) +
                                abs(nicst_mag[-6]['isotropic']))/2
        nicst_six_ring_below = (abs(nicst_mag[-7]['isotropic']) +
                                abs(nicst_mag[-5]['isotropic']))/2
        nicst_five_ring_above = (abs(nicst_mag[-4]['isotropic']) +
                                 abs(nicst_mag[-2]['isotropic']))/2
        nicst_five_ring_below = (abs(nicst_mag[-3]['isotropic']) +
                                 abs(nicst_mag[-1]['isotropic']))/2
        data = {'td_singlet': td_singlet, 'td_triplet': td_triplet,
                'tda_singlet': tda_singlet, 'tda_triplet': tda_triplet,
                'nicss_six_ring_above': nicss_six_ring_above,
                'nicss_six_ring_below': nicss_six_ring_below,
                'nicss_five_ring_above': nicss_five_ring_above,
                'nicss_five_ring_below': nicss_five_ring_below,
                'nicst_six_ring_above': nicst_six_ring_above,
                'nicst_six_ring_below': nicst_six_ring_below,
                'nicst_five_ring_above': nicst_five_ring_above,
                'nicst_five_ring_below': nicst_five_ring_below}
    return data
# Walk every structure record, harvest its calculation archive (if present),
# and collect the results for a single bulk insert at the end.
data_to_write = []
db = TinyDB(os.path.join('..', 'data', 'structures.json'))
systems = list(db.all())
done = 0
for i, system in enumerate(systems):
    input_file = GaussianInput.from_dict(system['input'])
    directory = input_file.title
    tar_name = '{}.tar.gz'.format(directory)
    tar_file = os.path.abspath(os.path.join('..', 'data', 'calculations', tar_name))
    if os.path.isfile(tar_file):
        # extract the data in a temp directory to avoid clobbering any data
        with tempdir() as tmp_dir:
            shutil.copy(tar_file, tmp_dir)
            data = extract_data_from_tar_file(tar_name)
            if not data:
                print('{} did not finish correctly, skipping'.format(directory))
                continue
            # Carry the structure's identifying metadata along with results.
            data.update({'x_sub': system['x_sub'], 'y_sub': system['y_sub'],
                         'z_sub': system['z_sub'], 'nx': system['nx'],
                         'ny': system['ny'], 'title': system['title']})
            data_to_write.append(data)
    # Coarse progress indicator (assumes ~10k systems; 500 records = 5%).
    if i % 500 == 0:
        done += 5
        print('{}% completed'.format(done))
print('writing data')
db = TinyDB(os.path.join('..', 'data', 'calculated-data.json'))
db.insert_multiple(data_to_write)
| [
"tarfile.open",
"pymatgen.io.gaussian.GaussianInput.from_dict",
"os.path.join",
"os.getcwd",
"os.path.isfile",
"os.chdir",
"pymatgen.io.gaussian.GaussianOutput",
"tempfile.mkdtemp",
"shutil.copy",
"shutil.rmtree",
"os.path.expanduser"
] | [((413, 424), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (422, 424), False, 'import os\n'), ((676, 694), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (692, 694), False, 'import tempfile\n'), ((3652, 3697), 'os.path.join', 'os.path.join', (['""".."""', '"""data"""', '"""structures.json"""'], {}), "('..', 'data', 'structures.json')\n", (3664, 3697), False, 'import os\n'), ((3787, 3827), 'pymatgen.io.gaussian.GaussianInput.from_dict', 'GaussianInput.from_dict', (["system['input']"], {}), "(system['input'])\n", (3810, 3827), False, 'from pymatgen.io.gaussian import GaussianInput, GaussianOutput\n'), ((3998, 4022), 'os.path.isfile', 'os.path.isfile', (['tar_file'], {}), '(tar_file)\n', (4012, 4022), False, 'import os\n'), ((4743, 4793), 'os.path.join', 'os.path.join', (['""".."""', '"""data"""', '"""calculated-data.json"""'], {}), "('..', 'data', 'calculated-data.json')\n", (4755, 4793), False, 'import os\n'), ((438, 466), 'os.path.expanduser', 'os.path.expanduser', (['run_path'], {}), '(run_path)\n', (456, 466), False, 'import os\n'), ((512, 526), 'os.chdir', 'os.chdir', (['home'], {}), '(home)\n', (520, 526), False, 'import os\n'), ((722, 744), 'shutil.rmtree', 'shutil.rmtree', (['dirpath'], {}), '(dirpath)\n', (735, 744), False, 'import shutil\n'), ((851, 881), 'tarfile.open', 'tarfile.open', (['tar_file', '"""r:gz"""'], {}), "(tar_file, 'r:gz')\n", (863, 881), False, 'import tarfile\n'), ((998, 1022), 'pymatgen.io.gaussian.GaussianOutput', 'GaussianOutput', (['"""td.log"""'], {}), "('td.log')\n", (1012, 1022), False, 'from pymatgen.io.gaussian import GaussianInput, GaussianOutput\n'), ((1246, 1271), 'pymatgen.io.gaussian.GaussianOutput', 'GaussianOutput', (['"""tda.log"""'], {}), "('tda.log')\n", (1260, 1271), False, 'from pymatgen.io.gaussian import GaussianInput, GaussianOutput\n'), ((1503, 1537), 'pymatgen.io.gaussian.GaussianOutput', 'GaussianOutput', (['"""nics_singlet.log"""'], {}), "('nics_singlet.log')\n", (1517, 1537), False, 'from 
pymatgen.io.gaussian import GaussianInput, GaussianOutput\n'), ((2262, 2296), 'pymatgen.io.gaussian.GaussianOutput', 'GaussianOutput', (['"""nics_triplet.log"""'], {}), "('nics_triplet.log')\n", (2276, 2296), False, 'from pymatgen.io.gaussian import GaussianInput, GaussianOutput\n'), ((3937, 3989), 'os.path.join', 'os.path.join', (['""".."""', '"""data"""', '"""calculations"""', 'tar_name'], {}), "('..', 'data', 'calculations', tar_name)\n", (3949, 3989), False, 'import os\n'), ((4147, 4177), 'shutil.copy', 'shutil.copy', (['tar_file', 'tmp_dir'], {}), '(tar_file, tmp_dir)\n', (4158, 4177), False, 'import shutil\n')] |
import appdaemon.plugins.hass.hassapi as hass
import datetime
import globals
#
# App which turns on the light based on the room the user is currently in
#
#
# Args:
# room_sensor: the sensor which shows the room the user is in. example: sensor.mqtt_room_user_one
# entity: The entity which gets turned on by alexa/snips. example: input_boolean.room_based_light
# mappings:
# livingroom:
# room: name of the room
# entity: entity to turn on
#
# Release Notes
#
# Version 1.2:
# None Check
#
# Version 1.1:
# Using globals
#
# Version 1.0:
# Initial Version
class RoomBasedLightControl(hass.Hass):
    """Turn on the light of the room the user is currently in.

    Listens for state changes on ``entity`` (toggled by Alexa/Snips) and
    forwards the on/off command to the entity mapped to the room reported
    by ``room_sensor``.
    """

    def initialize(self):
        """Read app arguments, build the room->entity map, attach listener."""
        self.listen_state_handle_list = []
        self.timer_handle_list = []
        self.room_sensor = globals.get_arg(self.args, "room_sensor")
        self.entity = globals.get_arg(self.args, "entity")
        self.mappings = self.args["mappings"]
        # room name -> entity to toggle in that room (dict comprehension
        # replaces the manual key-iteration loop)
        self.mappings_dict = {
            mapping["room"]: mapping["entity"]
            for mapping in self.mappings.values()
        }
        self.listen_state_handle_list.append(
            self.listen_state(self.state_change, self.entity))

    def state_change(self, entity, attributes, old, new, kwargs):
        """Forward the on/off state to the entity of the user's current room."""
        self.log("{} turned {}".format(self.friendly_name(self.entity), new))
        room = self.get_state(self.room_sensor)
        self.log("User is in room {}".format(room))
        mapped_entity = self.mappings_dict.get(room)
        self.log("Entity for that room is: {}".format(mapped_entity))
        # ``is not None`` instead of ``!= None``: identity is the idiomatic
        # (and safe) way to test for a missing mapping.
        if mapped_entity is not None:
            if new == "on":
                self.log("Turning {} on".format(mapped_entity))
                self.turn_on(mapped_entity)
            elif new == "off":
                self.log("Turning {} off".format(mapped_entity))
                self.turn_off(mapped_entity)

    def terminate(self):
        """Detach every listener/timer so app reloads don't leak callbacks."""
        for listen_state_handle in self.listen_state_handle_list:
            self.cancel_listen_state(listen_state_handle)
        for timer_handle in self.timer_handle_list:
            self.cancel_timer(timer_handle)
self.cancel_timer(timer_handle) | [
"globals.get_arg"
] | [((745, 786), 'globals.get_arg', 'globals.get_arg', (['self.args', '"""room_sensor"""'], {}), "(self.args, 'room_sensor')\n", (760, 786), False, 'import globals\n'), ((808, 844), 'globals.get_arg', 'globals.get_arg', (['self.args', '"""entity"""'], {}), "(self.args, 'entity')\n", (823, 844), False, 'import globals\n')] |
#!/usr/bin/env python
'''
Calculating the emissions from deposits in Platypus stable accounts
'''
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import cm
from matplotlib.ticker import LinearLocator, EngFormatter, PercentFormatter
from strategy_const import *
from const import *
def boosted_pool_emission_rate(your_stable_deposit, vePTP_held, other_deposit_weights):
    """Share of boosted-pool emissions earned by your deposit + vePTP.

    The boosted weight is the geometric-mean-style sqrt(deposit * vePTP).
    """
    my_weight = np.sqrt(your_stable_deposit * vePTP_held)
    return my_weight / other_deposit_weights
def base_pool_emission_rate(your_stable_deposit, other_stable_deposits):
    """Share of base-pool emissions earned by your deposit (pro-rata by TVL)."""
    return your_stable_deposit / (your_stable_deposit + other_stable_deposits)
# define function with vectorize decorator for extensibility
@np.vectorize
def total_emissions_rate(stable_bankroll,
                         ptp_marketbuy_proportion):
    """Fraction of total PTP emissions this strategy would capture.

    :stable_bankroll: total USD value of the stables you'd invest in the
        Platypus protocol
    :ptp_marketbuy_proportion: proportion of stable_bankroll used to
        market-buy PTP for staking to vePTP
    """
    ptp_bought = stable_bankroll * ptp_marketbuy_proportion / PTP_PRICE
    veptp = HOURS_SPENT_STAKING * HOURLY_STAKED_PTP_vePTP_YIELD * ptp_bought
    deposit = stable_bankroll * (1 - ptp_marketbuy_proportion)
    # Lower bound on competing deposit weights: assume all other staked PTP
    # sits in a single wallet that has been staking as long as we have.
    competing_weights = GLOBAL_PTP_STAKED * HOURLY_STAKED_PTP_vePTP_YIELD * HOURS_SPENT_STAKING
    boosted_share = boosted_pool_emission_rate(deposit, veptp, competing_weights)
    base_share = base_pool_emission_rate(deposit, TVL - deposit)
    return BOOSTING_POOL_ALLOCATION * boosted_share + BASE_POOL_ALLOCATION * base_share
def plot_2d_returns(stable_bankroll, ptp_proportion, returns_array, as_percents = True):
    """Render a 3D surface of strategy emission rates over bankroll choices."""
    fig, ax = plt.subplots(subplot_kw={"projection": "3d"}, figsize=(18, 9))
    surface = ax.plot_surface(stable_bankroll, ptp_proportion, returns_array,
                              cmap=cm.plasma, linewidth=0.5, antialiased=False)
    ax.set_title(f"Monthly Strategy Emissions given PTP staking for {round(HOURS_SPENT_STAKING / 24)} Days")
    # x axis: dollars; y axis: percentage staked; z axis: emission share
    ax.set_xlabel("Strategy Bankroll")
    ax.xaxis.set_major_formatter(EngFormatter(unit="$", places=1, sep="\N{THIN SPACE}"))
    ax.set_ylabel("Percent Market-Bought and Staked")
    ax.yaxis.set_major_formatter(PercentFormatter(xmax=1, decimals=1))
    ax.set_zlabel("Percent of Emissions for Strategy")
    ax.zaxis.set_major_locator(LinearLocator(9))
    ax.zaxis.set_major_formatter(PercentFormatter(xmax=1, decimals=4))
    # colorbar provides the reader a scale for the surface colors
    fig.colorbar(surface, shrink=0.5, aspect=5, format=PercentFormatter(xmax=1, decimals=4))
    plt.show()
def main():
    """Print the assumed market constants, then plot the strategy surface."""
    print(f"Emissions calculations consider PTP/USD: ${round(PTP_PRICE, 3)}\n" +
          f"Reflecting a FDMC of \t${round(FDMC / 10**6)}MM " +
          f"({round(PERCENT_COINS_CIRCULATING * 100)}% of coins available)\n" +
          f"and implying TVL of \t${round(TVL / 10**6)}MM " +
          f"(Mcap/TVL: {round(1 / TVL_TO_CMC_RATIO, 4)})\n" +
          f"with {round(GLOBAL_PTP_STAKED / 10**6, 2)}MM PTP staked for vePTP ({round(PERCENT_PTP_STAKED * 100)}%)")
    # Mesh over every (bankroll, staking-proportion) combination.
    grid_bankroll, grid_proportion = np.meshgrid(stable_deposit_range, ptp_market_buy_bankroll_proportion)
    emission_rates = total_emissions_rate(grid_bankroll, grid_proportion)
    plot_2d_returns(grid_bankroll, grid_proportion, emission_rates)
# Standard entry-point guard: run only when executed as a script.
if __name__ == '__main__':
    main()
| [
"numpy.sqrt",
"matplotlib.ticker.PercentFormatter",
"matplotlib.ticker.LinearLocator",
"matplotlib.ticker.EngFormatter",
"numpy.meshgrid",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show"
] | [((504, 545), 'numpy.sqrt', 'np.sqrt', (['(your_stable_deposit * vePTP_held)'], {}), '(your_stable_deposit * vePTP_held)\n', (511, 545), True, 'import numpy as np\n'), ((2317, 2379), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'subplot_kw': "{'projection': '3d'}", 'figsize': '(18, 9)'}), "(subplot_kw={'projection': '3d'}, figsize=(18, 9))\n", (2329, 2379), True, 'import matplotlib.pyplot as plt\n'), ((3214, 3224), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3222, 3224), True, 'import matplotlib.pyplot as plt\n'), ((3813, 3882), 'numpy.meshgrid', 'np.meshgrid', (['stable_deposit_range', 'ptp_market_buy_bankroll_proportion'], {}), '(stable_deposit_range, ptp_market_buy_bankroll_proportion)\n', (3824, 3882), True, 'import numpy as np\n'), ((2694, 2740), 'matplotlib.ticker.EngFormatter', 'EngFormatter', ([], {'unit': '"""$"""', 'places': '(1)', 'sep': '"""\u2009"""'}), "(unit='$', places=1, sep='\\u2009')\n", (2706, 2740), False, 'from matplotlib.ticker import LinearLocator, EngFormatter, PercentFormatter\n'), ((2822, 2858), 'matplotlib.ticker.PercentFormatter', 'PercentFormatter', ([], {'xmax': '(1)', 'decimals': '(1)'}), '(xmax=1, decimals=1)\n', (2838, 2858), False, 'from matplotlib.ticker import LinearLocator, EngFormatter, PercentFormatter\n'), ((2945, 2961), 'matplotlib.ticker.LinearLocator', 'LinearLocator', (['(9)'], {}), '(9)\n', (2958, 2961), False, 'from matplotlib.ticker import LinearLocator, EngFormatter, PercentFormatter\n'), ((2996, 3032), 'matplotlib.ticker.PercentFormatter', 'PercentFormatter', ([], {'xmax': '(1)', 'decimals': '(4)'}), '(xmax=1, decimals=4)\n', (3012, 3032), False, 'from matplotlib.ticker import LinearLocator, EngFormatter, PercentFormatter\n'), ((3171, 3207), 'matplotlib.ticker.PercentFormatter', 'PercentFormatter', ([], {'xmax': '(1)', 'decimals': '(4)'}), '(xmax=1, decimals=4)\n', (3187, 3207), False, 'from matplotlib.ticker import LinearLocator, EngFormatter, PercentFormatter\n')] |
from types import FunctionType
import numpy as np
import pandas as pd
from functools import partial
from multiprocessing import Pool, cpu_count
def get_levenshtein_distance(str1: str, str2: str) -> float:
    """
    Computes the Levenshtein distance between two strings
    :param str1: first string
    :param str2: second string
    :return: the distance between the two params
    """
    rows = len(str1) + 1
    cols = len(str2) + 1
    dist = np.zeros((rows, cols))
    # First row/column: distance from the empty prefix.
    dist[:, 0] = np.arange(rows)
    dist[0, :] = np.arange(cols)
    for r in range(1, rows):
        for c in range(1, cols):
            substitution_cost = 0 if str1[r - 1] == str2[c - 1] else 1
            dist[r, c] = min(
                dist[r - 1, c] + 1,                       # deletion
                dist[r - 1, c - 1] + substitution_cost,   # match/substitution
                dist[r, c - 1] + 1,                       # insertion
            )
    return dist[rows - 1, cols - 1]
def add_distance_column(filename: str, df: pd.DataFrame) -> pd.DataFrame:
    """
    Add new column to df which contains distance computed using filename
    :param filename: filename to compare to df
    :param df: df with artist or tracks names
    :return: df with new column
    """
    def distance_to(value):
        return get_levenshtein_distance(filename, value)
    # NOTE(review): applymap returns a frame the same shape as df, so this
    # assignment presumably relies on df holding a single column — confirm.
    df['distances'] = df.applymap(distance_to)
    return df
def parallelize_dataframe(df: pd.DataFrame, func: FunctionType, word: str, n_cores: int = max(1, cpu_count() - 1)) -> pd.DataFrame:
    """
    Apply certain func against dataframe parallelling the application
    :param df: DataFrame which contains the required by func
    :param func: func that will be parallelize through df
    :param word: to compute the distance using
    :param n_cores: processes to parallelize the function over
    :return: DataFrame after func applied

    Note: the default ``n_cores`` is clamped to at least 1 so that
    single-core machines do not call ``Pool(0)``, which raises.
    """
    df_split = np.array_split(df, n_cores)  # TODO: add df length check to get n_cores
    bound_func = partial(func, word)
    # Context manager guarantees the worker pool is torn down even if a
    # worker raises, replacing the manual close()/join() pair.
    with Pool(n_cores) as pool:
        result = pd.concat(pool.map(bound_func, df_split))
    return result
| [
"multiprocessing.cpu_count",
"numpy.array_split",
"numpy.zeros",
"functools.partial",
"multiprocessing.Pool"
] | [((462, 488), 'numpy.zeros', 'np.zeros', (['(size_x, size_y)'], {}), '((size_x, size_y))\n', (470, 488), True, 'import numpy as np\n'), ((2007, 2034), 'numpy.array_split', 'np.array_split', (['df', 'n_cores'], {}), '(df, n_cores)\n', (2021, 2034), True, 'import numpy as np\n'), ((2090, 2103), 'multiprocessing.Pool', 'Pool', (['n_cores'], {}), '(n_cores)\n', (2094, 2103), False, 'from multiprocessing import Pool, cpu_count\n'), ((2112, 2131), 'functools.partial', 'partial', (['func', 'word'], {}), '(func, word)\n', (2119, 2131), False, 'from functools import partial\n'), ((1607, 1618), 'multiprocessing.cpu_count', 'cpu_count', ([], {}), '()\n', (1616, 1618), False, 'from multiprocessing import Pool, cpu_count\n')] |
import requests_cache
import os.path
import tempfile
try:
from requests_cache import remove_expired_responses
except ModuleNotFoundError:
from requests_cache.core import remove_expired_responses
def caching(
    cache=False,
    name=None,
    backend="sqlite",
    expire_after=86400,
    allowable_codes=(200,),
    allowable_methods=("GET",),
):
    """
    pygbif caching management

    :param cache: [bool] if ``True`` all http requests are cached;
        if ``False`` (default) caching is turned off
    :param name: [str] the cache name; when backend=sqlite this is the path
        of the sqlite file (defaults to a file in your temporary directory,
        so it is cleaned up after your python session)
    :param backend: [str] one of ``sqlite`` (default), ``memory``,
        ``mongodb`` (experimental, pymongo < 3.0 required), ``redis``
    :param expire_after: [str] timedelta or seconds until cache expiry, or
        None to ignore expiration. default: 86400 seconds (24 hrs)
    :param allowable_codes: [tuple] response codes to cache (default: 200)
    :param allowable_methods: [tuple] http methods to cache (default: 'GET')
    :return: dict of the caching options now in effect

    Note: setting cache=False turns caching off but leaves the backend data
    in place, so caching can be re-enabled without losing the cache; on
    enabling, expired responses are cleaned up.
    """
    default_name = "pygbif_requests_cache"
    if cache:
        if backend == "sqlite" and name is None:
            CACHE_NAME = os.path.join(tempfile.gettempdir(), default_name)
        else:
            CACHE_NAME = default_name
        requests_cache.install_cache(
            cache_name=CACHE_NAME, backend=backend, expire_after=expire_after
        )
        remove_expired_responses()
    else:
        requests_cache.uninstall_cache()
        CACHE_NAME = None
    return {
        "cache": cache,
        "name": CACHE_NAME,
        "backend": backend,
        "expire_after": expire_after,
        "allowable_codes": allowable_codes,
        "allowable_methods": allowable_methods,
    }
| [
"requests_cache.uninstall_cache",
"requests_cache.install_cache",
"tempfile.gettempdir",
"requests_cache.core.remove_expired_responses"
] | [((2867, 2899), 'requests_cache.uninstall_cache', 'requests_cache.uninstall_cache', ([], {}), '()\n', (2897, 2899), False, 'import requests_cache\n'), ((3121, 3220), 'requests_cache.install_cache', 'requests_cache.install_cache', ([], {'cache_name': 'CACHE_NAME', 'backend': 'backend', 'expire_after': 'expire_after'}), '(cache_name=CACHE_NAME, backend=backend,\n expire_after=expire_after)\n', (3149, 3220), False, 'import requests_cache\n'), ((3247, 3273), 'requests_cache.core.remove_expired_responses', 'remove_expired_responses', ([], {}), '()\n', (3271, 3273), False, 'from requests_cache.core import remove_expired_responses\n'), ((3023, 3044), 'tempfile.gettempdir', 'tempfile.gettempdir', ([], {}), '()\n', (3042, 3044), False, 'import tempfile\n')] |
import os, sys
# Each list below holds the candidate values for one hyperparameter of
# theory_exp.py; assign_array_id() picks one value per list from the job
# array id, so the total number of jobs is the product of the list lengths.
exp_id=[
    "exp1.0",
]
env_source=[
    "file",
]
exp_mode = [
    "continuous",
    #"newb",
    #"base",
]
num_theories_init=[
    4,
]
pred_nets_neurons=[
    8,
]
pred_nets_activation=[
    "linear",
    # "leakyRelu",
]
domain_net_neurons=[
    8,
]
domain_pred_mode=[
    "onehot",
]
mse_amp=[
    1e-7,
]
# Tuple-valued settings are passed as escaped string literals on the
# command line and parsed by the experiment script.
simplify_criteria=[
    '\("DLs",0,3,"relative"\)',
]
scheduler_settings=[
    '\("ReduceLROnPlateau",40,0.1\)',
]
optim_type=[
    '\("adam",5e-3\)',
]
optim_domain_type=[
    '\("adam",1e-3\)',
]
reg_amp=[
    1e-8,
]
reg_domain_amp = [
    1e-5,
]
batch_size = [
    2000,
]
loss_core = [
    "DLs",
]
loss_order = [
    -1,
]
loss_decay_scale = [
    "None",
]
is_mse_decay = [
    False,
]
loss_balance_model_influence = [
    False,
]
num_examples = [
    20000,
]
iter_to_saturation = [
    5000,
]
MDL_mode = [
    "both",
]
num_output_dims = [
    2,
]
num_layers = [
    3,
]
is_pendulum = [
    False,
]
date_time = [
    "10-9",
]
# Ten random seeds -> ten repetitions of the single configuration above.
seed = [
    0,
    30,
    60,
    90,
    120,
    150,
    180,
    210,
    240,
    270,
]
def assign_array_id(array_id, param_list):
    """Decode a flat job-array id into one hyperparameter choice per list.

    Treats ``array_id`` as a mixed-radix number whose least-significant
    digit indexes the last list of ``param_list``.

    :param array_id: non-negative integer job index.
    :param param_list: list of candidate-value lists, one per hyperparameter.
    :return: list with one chosen value per entry of ``param_list``.
    """
    if not param_list:
        # Any nonzero remainder means array_id exceeded the number of
        # combinations; 0 is the normal base case, so only warn otherwise.
        if array_id:
            print("redundancy: {0}".format(array_id))
        return []
    choices = param_list[-1]
    radix = len(choices)
    current_param = choices[array_id % radix]
    return assign_array_id(array_id // radix, param_list[:-1]) + [current_param]
# Job-array index passed on the command line selects one hyperparameter
# combination out of the product of all candidate lists.
array_id = int(sys.argv[1])
# Order must match the positional-argument order expected by theory_exp.py.
param_list = [exp_id,
              env_source,
              exp_mode,
              num_theories_init,
              pred_nets_neurons,
              pred_nets_activation,
              domain_net_neurons,
              domain_pred_mode,
              mse_amp,
              simplify_criteria,
              scheduler_settings,
              optim_type,
              optim_domain_type,
              reg_amp,
              reg_domain_amp,
              batch_size,
              loss_core,
              loss_order,
              loss_decay_scale,
              is_mse_decay,
              loss_balance_model_influence,
              num_examples,
              iter_to_saturation,
              MDL_mode,
              num_output_dims,
              num_layers,
              is_pendulum,
              date_time,
              seed,
             ]
param_chosen = assign_array_id(array_id, param_list)
# Build the command line: one positional argument per chosen value, plus
# the array id itself as the final argument.
exec_str = "python ../theory_learning/theory_exp.py"
for param in param_chosen:
    exec_str += " {0}".format(param)
exec_str += " {0}".format(array_id)
print(param_chosen)
print(exec_str)
from shutil import copyfile
# Directory containing this launcher script.
current_PATH = os.path.dirname(os.path.realpath(__file__))
def make_dir(filename):
    """Ensure the parent directory of ``filename`` exists.

    :param filename: path to a file whose parent directory is created
        (including intermediate directories) if missing.
    """
    import os
    directory = os.path.dirname(filename)
    if not os.path.exists(directory):
        print("directory {0} does not exist, created.".format(directory))
        # exist_ok=True replaces the manual try/except-EEXIST dance and is
        # immune to the race where another process creates the directory
        # between the check above and this call.
        os.makedirs(directory, exist_ok=True)
# Output directory is keyed by experiment id and date (second-to-last param).
filename = "../data/" + "{0}_{1}/".format(param_chosen[0], param_chosen[-2])
make_dir(filename)
# Keep a copy of this launcher next to the results for reproducibility.
fc = "run_theory.py"
if not os.path.isfile(filename + fc):
    copyfile(current_PATH + "/" + fc, filename + fc)
# Launch the experiment synchronously with the assembled command line.
os.system(exec_str)
| [
"os.path.realpath",
"os.path.dirname",
"shutil.copyfile",
"os.path.isfile",
"os.system"
] | [((2847, 2866), 'os.system', 'os.system', (['exec_str'], {}), '(exec_str)\n', (2856, 2866), False, 'import os\n'), ((2192, 2218), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (2208, 2218), False, 'import os\n'), ((2757, 2786), 'os.path.isfile', 'os.path.isfile', (['(filename + fc)'], {}), '(filename + fc)\n', (2771, 2786), False, 'import os\n'), ((2796, 2844), 'shutil.copyfile', 'copyfile', (["(current_PATH + '/' + fc)", '(filename + fc)'], {}), "(current_PATH + '/' + fc, filename + fc)\n", (2804, 2844), False, 'from shutil import copyfile\n'), ((2301, 2326), 'os.path.dirname', 'os.path.dirname', (['filename'], {}), '(filename)\n', (2316, 2326), False, 'import os\n'), ((2391, 2416), 'os.path.dirname', 'os.path.dirname', (['filename'], {}), '(filename)\n', (2406, 2416), False, 'import os\n'), ((2456, 2481), 'os.path.dirname', 'os.path.dirname', (['filename'], {}), '(filename)\n', (2471, 2481), False, 'import os\n')] |
import os
import errno
import stat
import logging
from io import BytesIO
from time import time, mktime, strptime
from fuse import FuseOSError, Operations, LoggingMixIn
logger = logging.getLogger('dochub_fs')
def wrap_errno(func):
    """
    @brief Transform Exceptions happening inside func into meaningful
    errno if possible

    KeyError (missing path component) becomes ENOENT; ValueError
    (operation on the wrong node kind) becomes EINVAL.
    """
    from functools import wraps

    # wraps preserves func's __name__/__doc__, so FUSE logging and
    # introspection report the real operation name, not "wrapper".
    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except KeyError:
            raise FuseOSError(errno.ENOENT)
        except ValueError:
            raise FuseOSError(errno.EINVAL)
    return wrapper
class Node:
    """
    @brief Map Dochub API nodes onto filesystem nodes.
            Wraps a JSON-serialized Dochub object and exposes the
            attributes the filesystem layer needs.
    """

    def __init__(self, serialized, fs):
        self.serialized = serialized
        self.fs = fs

    def sub_node(self, serialized):
        """Build a child node sharing this node's filesystem reference."""
        return Node(serialized, self.fs)

    @property
    def is_category(self):
        data = self.serialized
        return 'courses' in data and 'children' in data

    @property
    def is_course(self):
        return 'slug' in self.serialized

    @property
    def is_document(self):
        return 'votes' in self.serialized

    @property
    def is_dir(self):
        # Categories and courses are directories; documents are files.
        return self.is_category or self.is_course

    @property
    def name(self):
        data = self.serialized
        if self.is_course:
            return "{slug} {name}".format(**data)
        if self.is_document:
            return "{name}{file_type}".format(**data)
        return data['name']

    @property
    def size(self):
        # Directories report a conventional 4096-byte size.
        return self.serialized.get('file_size', 4096)

    @property
    def ctime(self):
        if 'date' not in self.serialized:
            return self.fs.mount_time
        parsed = strptime(self.serialized['date'], "%Y-%m-%dT%H:%M:%S.%fZ")
        return int(mktime(parsed))

    # Dochub exposes a single timestamp; reuse it for access/modification.
    atime = ctime
    mtime = ctime

    def getattr(self):
        """Return a stat-like dict: read-only dir or read-only regular file."""
        if self.is_dir:
            mode = stat.S_IFDIR | 0o500
        else:
            mode = stat.S_IFREG | 0o400
        return {
            'st_mode': mode,
            'st_ctime': self.ctime,
            'st_mtime': self.mtime,
            'st_atime': self.atime,
            'st_nlink': 1,
            'st_uid': self.fs.uid,
            'st_gid': self.fs.gid,
            'st_size': self.size,
        }

    @property
    def children(self):
        """Map child name -> Node; raises ValueError on non-directories."""
        if not self.is_dir:
            raise ValueError(
                "Attempt to get direcctory children on non-directory %s" %
                self.serialized['name']
            )
        if self.is_category:
            raw = self.serialized['children'] + self.serialized['courses']
        else:
            # Courses fetch their document list lazily from the API.
            raw = self.fs.api.get_course(self.serialized['slug'])['document_set']
        return {node.name: node for node in map(self.sub_node, raw)}

    @property
    def content(self):
        """Raw document bytes; raises ValueError on non-documents."""
        if not self.is_document:
            raise ValueError(
                "Attempt to get file content on non-file %s" %
                self.serialized['name']
            )
        return self.fs.api.get_document(self.serialized['id'])

    def find(self, path):
        """Walk the breadcrumb list ``path`` down from this node."""
        if not path:
            return self
        return self.children[path[0]].find(path[1:])
class DocumentUpload:
    """
    @brief A file created locally, being buffered before posting to the
            server.
    """

    def __init__(self, fs, course, name):
        self.fs = fs
        self.io = BytesIO()
        now = time()
        self.ctime = now
        self.mtime = now
        self.atime = now
        # "foo.tar.gz" -> name "foo", ext "tar.gz" (split once, from the left)
        self.name, self.ext = name.split('.', 1)
        self.course = course

    @property
    def size(self):
        # Current write position == bytes buffered so far.
        return self.io.tell()

    def getattr(self):
        """Return a stat-like dict describing a write-only regular file."""
        return {
            'st_mode': stat.S_IFREG | 0o200,
            'st_ctime': self.ctime,
            'st_mtime': self.mtime,
            'st_atime': self.atime,
            'st_nlink': 1,
            'st_uid': self.fs.uid,
            'st_gid': self.fs.gid,
            'st_size': self.size,
        }

    def do_upload(self):
        """Rewind the buffer and post the document to DocHub."""
        self.io.seek(0)
        full_name = '.'.join([self.name, self.ext])
        self.fs.api.add_document(course_slug=self.course.serialized['slug'],
                                 name=self.name, file=self.io,
                                 filename=full_name)
def to_breadcrumbs(path):
    """Split a filesystem path into its ordered list of components."""
    parts = []
    remainder, leaf = os.path.split(path)
    while leaf:
        parts.insert(0, leaf)
        remainder, leaf = os.path.split(remainder)
    return parts
class DochubFileSystem(LoggingMixIn, Operations):
    """
    @brief Implementation of filesystem operations
    """
    def __init__(self, api):
        # api: client object used to fetch the course tree and documents.
        self.api = api
        # Timestamp used as ctime/mtime for nodes without their own date.
        self.mount_time = int(time())
        self.uid, self.gid = os.getuid(), os.getgid()
        # In-progress local file creations, keyed by full path.
        self.uploads = {}
        tree = self.api.get_tree()
        assert len(tree) == 1
        self.tree = Node(tree[0], self)
    @wrap_errno
    def find_path(self, path):
        # Pending uploads shadow server-side nodes at the same path.
        # wrap_errno maps KeyError (unknown path) to ENOENT.
        if path in self.uploads:
            return self.uploads[path]
        return self.tree.find(to_breadcrumbs(path))
    def getattr(self, path, fh=None):
        # Delegates to the node's own stat-dict builder.
        return self.find_path(path).getattr()
    def readdir(self, path, fh=None):
        node = self.find_path(path)
        return ['.', '..'] + list(node.children.keys())
    def read(self, path, size, offset, fh=None):
        node = self.find_path(path)
        return node.content[offset:offset+size]
    def create(self, path, mode):
        directory, name = os.path.split(path)
        parent = self.find_path(directory)
        # New files are only allowed directly inside a course directory.
        if not parent.is_course:
            raise Exception()
        if (mode & stat.S_IFREG):
            logger.info("Create file %s", path)
            self.uploads[path] = DocumentUpload(self, parent, name)
        # NOTE(review): 3 appears to be an arbitrary dummy file handle;
        # the handle is not used by the other operations here.
        return 3
    def release(self, path, fh):
        """
        @brief When the file is closed, perform the actual upload to DocHub
        """
        # Only non-empty buffers are uploaded; pop removes the pending entry.
        if path in self.uploads and self.uploads[path].size > 0:
            upload = self.uploads.pop(path)
            upload.do_upload()
    def write(self, path, data, offset, fh=None):
        # Writes are only valid on a pending upload created via create().
        if path in self.uploads:
            upload = self.uploads[path]
            # Reposition only on non-sequential writes.
            if offset != upload.size:
                upload.io.seek(offset)
            self.uploads[path].io.write(data)
            return len(data)
        # Unknown path: signal failure to the FUSE layer.
        return -1
| [
"logging.getLogger",
"time.strptime",
"os.getuid",
"time.mktime",
"io.BytesIO",
"fuse.FuseOSError",
"os.path.split",
"os.getgid",
"time.time"
] | [((180, 210), 'logging.getLogger', 'logging.getLogger', (['"""dochub_fs"""'], {}), "('dochub_fs')\n", (197, 210), False, 'import logging\n'), ((4455, 4474), 'os.path.split', 'os.path.split', (['path'], {}), '(path)\n', (4468, 4474), False, 'import os\n'), ((3558, 3567), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (3565, 3567), False, 'from io import BytesIO\n'), ((3589, 3595), 'time.time', 'time', ([], {}), '()\n', (3593, 3595), False, 'from time import time, mktime, strptime\n'), ((4541, 4562), 'os.path.split', 'os.path.split', (['prefix'], {}), '(prefix)\n', (4554, 4562), False, 'import os\n'), ((5561, 5580), 'os.path.split', 'os.path.split', (['path'], {}), '(path)\n', (5574, 5580), False, 'import os\n'), ((1798, 1856), 'time.strptime', 'strptime', (["self.serialized['date']", '"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {}), "(self.serialized['date'], '%Y-%m-%dT%H:%M:%S.%fZ')\n", (1806, 1856), False, 'from time import time, mktime, strptime\n'), ((4785, 4791), 'time.time', 'time', ([], {}), '()\n', (4789, 4791), False, 'from time import time, mktime, strptime\n'), ((4822, 4833), 'os.getuid', 'os.getuid', ([], {}), '()\n', (4831, 4833), False, 'import os\n'), ((4835, 4846), 'os.getgid', 'os.getgid', ([], {}), '()\n', (4844, 4846), False, 'import os\n'), ((490, 515), 'fuse.FuseOSError', 'FuseOSError', (['errno.ENOENT'], {}), '(errno.ENOENT)\n', (501, 515), False, 'from fuse import FuseOSError, Operations, LoggingMixIn\n'), ((561, 586), 'fuse.FuseOSError', 'FuseOSError', (['errno.EINVAL'], {}), '(errno.EINVAL)\n', (572, 586), False, 'from fuse import FuseOSError, Operations, LoggingMixIn\n'), ((1880, 1889), 'time.mktime', 'mktime', (['t'], {}), '(t)\n', (1886, 1889), False, 'from time import time, mktime, strptime\n')] |
# Generated by Django 2.1.4 on 2019-01-10 04:07
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated migration: renames the board "context*" fields to
    # "contents*" (remove + add, so existing data is NOT carried over),
    # and tightens field options on write_time, course.manager and
    # submithistory.status.
    dependencies = [
        ('algolab_class_API', '0010_submithistory'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='boardquestion',
            name='context',
        ),
        migrations.RemoveField(
            model_name='boardquestion',
            name='context_type',
        ),
        migrations.RemoveField(
            model_name='boardreply',
            name='context',
        ),
        migrations.AddField(
            model_name='boardquestion',
            name='contents',
            field=models.TextField(db_column='Contents', default='내용을 입력하세요.', verbose_name='내용'),
        ),
        migrations.AddField(
            model_name='boardquestion',
            name='contents_type',
            field=models.CharField(choices=[('NOTICE', '공지사항'), ('QUESTION', '질문')], db_column='ContentsType', default='QUESTION', max_length=10, verbose_name='글 종류'),
        ),
        migrations.AddField(
            model_name='boardreply',
            name='contents',
            field=models.TextField(db_column='Contents', default='내용을 입력하세요.', verbose_name='내용'),
        ),
        migrations.AlterField(
            model_name='boardquestion',
            name='write_time',
            field=models.DateTimeField(db_column='WriteTime', verbose_name='작성 시간'),
        ),
        migrations.AlterField(
            model_name='course',
            name='manager',
            field=models.ForeignKey(db_column='Manager', on_delete=django.db.models.deletion.DO_NOTHING, related_name='courseManager_set', to=settings.AUTH_USER_MODEL, verbose_name='교수자'),
        ),
        migrations.AlterField(
            model_name='submithistory',
            name='status',
            field=models.CharField(choices=[('NOT_SOLVED', 'NotSolved'), ('SOLVED', 'Solved'), ('COMPILE_ERROR', 'CompileError'), ('TIME_OVER', 'TimeOver'), ('RUNTIME_ERROR', 'RuntimeError'), ('SERVER_ERROR', 'ServerError')], db_column='Status', default='NOT_SOLVED', max_length=10, verbose_name='제출 결과'),
        ),
    ]
| [
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.db.models.DateTimeField",
"django.db.migrations.RemoveField",
"django.db.models.CharField"
] | [((306, 372), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""boardquestion"""', 'name': '"""context"""'}), "(model_name='boardquestion', name='context')\n", (328, 372), False, 'from django.db import migrations, models\n'), ((417, 488), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""boardquestion"""', 'name': '"""context_type"""'}), "(model_name='boardquestion', name='context_type')\n", (439, 488), False, 'from django.db import migrations, models\n'), ((533, 596), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""boardreply"""', 'name': '"""context"""'}), "(model_name='boardreply', name='context')\n", (555, 596), False, 'from django.db import migrations, models\n'), ((749, 828), 'django.db.models.TextField', 'models.TextField', ([], {'db_column': '"""Contents"""', 'default': '"""내용을 입력하세요."""', 'verbose_name': '"""내용"""'}), "(db_column='Contents', default='내용을 입력하세요.', verbose_name='내용')\n", (765, 828), False, 'from django.db import migrations, models\n'), ((962, 1118), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('NOTICE', '공지사항'), ('QUESTION', '질문')]", 'db_column': '"""ContentsType"""', 'default': '"""QUESTION"""', 'max_length': '(10)', 'verbose_name': '"""글 종류"""'}), "(choices=[('NOTICE', '공지사항'), ('QUESTION', '질문')],\n db_column='ContentsType', default='QUESTION', max_length=10,\n verbose_name='글 종류')\n", (978, 1118), False, 'from django.db import migrations, models\n'), ((1236, 1315), 'django.db.models.TextField', 'models.TextField', ([], {'db_column': '"""Contents"""', 'default': '"""내용을 입력하세요."""', 'verbose_name': '"""내용"""'}), "(db_column='Contents', default='내용을 입력하세요.', verbose_name='내용')\n", (1252, 1315), False, 'from django.db import migrations, models\n'), ((1448, 1513), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'db_column': '"""WriteTime"""', 'verbose_name': '"""작성 시간"""'}), 
"(db_column='WriteTime', verbose_name='작성 시간')\n", (1468, 1513), False, 'from django.db import migrations, models\n'), ((1636, 1815), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'db_column': '"""Manager"""', 'on_delete': 'django.db.models.deletion.DO_NOTHING', 'related_name': '"""courseManager_set"""', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""교수자"""'}), "(db_column='Manager', on_delete=django.db.models.deletion.\n DO_NOTHING, related_name='courseManager_set', to=settings.\n AUTH_USER_MODEL, verbose_name='교수자')\n", (1653, 1815), False, 'from django.db import migrations, models\n'), ((1934, 2238), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('NOT_SOLVED', 'NotSolved'), ('SOLVED', 'Solved'), ('COMPILE_ERROR',\n 'CompileError'), ('TIME_OVER', 'TimeOver'), ('RUNTIME_ERROR',\n 'RuntimeError'), ('SERVER_ERROR', 'ServerError')]", 'db_column': '"""Status"""', 'default': '"""NOT_SOLVED"""', 'max_length': '(10)', 'verbose_name': '"""제출 결과"""'}), "(choices=[('NOT_SOLVED', 'NotSolved'), ('SOLVED', 'Solved'),\n ('COMPILE_ERROR', 'CompileError'), ('TIME_OVER', 'TimeOver'), (\n 'RUNTIME_ERROR', 'RuntimeError'), ('SERVER_ERROR', 'ServerError')],\n db_column='Status', default='NOT_SOLVED', max_length=10, verbose_name=\n '제출 결과')\n", (1950, 2238), False, 'from django.db import migrations, models\n')] |
""" Python wrapper for the Spider API """
from __future__ import annotations
import json
import logging
import time
from datetime import datetime, timedelta
from typing import Any, Dict, ValuesView
from urllib.parse import unquote
import requests
from spiderpy.devices.powerplug import SpiderPowerPlug
from spiderpy.devices.thermostat import SpiderThermostat
# Spider API endpoints used throughout this module.
BASE_URL = "https://spider-api.ithodaalderop.nl"
AUTHENTICATE_URL = BASE_URL + "/api/tokens"
DEVICES_URL = BASE_URL + "/api/devices"
ENERGY_DEVICES_URL = BASE_URL + "/api/devices/energy/energyDevices"
POWER_PLUGS_URL = BASE_URL + "/api/devices/energy/smartPlugs"
ENERGY_MONITORING_URL = BASE_URL + "/api/monitoring/15/devices"
# Default minimum number of seconds between device-cache refreshes.
REFRESH_RATE = 120
_LOGGER = logging.getLogger(__name__)
class SpiderApi:
    """ Interface class for the Spider API.

    Wraps the Itho Daalderop "Spider" cloud API: handles token-based
    authentication (login + refresh) and keeps a throttled local cache of
    thermostats and smart power plugs.
    """
    def __init__(
        self, username: str, password: str, refresh_rate: int = REFRESH_RATE
    ) -> None:
        """ Constructor.

        Args:
            username: Account name; hex-encoded per character because the
                API expects it in that form.
            password: Account password (sent as-is on login).
            refresh_rate: Minimum seconds between cache refreshes.
        """
        self._username = ""
        # Hex-encode the username character by character.
        # NOTE(review): lstrip("0x") strips any leading '0'/'x' characters,
        # not only the "0x" prefix; for hex(ord(c)) this only misbehaves for
        # NUL (value 0), which presumably never occurs in usernames — confirm.
        for char in username:
            self._username += hex(ord(char)).lstrip("0x")
        self._password = password
        # Device caches keyed by device id.
        self._thermostats: Dict[Any, Any] = {}
        self._power_plugs: Dict[Any, Any] = {}
        self._last_refresh: int = 0
        self._access_token: str = ""
        self._refresh_token: str = ""
        # Start "expired" so the first API call triggers a login.
        self._token_expires_at = datetime.now() - timedelta(days=1)
        self._token_expires_in = None
        self._refresh_rate: int = refresh_rate
    def update(self) -> None:
        """ Update the cache.

        Throttled: only hits the API when the cache is older than the
        configured refresh rate.
        """
        current_time = int(time.time())
        if current_time >= (self._last_refresh + self._refresh_rate):
            self.update_thermostats()
            self.update_power_plugs()
            self._last_refresh = current_time
    def update_thermostats(self) -> None:
        """ Retrieve thermostats and refresh the thermostat cache. """
        results = self._request_update(DEVICES_URL)
        # NOTE(review): _request_update raises on failure and never returns
        # False, so this guard looks vestigial — confirm before removing.
        if results is False:
            return
        for thermostat in results:
            # Device type 105 identifies a thermostat in the devices list.
            if thermostat["type"] == 105:
                self._thermostats[thermostat["id"]] = SpiderThermostat(thermostat)
    def get_thermostats(self) -> ValuesView[SpiderThermostat]:
        """ Get all thermostats (refreshing the cache if stale). """
        self.update()
        return self._thermostats.values()
    def get_thermostat(self, unique_id: str) -> SpiderThermostat | None:
        """ Get a thermostat by id, or None when unknown. """
        self.update()
        if unique_id in self._thermostats:
            return self._thermostats[unique_id]
        return None
    def set_temperature(self, thermostat: SpiderThermostat, temperature: float) -> bool:
        """ Set the temperature. Unfortunately, the API requires the complete object.

        Returns True on success, False when the device rejected the value
        or the API call failed.
        """
        if thermostat.set_temperature(temperature):
            url = DEVICES_URL + "/" + thermostat.id
            try:
                # PUT the whole (mutated) device object back to the API.
                self._request_action(url, json.dumps(thermostat.data))
                return True
            except SpiderApiException:
                _LOGGER.error(f"Unable to set temperature to {temperature}.")
        return False
    def set_operation_mode(
        self, thermostat: SpiderThermostat, operation_mode: str
    ) -> bool:
        """ Set the operation mode. Unfortunately, the API requires the complete object.

        Returns True on success, False otherwise.
        """
        if thermostat.set_operation_mode(operation_mode):
            url = DEVICES_URL + "/" + thermostat.id
            try:
                self._request_action(url, json.dumps(thermostat.data))
                return True
            except SpiderApiException:
                _LOGGER.error(
                    f"Unable to set operation mode to {operation_mode}. Is this operation mode supported?"
                )
        return False
    def set_fan_speed(self, thermostat: SpiderThermostat, fan_speed: str) -> bool:
        """ Set the fan speed. Unfortunately, the API requires the complete object.

        Returns True on success, False otherwise.
        """
        if thermostat.set_fan_speed(fan_speed):
            url = DEVICES_URL + "/" + thermostat.id
            try:
                self._request_action(url, json.dumps(thermostat.data))
                return True
            except SpiderApiException:
                _LOGGER.error(
                    f"Unable to set fan speed to {fan_speed}. Is this fan speed supported?"
                )
        return False
    def update_power_plugs(self) -> None:
        """ Retrieve power plugs and refresh the power-plug cache.

        For switchable plugs, also fetches today's energy usage from the
        monitoring endpoint and stores it under "todayUsage".
        """
        results = self._request_update(ENERGY_DEVICES_URL)
        # NOTE(review): see update_thermostats — _request_update never
        # returns False; guard appears vestigial.
        if results is False:
            return
        for power_plug in results:
            if power_plug["isSwitch"]:
                # Midnight today as a Unix timestamp.
                # NOTE(review): strftime("%s") is a platform-specific
                # (glibc) extension, not portable to e.g. Windows.
                today = (
                    datetime.today()
                    .replace(hour=00, minute=00, second=00)
                    .strftime("%s")
                )
                # 96 samples = one day of 15-minute readings; the "000"
                # suffix converts seconds to milliseconds.
                energy_url = (
                    ENERGY_MONITORING_URL
                    + "/"
                    + power_plug["energyDeviceId"]
                    + "/?take=96&start="
                    + str(today)
                    + "000"
                )
                energy_results = self._request_update(energy_url)
                if energy_results is False:
                    continue
                try:
                    power_plug["todayUsage"] = float(
                        energy_results[0]["totalEnergy"]["normal"]
                    ) + float(energy_results[0]["totalEnergy"]["low"])
                except IndexError:
                    _LOGGER.error("Unable to get today energy usage for power plug")
            self._power_plugs[power_plug["id"]] = SpiderPowerPlug(power_plug)
    def get_power_plugs(self) -> ValuesView[SpiderPowerPlug]:
        """ Get all power plugs (refreshing the cache if stale). """
        self.update()
        return self._power_plugs.values()
    def get_power_plug(self, unique_id: str) -> SpiderPowerPlug | None:
        """ Get a power plug by id, or None when unknown. """
        self.update()
        if unique_id in self._power_plugs:
            return self._power_plugs[unique_id]
        return None
    def turn_power_plug_on(self, power_plug: SpiderPowerPlug) -> bool:
        """ Turn the power_plug on. Returns True on success. """
        if power_plug.turn_on():
            url = POWER_PLUGS_URL + "/" + power_plug.id + "/switch"
            try:
                self._request_action(url, "true")
                return True
            except SpiderApiException:
                _LOGGER.error("Unable to turn power plug on.")
        return False
    def turn_power_plug_off(self, power_plug: SpiderPowerPlug) -> bool:
        """ Turn the power plug off. Returns True on success. """
        if power_plug.turn_off():
            url = POWER_PLUGS_URL + "/" + power_plug.id + "/switch"
            try:
                self._request_action(url, "false")
                return True
            except SpiderApiException:
                _LOGGER.error("Unable to turn power plug off.")
        return False
    def _is_authenticated(self) -> bool:
        """ Check if access token is expired; log in / refresh as needed.

        NOTE(review): the return value (True only when a refresh just
        happened) is never used by callers, which invoke this purely for
        its side effects.
        """
        if self._refresh_token == "":
            self._request_login()
        if datetime.now() > self._token_expires_at:
            self._refresh_access_token()
            return True
        return False
    def _request_action(self, url: str, data: str) -> None:
        """ Perform a request to execute an action (PUT).

        Raises:
            SpiderApiException: on transport errors, 401, or any non-200.
        """
        self._is_authenticated()
        headers = {
            "authorization": "Bearer " + self._access_token,
            "Content-Type": "application/json",
            "X-Client-Platform": "android-phone",
            "X-Client-Version": "1.5.9 (3611)",
            "X-Client-Library": "SpiderPy",
        }
        try:
            response = requests.request("PUT", url, data=data, headers=headers)
        except Exception as exception:
            raise SpiderApiException(exception) from exception
        if response.status_code == 401:
            raise SpiderApiException("Access denied. Failed to refresh?")
        if response.status_code != 200:
            raise SpiderApiException(
                f"Unable to perform action. Status code: {response.status_code}. Data: {data}"
            )
    def _request_update(self, url: str) -> Dict[Any, Any]:
        """ Perform a request to update information (GET).

        Returns the decoded JSON body.

        Raises:
            SpiderApiException: on transport errors, 401, or any non-200.
        """
        self._is_authenticated()
        headers = {
            "authorization": "Bearer " + self._access_token,
            "Content-Type": "application/json",
            "X-Client-Platform": "android-phone",
            "X-Client-Version": "1.5.9 (3611)",
            "X-Client-Library": "SpiderPy",
        }
        try:
            response = requests.request("GET", url, headers=headers)
        except Exception as exception:
            raise SpiderApiException(exception) from exception
        if response.status_code == 401:
            raise SpiderApiException("Access denied. Failed to refresh?")
        if response.status_code != 200:
            raise SpiderApiException(
                f"Unable to request update. Status code: {response.status_code}"
            )
        return response.json()
    def _request_login(self) -> None:
        """ Obtain an access/refresh token pair with the stored credentials.

        Raises:
            UnauthorizedException: on transport errors.
            SpiderApiException: on any non-200 response.
        """
        headers = {
            "Content-Type": "application/x-www-form-urlencoded",
            "X-Client-Platform": "android-phone",
            "X-Client-Version": "1.5.9 (3611)",
            "X-Client-Library": "SpiderPy",
        }
        payload = {
            "grant_type": "password",
            "username": self._username,
            "password": self._password,
        }
        try:
            response = requests.request(
                "POST", AUTHENTICATE_URL, data=payload, headers=headers
            )
        except Exception as exception:
            raise UnauthorizedException(exception) from exception
        if response.status_code != 200:
            raise SpiderApiException(
                f"Unable to request login. Status code: {response.status_code}"
            )
        data = response.json()
        self._access_token = data["access_token"]
        # refresh_token arrives URL-encoded; decode before storing.
        self._refresh_token = unquote(data["refresh_token"])
        self._token_expires_in = data["expires_in"]
        # Renew 20 seconds early to avoid racing the server-side expiry.
        self._token_expires_at = datetime.now() + timedelta(
            0, (int(data["expires_in"]) - 20)
        )
    def _refresh_access_token(self) -> None:
        """ Refresh access_token using the stored refresh token.

        Raises:
            SpiderApiException: on any non-200 response.
        """
        headers = {
            "Content-Type": "application/x-www-form-urlencoded",
            "X-Client-Platform": "android-phone",
            "X-Client-Version": "1.5.9 (3611)",
            "X-Client-Library": "SpiderPy",
        }
        payload = {"grant_type": "refresh_token", "refresh_token": self._refresh_token}
        response = requests.request(
            "POST", AUTHENTICATE_URL, data=payload, headers=headers
        )
        data = response.json()
        if response.status_code != 200:
            raise SpiderApiException(
                f"Unable to refresh access token. Status code: {response.status_code}"
            )
        self._access_token = data["access_token"]
        self._refresh_token = unquote(data["refresh_token"])
        self._token_expires_in = data["expires_in"]
        # Renew 20 seconds early to avoid racing the server-side expiry.
        self._token_expires_at = datetime.now() + timedelta(
            0, (int(data["expires_in"]) - 20)
        )
class UnauthorizedException(Exception):
    """Raised when the login request to the Spider API cannot be completed."""
class SpiderApiException(Exception):
    """Raised when a Spider API call fails (transport error or bad status)."""
| [
"logging.getLogger",
"spiderpy.devices.thermostat.SpiderThermostat",
"json.dumps",
"requests.request",
"datetime.datetime.now",
"datetime.datetime.today",
"datetime.timedelta",
"time.time",
"urllib.parse.unquote",
"spiderpy.devices.powerplug.SpiderPowerPlug"
] | [((722, 749), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (739, 749), False, 'import logging\n'), ((9948, 9978), 'urllib.parse.unquote', 'unquote', (["data['refresh_token']"], {}), "(data['refresh_token'])\n", (9955, 9978), False, 'from urllib.parse import unquote\n'), ((10578, 10651), 'requests.request', 'requests.request', (['"""POST"""', 'AUTHENTICATE_URL'], {'data': 'payload', 'headers': 'headers'}), "('POST', AUTHENTICATE_URL, data=payload, headers=headers)\n", (10594, 10651), False, 'import requests\n'), ((10967, 10997), 'urllib.parse.unquote', 'unquote', (["data['refresh_token']"], {}), "(data['refresh_token'])\n", (10974, 10997), False, 'from urllib.parse import unquote\n'), ((1343, 1357), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1355, 1357), False, 'from datetime import datetime, timedelta\n'), ((1360, 1377), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (1369, 1377), False, 'from datetime import datetime, timedelta\n'), ((1554, 1565), 'time.time', 'time.time', ([], {}), '()\n', (1563, 1565), False, 'import time\n'), ((6997, 7011), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (7009, 7011), False, 'from datetime import datetime, timedelta\n'), ((7593, 7649), 'requests.request', 'requests.request', (['"""PUT"""', 'url'], {'data': 'data', 'headers': 'headers'}), "('PUT', url, data=data, headers=headers)\n", (7609, 7649), False, 'import requests\n'), ((8523, 8568), 'requests.request', 'requests.request', (['"""GET"""', 'url'], {'headers': 'headers'}), "('GET', url, headers=headers)\n", (8539, 8568), False, 'import requests\n'), ((9454, 9527), 'requests.request', 'requests.request', (['"""POST"""', 'AUTHENTICATE_URL'], {'data': 'payload', 'headers': 'headers'}), "('POST', AUTHENTICATE_URL, data=payload, headers=headers)\n", (9470, 9527), False, 'import requests\n'), ((10064, 10078), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (10076, 10078), False, 
'from datetime import datetime, timedelta\n'), ((11083, 11097), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (11095, 11097), False, 'from datetime import datetime, timedelta\n'), ((2074, 2102), 'spiderpy.devices.thermostat.SpiderThermostat', 'SpiderThermostat', (['thermostat'], {}), '(thermostat)\n', (2090, 2102), False, 'from spiderpy.devices.thermostat import SpiderThermostat\n'), ((5520, 5547), 'spiderpy.devices.powerplug.SpiderPowerPlug', 'SpiderPowerPlug', (['power_plug'], {}), '(power_plug)\n', (5535, 5547), False, 'from spiderpy.devices.powerplug import SpiderPowerPlug\n'), ((2857, 2884), 'json.dumps', 'json.dumps', (['thermostat.data'], {}), '(thermostat.data)\n', (2867, 2884), False, 'import json\n'), ((3420, 3447), 'json.dumps', 'json.dumps', (['thermostat.data'], {}), '(thermostat.data)\n', (3430, 3447), False, 'import json\n'), ((4022, 4049), 'json.dumps', 'json.dumps', (['thermostat.data'], {}), '(thermostat.data)\n', (4032, 4049), False, 'import json\n'), ((4589, 4605), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (4603, 4605), False, 'from datetime import datetime, timedelta\n')] |
import torch
import torch.nn as nn
from torch3d.nn import SetAbstraction
class PointNetSSG(nn.Module):
    """
    PointNet++ single-scale grouping architecture from the `"PointNet++: Deep Hierarchical Feature Learning on Point Sets in a Metric Space" <https://arxiv.org/abs/1706.02413>`_ paper.
    Args:
        in_channels (int): Number of channels in the input point set
        num_classes (int): Number of classes in the dataset
        dropout (float, optional): Dropout rate in the classifier. Default: 0.5
    """  # noqa
    def __init__(self, in_channels, num_classes, dropout=0.5):
        super(PointNetSSG, self).__init__()
        # Three set-abstraction stages with shrinking point counts and
        # growing feature widths (the "+ 3" accounts for xyz coordinates).
        self.sa1 = SetAbstraction(in_channels, [64, 64, 128], 512, 32, 0.2, bias=False)
        self.sa2 = SetAbstraction(128 + 3, [128, 128, 256], 128, 64, 0.4, bias=False)
        self.sa3 = SetAbstraction(256 + 3, [256, 512, 1024], 1, 128, 0.8, bias=False)
        # Classifier head built from a spec; the resulting Sequential has
        # the same module indices (state-dict keys) as a hand-written one.
        head = []
        for width_in, width_out in ((1024, 512), (512, 256)):
            head.append(nn.Linear(width_in, width_out, bias=False))
            head.append(nn.BatchNorm1d(width_out))
            head.append(nn.ReLU(True))
            head.append(nn.Dropout(dropout))
        self.mlp = nn.Sequential(*head)
        self.fc = nn.Linear(256, num_classes)
    def forward(self, x):
        # Hierarchical feature extraction, then drop the point axis left
        # by the final single-group abstraction before the classifier.
        for stage in (self.sa1, self.sa2, self.sa3):
            x = stage(x)
        x = x.squeeze(2)
        return self.fc(self.mlp(x))
| [
"torch.nn.ReLU",
"torch.nn.Dropout",
"torch.nn.BatchNorm1d",
"torch3d.nn.SetAbstraction",
"torch.nn.Linear"
] | [((661, 729), 'torch3d.nn.SetAbstraction', 'SetAbstraction', (['in_channels', '[64, 64, 128]', '(512)', '(32)', '(0.2)'], {'bias': '(False)'}), '(in_channels, [64, 64, 128], 512, 32, 0.2, bias=False)\n', (675, 729), False, 'from torch3d.nn import SetAbstraction\n'), ((749, 815), 'torch3d.nn.SetAbstraction', 'SetAbstraction', (['(128 + 3)', '[128, 128, 256]', '(128)', '(64)', '(0.4)'], {'bias': '(False)'}), '(128 + 3, [128, 128, 256], 128, 64, 0.4, bias=False)\n', (763, 815), False, 'from torch3d.nn import SetAbstraction\n'), ((835, 901), 'torch3d.nn.SetAbstraction', 'SetAbstraction', (['(256 + 3)', '[256, 512, 1024]', '(1)', '(128)', '(0.8)'], {'bias': '(False)'}), '(256 + 3, [256, 512, 1024], 1, 128, 0.8, bias=False)\n', (849, 901), False, 'from torch3d.nn import SetAbstraction\n'), ((1241, 1268), 'torch.nn.Linear', 'nn.Linear', (['(256)', 'num_classes'], {}), '(256, num_classes)\n', (1250, 1268), True, 'import torch.nn as nn\n'), ((948, 980), 'torch.nn.Linear', 'nn.Linear', (['(1024)', '(512)'], {'bias': '(False)'}), '(1024, 512, bias=False)\n', (957, 980), True, 'import torch.nn as nn\n'), ((994, 1013), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['(512)'], {}), '(512)\n', (1008, 1013), True, 'import torch.nn as nn\n'), ((1027, 1040), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (1034, 1040), True, 'import torch.nn as nn\n'), ((1054, 1073), 'torch.nn.Dropout', 'nn.Dropout', (['dropout'], {}), '(dropout)\n', (1064, 1073), True, 'import torch.nn as nn\n'), ((1087, 1118), 'torch.nn.Linear', 'nn.Linear', (['(512)', '(256)'], {'bias': '(False)'}), '(512, 256, bias=False)\n', (1096, 1118), True, 'import torch.nn as nn\n'), ((1132, 1151), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['(256)'], {}), '(256)\n', (1146, 1151), True, 'import torch.nn as nn\n'), ((1165, 1178), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (1172, 1178), True, 'import torch.nn as nn\n'), ((1192, 1211), 'torch.nn.Dropout', 'nn.Dropout', (['dropout'], {}), 
'(dropout)\n', (1202, 1211), True, 'import torch.nn as nn\n')] |
import boto3
import datetime
import requests
import pytest
from unittest.mock import patch
from reports.reporting import release_summary
from collections import Counter
from functools import partial
# Dataservice entity endpoints that are counted in every study summary;
# the mocked APIs report exactly one record per entity per study.
ENTITIES = [
    'participants',
    'biospecimens',
    'phenotypes',
    'genomic-files',
    'study-files',
    'read-groups',
    'diagnoses',
    'sequencing-experiments',
    'families'
]
def test_get_studies(client, mocked_apis):
    """ Test that studies, version and state are read from the coordinator.

    The original docstring was copy-pasted from an unrelated test and the
    two dead `db = boto3...` assignments (the second shadowed the first,
    neither was used) have been removed.
    """
    with patch('requests.get') as mock_request:
        mock_request.side_effect = mocked_apis
        studies, version, state = release_summary.get_studies('RE_00000000')
    assert studies == ['SD_00000000']
    assert version == '0.0.0'
    assert state == 'staged'
    mock_request.assert_called_with(
        'http://coordinator/releases/RE_00000000?limit=100',
        timeout=10)
@pytest.mark.parametrize("entity", ENTITIES)
def test_entity_counts(client, entity, mocked_apis):
    """Each entity endpoint reports exactly one record for the study."""
    with patch('requests.get') as mock_request:
        mock_request.side_effect = mocked_apis
        count = release_summary.count_entity('SD_00000000', entity)
    assert count == 1
def test_count_study(client, mocked_apis):
    """ Test that entities are counted within a study """
    with patch('requests.get') as mock_request:
        mock_request.side_effect = mocked_apis
        counts = release_summary.count_study('SD_00000000')
    assert counts == {entity: 1 for entity in ENTITIES}
def test_count_studies(client, mocked_apis):
    """ Test that study counts are aggregated across studies """
    with patch('requests.get') as mock_request:
        mock_request.side_effect = mocked_apis
        study_counts = {}
        for study in ['SD_00000000', 'SD_00000001']:
            study_counts[study] = Counter(release_summary.count_study(study))
        totals = release_summary.collect_counts(study_counts)
    # Two studies with one of everything each, plus the 'studies' total.
    assert totals == {k: 2 for k in ENTITIES + ['studies']}
def test_run(client, mocked_apis):
    """ Test that study counts are aggregated across studies """
    db = boto3.resource('dynamodb')
    release_table = db.Table('release-summary')
    study_table = db.Table('study-summary')
    with patch('requests.get') as mock_request:
        mock_request.side_effect = mocked_apis
        r = release_summary.run('TA_00000000', 'RE_00000000')
    # The returned summary contains one row per entity plus identifiers.
    assert all(k in r for k in ENTITIES)
    assert r['release_id'] == 'RE_00000000'
    assert r['task_id'] == 'TA_00000000'
    # A run must persist exactly one release row and one study row.
    assert release_table.item_count == 1
    assert study_table.item_count == 1
    st = study_table.get_item(Key={
        'release_id': 'RE_00000000',
        'study_id': 'SD_00000000'
    })['Item']
    assert st['study_id'] == 'SD_00000000'
    assert st['version'] == '0.0.0'
    assert st['state'] == 'staged'
    assert all(st[k] == 1 for k in ENTITIES)
def test_get_report(client, mocked_apis):
    """ Test that the API returns the release summary after a run.

    Cleaned up: the unused trailing `st = ...` assignment and the stray
    space in `resp .json` are gone; the unused `r` binding is dropped.
    """
    db = boto3.resource('dynamodb')
    table = db.Table('release-summary')
    with patch('requests.get') as mock_request:
        mock_request.side_effect = mocked_apis
        release_summary.run('TA_00000000', 'RE_00000000')
    assert table.item_count == 1
    resp = client.get('/reports/releases/RE_00000000')
    assert all(k in resp.json for k in ENTITIES)
    assert all(resp.json[k] == 1 for k in ENTITIES)
    assert resp.json['release_id'] == 'RE_00000000'
    assert resp.json['task_id'] == 'TA_00000000'
    assert 'SD_00000000' in resp.json['study_summaries']
def test_report_not_found(client, mocked_apis):
    """An unknown release id yields a 404 with an explanatory message."""
    resp = client.get('/reports/releases/RE_XXXXXXXX')
    message = resp.json['message']
    assert resp.status_code == 404
    assert 'could not find a report for release RE_' in message
def test_publish(client, mocked_apis):
    """ Test that release and study summary rows are updated upon publish """
    db = boto3.resource('dynamodb')
    release_table = db.Table('release-summary')
    study_table = db.Table('study-summary')
    def _test_summaries(state, version):
        # Both tables must hold exactly one row for this release/study
        # with the expected state and version.
        assert release_table.item_count == 1
        re = release_table.get_item(Key={
            'release_id': 'RE_00000000',
            'study_id': 'SD_00000000'
        })['Item']
        assert re['release_id'] == 'RE_00000000'
        assert re['version'] == version
        assert re['state'] == state
        assert study_table.item_count == 1
        st = study_table.get_item(Key={
            'release_id': 'RE_00000000',
            'study_id': 'SD_00000000'
        })['Item']
        assert st['study_id'] == 'SD_00000000'
        assert st['version'] == version
        assert st['state'] == state
        assert all(st[k] == 1 for k in ENTITIES)
    with patch('requests.get') as mock_request:
        # The release has been run as candidate release 0.0.3
        mock_request.side_effect = partial(mocked_apis, version='0.0.3')
        r = release_summary.run('TA_00000000', 'RE_00000000')
        _test_summaries('staged', '0.0.3')
        # Now the release has been published and its version number bumped
        # in the coordinator to 0.1.0
        mock_request.side_effect = partial(mocked_apis, version='0.1.0')
        r = release_summary.publish('RE_00000000')
        _test_summaries('published', '0.1.0')
def test_publish_does_not_exist(client, mocked_apis):
    """
    Test behavior if a release is published and one of the summary rows
    do not exist
    """
    db = boto3.resource('dynamodb')
    release_table = db.Table('release-summary')
    study_table = db.Table('study-summary')
    with patch('requests.get') as mock_request:
        mock_request.side_effect = partial(mocked_apis, version='0.0.3')
        r = release_summary.run('TA_00000000', 'RE_00000000')
        # The run persisted one row in each table.
        assert release_table.item_count == 1
        assert study_table.item_count == 1
        # Now delete the summaries, as if it never existed
        release_table.delete_item(Key={
            'release_id': 'RE_00000000'
        })
        study_table.delete_item(Key={
            'release_id': 'RE_00000000',
            'study_id': 'SD_00000000'
        })
        assert release_table.item_count == 0
        assert study_table.item_count == 0
        # Publish the release
        mock_request.side_effect = partial(mocked_apis, version='0.1.0')
        r = release_summary.publish('RE_00000000')
        # There should still be no summary rows
        # (publish must not resurrect rows that were removed).
        assert release_table.item_count == 0
        assert study_table.item_count == 0
def test_get_report_per_study(client, mocked_apis):
    """ Test that the API returns the release summary for a specific study.

    Cleaned up: the unused `s` binding and the stray space in
    `resp .json` are removed.
    """
    db = boto3.resource('dynamodb')
    table = db.Table('release-summary')
    with patch('requests.get') as mock_request:
        mock_request.side_effect = mocked_apis
        release_summary.run('TA_00000000', 'RE_00000000')
    assert table.item_count == 1
    resp = client.get('/reports/RE_00000000/SD_00000000')
    assert all(k in resp.json for k in ENTITIES)
    assert all(resp.json[k] == 1 for k in ENTITIES)
    assert resp.json['release_id'] == 'RE_00000000'
    assert resp.json['task_id'] == 'TA_00000000'
    assert 'SD_00000000' in resp.json['study_id']
def test_get_report_per_study_not_found(client):
    """A 404 with an explanatory message is returned for an unknown study.

    Bug fix: the original had `assert 'could not find study'` (a bare,
    always-truthy string literal) followed by a dangling no-op membership
    expression on the next line, so the message was never actually
    checked. The two literal pieces are now joined (implicit string
    concatenation) inside a single assert.
    """
    resp = client.get('/reports/RE_XXXXXXXX/SD_XXXXXXXX')
    assert resp.status_code == 404
    assert ('could not find study'
            ' report for release RE_') in resp.json['message']
    assert 'and study id SD_' in resp.json['message']
def test_get_report_per_study_filter_by_state(client, mocked_apis):
    """ Test that the API returns release summaries filtered by state.

    Cleaned up: the unused `s` binding is dropped and the accidental
    line break inside `r1[k]` is repaired.
    """
    db = boto3.resource('dynamodb')
    table = db.Table('release-summary')
    with patch('requests.get') as mock_request:
        mock_request.side_effect = mocked_apis
        release_summary.run('TA_00000000', 'RE_00000000')
    assert table.item_count == 1
    resp = client.get('/reports/studies/SD_00000000?state=staged')
    r1 = resp.json['releases'][0]['RE_00000000']
    assert all(k in r1 for k in ENTITIES)
    assert all(r1[k] == 1 for k in ENTITIES)
    assert r1['release_id'] == 'RE_00000000'
    assert r1['task_id'] == 'TA_00000000'
    assert 'SD_00000000' in r1['study_id']
def test_get_report_per_study_filter_by_state_not_found(client):
    """A 404 with an explanatory message is returned for an unknown study.

    Bug fix: as in test_get_report_per_study_not_found, the bare string
    literal assert and the dangling membership expression are joined into
    one real assertion.
    """
    resp = client.get('/reports/studies/SD_XXXXXXXX?state=published')
    assert resp.status_code == 404
    assert ('could not find study'
            ' report for study SD_') in resp.json['message']
| [
"reports.reporting.release_summary.run",
"reports.reporting.release_summary.publish",
"reports.reporting.release_summary.get_studies",
"boto3.client",
"reports.reporting.release_summary.count_study",
"reports.reporting.release_summary.count_entity",
"pytest.mark.parametrize",
"boto3.resource",
"repo... | [((998, 1041), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""entity"""', 'ENTITIES'], {}), "('entity', ENTITIES)\n", (1021, 1041), False, 'import pytest\n'), ((521, 547), 'boto3.resource', 'boto3.resource', (['"""dynamodb"""'], {}), "('dynamodb')\n", (535, 547), False, 'import boto3\n'), ((557, 581), 'boto3.client', 'boto3.client', (['"""dynamodb"""'], {}), "('dynamodb')\n", (569, 581), False, 'import boto3\n'), ((2178, 2204), 'boto3.resource', 'boto3.resource', (['"""dynamodb"""'], {}), "('dynamodb')\n", (2192, 2204), False, 'import boto3\n'), ((3056, 3082), 'boto3.resource', 'boto3.resource', (['"""dynamodb"""'], {}), "('dynamodb')\n", (3070, 3082), False, 'import boto3\n'), ((4028, 4054), 'boto3.resource', 'boto3.resource', (['"""dynamodb"""'], {}), "('dynamodb')\n", (4042, 4054), False, 'import boto3\n'), ((5598, 5624), 'boto3.resource', 'boto3.resource', (['"""dynamodb"""'], {}), "('dynamodb')\n", (5612, 5624), False, 'import boto3\n'), ((6778, 6804), 'boto3.resource', 'boto3.resource', (['"""dynamodb"""'], {}), "('dynamodb')\n", (6792, 6804), False, 'import boto3\n'), ((7781, 7807), 'boto3.resource', 'boto3.resource', (['"""dynamodb"""'], {}), "('dynamodb')\n", (7795, 7807), False, 'import boto3\n'), ((592, 613), 'unittest.mock.patch', 'patch', (['"""requests.get"""'], {}), "('requests.get')\n", (597, 613), False, 'from unittest.mock import patch\n'), ((712, 754), 'reports.reporting.release_summary.get_studies', 'release_summary.get_studies', (['"""RE_00000000"""'], {}), "('RE_00000000')\n", (739, 754), False, 'from reports.reporting import release_summary\n'), ((1104, 1125), 'unittest.mock.patch', 'patch', (['"""requests.get"""'], {}), "('requests.get')\n", (1109, 1125), False, 'from unittest.mock import patch\n'), ((1202, 1253), 'reports.reporting.release_summary.count_entity', 'release_summary.count_entity', (['"""SD_00000000"""', 'entity'], {}), "('SD_00000000', entity)\n", (1230, 1253), False, 'from reports.reporting import 
release_summary\n'), ((1388, 1409), 'unittest.mock.patch', 'patch', (['"""requests.get"""'], {}), "('requests.get')\n", (1393, 1409), False, 'from unittest.mock import patch\n'), ((1486, 1528), 'reports.reporting.release_summary.count_study', 'release_summary.count_study', (['"""SD_00000000"""'], {}), "('SD_00000000')\n", (1513, 1528), False, 'from reports.reporting import release_summary\n'), ((1695, 1716), 'unittest.mock.patch', 'patch', (['"""requests.get"""'], {}), "('requests.get')\n", (1700, 1716), False, 'from unittest.mock import patch\n'), ((1963, 2007), 'reports.reporting.release_summary.collect_counts', 'release_summary.collect_counts', (['study_counts'], {}), '(study_counts)\n', (1993, 2007), False, 'from reports.reporting import release_summary\n'), ((2306, 2327), 'unittest.mock.patch', 'patch', (['"""requests.get"""'], {}), "('requests.get')\n", (2311, 2327), False, 'from unittest.mock import patch\n'), ((2404, 2453), 'reports.reporting.release_summary.run', 'release_summary.run', (['"""TA_00000000"""', '"""RE_00000000"""'], {}), "('TA_00000000', 'RE_00000000')\n", (2423, 2453), False, 'from reports.reporting import release_summary\n'), ((3132, 3153), 'unittest.mock.patch', 'patch', (['"""requests.get"""'], {}), "('requests.get')\n", (3137, 3153), False, 'from unittest.mock import patch\n'), ((3230, 3279), 'reports.reporting.release_summary.run', 'release_summary.run', (['"""TA_00000000"""', '"""RE_00000000"""'], {}), "('TA_00000000', 'RE_00000000')\n", (3249, 3279), False, 'from reports.reporting import release_summary\n'), ((4863, 4884), 'unittest.mock.patch', 'patch', (['"""requests.get"""'], {}), "('requests.get')\n", (4868, 4884), False, 'from unittest.mock import patch\n'), ((4999, 5036), 'functools.partial', 'partial', (['mocked_apis'], {'version': '"""0.0.3"""'}), "(mocked_apis, version='0.0.3')\n", (5006, 5036), False, 'from functools import partial\n'), ((5049, 5098), 'reports.reporting.release_summary.run', 'release_summary.run', 
(['"""TA_00000000"""', '"""RE_00000000"""'], {}), "('TA_00000000', 'RE_00000000')\n", (5068, 5098), False, 'from reports.reporting import release_summary\n'), ((5292, 5329), 'functools.partial', 'partial', (['mocked_apis'], {'version': '"""0.1.0"""'}), "(mocked_apis, version='0.1.0')\n", (5299, 5329), False, 'from functools import partial\n'), ((5342, 5380), 'reports.reporting.release_summary.publish', 'release_summary.publish', (['"""RE_00000000"""'], {}), "('RE_00000000')\n", (5365, 5380), False, 'from reports.reporting import release_summary\n'), ((5727, 5748), 'unittest.mock.patch', 'patch', (['"""requests.get"""'], {}), "('requests.get')\n", (5732, 5748), False, 'from unittest.mock import patch\n'), ((5801, 5838), 'functools.partial', 'partial', (['mocked_apis'], {'version': '"""0.0.3"""'}), "(mocked_apis, version='0.0.3')\n", (5808, 5838), False, 'from functools import partial\n'), ((5851, 5900), 'reports.reporting.release_summary.run', 'release_summary.run', (['"""TA_00000000"""', '"""RE_00000000"""'], {}), "('TA_00000000', 'RE_00000000')\n", (5870, 5900), False, 'from reports.reporting import release_summary\n'), ((6422, 6459), 'functools.partial', 'partial', (['mocked_apis'], {'version': '"""0.1.0"""'}), "(mocked_apis, version='0.1.0')\n", (6429, 6459), False, 'from functools import partial\n'), ((6472, 6510), 'reports.reporting.release_summary.publish', 'release_summary.publish', (['"""RE_00000000"""'], {}), "('RE_00000000')\n", (6495, 6510), False, 'from reports.reporting import release_summary\n'), ((6854, 6875), 'unittest.mock.patch', 'patch', (['"""requests.get"""'], {}), "('requests.get')\n", (6859, 6875), False, 'from unittest.mock import patch\n'), ((6952, 7001), 'reports.reporting.release_summary.run', 'release_summary.run', (['"""TA_00000000"""', '"""RE_00000000"""'], {}), "('TA_00000000', 'RE_00000000')\n", (6971, 7001), False, 'from reports.reporting import release_summary\n'), ((7857, 7878), 'unittest.mock.patch', 'patch', 
(['"""requests.get"""'], {}), "('requests.get')\n", (7862, 7878), False, 'from unittest.mock import patch\n'), ((7955, 8004), 'reports.reporting.release_summary.run', 'release_summary.run', (['"""TA_00000000"""', '"""RE_00000000"""'], {}), "('TA_00000000', 'RE_00000000')\n", (7974, 8004), False, 'from reports.reporting import release_summary\n'), ((1869, 1903), 'reports.reporting.release_summary.count_study', 'release_summary.count_study', (['study'], {}), '(study)\n', (1896, 1903), False, 'from reports.reporting import release_summary\n')] |
import os
import signal
import atexit
import json
import time
from pathlib import Path
import subprocess
import argparse
import pprint
from distutils.util import strtobool
children_pid = []
@atexit.register
def kill_child():
    """Terminate every spawned child process at interpreter exit.

    Best-effort cleanup: a child that has already exited makes
    ``os.kill`` raise ``ProcessLookupError``; the original would abort
    the loop there, leaving the remaining children running. The error
    is now ignored so every pid still gets signalled.
    """
    for child_pid in children_pid:
        try:
            os.kill(child_pid, signal.SIGTERM)
        except ProcessLookupError:
            # Child already gone; keep signalling the rest.
            pass
# Top-level parser: only the sub-command is decided here; the remaining
# argv is re-parsed by the command-specific parser built in get_args().
cmd_parser = argparse.ArgumentParser(add_help=False)
cmd_parser.add_argument(
    "cmd", type=str, choices=["new", "del", "run", "set", "list"], help="Main command",
)
# parse_known_args so unknown (command-specific) flags don't error yet.
args = cmd_parser.parse_known_args()[0]
# Settings file lives in the user's home directory as ~/.tas.json.
PATH_TO_SETTINGS = Path.home() / Path(".tas.json")
def get_default_settings():
    """Return the factory-default settings structure.

    Provides one "default" namespace rooted under the user's home
    directory, one "default" project template, and an empty project list.
    """
    default_namespace = {
        "name": "default",
        "path": str(Path.home() / Path("Documents/tas_projects/")),
    }
    default_actions = [
        {"type": "venv"},
        {"type": "dir", "path": "py"},
        {"type": "dir", "path": "py/src"},
        {"type": "dir", "path": "sql"},
        {"type": "dir", "path": "resources"},
        {"type": "file", "path": "README.md"},
        {"type": "requirements", "packages": ["jupyter"]},
        {"type": "file_link", "url": ""},
    ]
    return {
        "namespaces": [default_namespace],
        "templates": [{"name": "default", "actions": default_actions}],
        "projects": [],
    }
def load_settings():
    """Load settings from PATH_TO_SETTINGS, or defaults when it is absent."""
    if PATH_TO_SETTINGS.exists():
        with open(PATH_TO_SETTINGS) as fh:
            return json.load(fh)
    return get_default_settings()
def save_settings():
    """Persist the in-memory ``settings`` to PATH_TO_SETTINGS as pretty JSON."""
    with open(PATH_TO_SETTINGS, "w+") as fh:
        json.dump(settings, fh, ensure_ascii=False, indent=4)
def lookup_in_list_of_dicts(l, name, return_index=False):
    """Find the first dict in ``l`` whose "name" key equals ``name``.

    Returns the matching dict (or ``(index, dict)`` when
    ``return_index`` is true); ``None`` / ``(None, None)`` when absent.
    """
    found = next(
        ((idx, entry) for idx, entry in enumerate(l) if entry["name"] == name),
        (None, None),
    )
    if return_index:
        return found
    return found[1]
def get_proj(args, should_exist, ns_path):
    """Resolve a project record for the parsed CLI args.

    Looks the project up by its namespaced name in the global settings.
    When ``should_exist`` is true the existing record is returned (or an
    error raised); otherwise a fresh record is built (and an error raised
    if one already exists).
    """
    proj_name = f"{args.namespace}.{args.name}"
    existing = lookup_in_list_of_dicts(settings["projects"], proj_name)
    if should_exist:
        if not existing:
            raise Exception("Project not found!")
        return existing
    if existing:
        raise Exception("Project already exists!")
    return {"name": proj_name, "path": Path(ns_path) / Path(args.name)}
def get_args(cmd):
    """Build and run the command-specific argument parser for ``cmd``.

    Each branch creates a parser that inherits the positional ``cmd``
    argument from the module-level ``cmd_parser`` and adds the options
    that sub-command needs. Returns the fully parsed namespace.
    """
    # TODO: create allowed combinations of args
    if cmd == "set":
        # set <namespace> <path>: create/update a namespace's root path.
        args_parser = argparse.ArgumentParser(parents=[cmd_parser])
        args_parser.add_argument("namespace", type=str, help="Namespace")
        args_parser.add_argument("path", type=str, help="PosixPath")
    elif cmd == "del":
        # del <name> [-ns NS] <type>: delete a namespace (n) or project (p).
        args_parser = argparse.ArgumentParser(parents=[cmd_parser])
        args_parser.add_argument("name", type=str, help="Name of an object")
        args_parser.add_argument(
            "-namespace", "-ns", type=str, default="default", help="Namespace"
        )
        args_parser.add_argument("type", type=str, choices=["n", "p"], default="p")
    elif cmd == "list":
        # list <type>: namespaces (n), templates (t), projects (p), all (a).
        args_parser = argparse.ArgumentParser(parents=[cmd_parser])
        args_parser.add_argument(
            "type", type=str, choices=["n", "t", "p", "a"], default="p"
        )
    elif cmd == "new":
        # new <name> [-t TEMPLATE] [-ns NS] [-p PATH]: create a project.
        args_parser = argparse.ArgumentParser(parents=[cmd_parser])
        args_parser.add_argument("name", type=str, help="Name")
        args_parser.add_argument(
            "-template", "-t", type=str, default="default", help="Template"
        )
        args_parser.add_argument(
            "-namespace", "-ns", type=str, default="default", help="Namespace"
        )
        args_parser.add_argument("-path", "-p", type=str, help="PosixPath")
    elif cmd == "run":
        # run <name> [-ns NS]: run an existing project.
        args_parser = argparse.ArgumentParser(parents=[cmd_parser])
        args_parser.add_argument(
            "-namespace", "-ns", type=str, default="default", help="Namespace"
        )
        args_parser.add_argument("name", type=str, help="Project name")
    # NOTE(review): an unexpected cmd would raise UnboundLocalError here;
    # cmd_parser's choices= makes that unreachable in practice.
    return args_parser.parse_args()
def interactive_y_n(question):
    """Prompt the user with *question* until a yes/no answer is given.

    Returns the ``strtobool`` value of the reply (1 for yes, 0 for no).
    """
    prompt = question + " (y/n): "
    while True:
        reply = str(input(prompt)).lower().strip()
        try:
            return strtobool(reply)
        except ValueError:
            # Not a recognised yes/no word -- ask again.
            pprint.pprint("Please enter yes or no!")
# Load the persisted configuration once at import time; every command
# below reads/mutates this module-level dict and then calls save_settings().
settings = load_settings()
if __name__ == "__main__":
    # Parse subcommand-specific options; ``args`` and ``cmd_parser`` are
    # created earlier in the file (outside this chunk).
    extra_args = get_args(args.cmd)
    if args.cmd == "set":
        # TODO: make it interactive?
        # Create or update a namespace -> path mapping.
        ns_id, ns = lookup_in_list_of_dicts(
            settings["namespaces"], extra_args.namespace, return_index=True
        )
        if ns_id != None:
            # Namespace exists: overwrite its path, keep other keys.
            settings["namespaces"][ns_id] = {**ns, "path": extra_args.path}
        else:
            settings["namespaces"].append(
                {"name": extra_args.namespace, "path": extra_args.path}
            )
        save_settings()
    elif args.cmd == "del":
        # TODO: interactive and delete projects
        if extra_args.type == "n":
            target = "namespaces"
            # NOTE(review): the "del" parser puts ``name`` on *extra_args*;
            # ``args.name`` looks like it should be ``extra_args.name`` --
            # verify against how ``args`` is parsed above this chunk.
            name = args.name
        elif extra_args.type == "p":
            target = "projects"
            ns = lookup_in_list_of_dicts(settings["namespaces"], extra_args.namespace)
            proj = get_proj(extra_args, True, ns["path"])
            name = proj["name"]
        # Locate the entry by name and remove it from settings.
        target_id, ns = lookup_in_list_of_dicts(
            settings[target], name, return_index=True
        )
        if target_id is None:
            raise Exception("No such name!")
        del settings[target][target_id]
        save_settings()
    elif args.cmd == "list":
        # Print the requested section of the settings.
        if extra_args.type == "n":
            pprint.pprint(settings["namespaces"])
        elif extra_args.type == "p":
            pprint.pprint(settings["projects"])
        elif extra_args.type == "t":
            pprint.pprint(settings["templates"])
        elif extra_args.type == "a":
            pprint.pprint(settings)
    elif args.cmd == "new":
        # Create a project folder from a template inside a namespace.
        ns = lookup_in_list_of_dicts(settings["namespaces"], extra_args.namespace)
        proj = get_proj(extra_args, False, ns["path"])
        template = lookup_in_list_of_dicts(settings["templates"], extra_args.template)
        if proj["path"].exists():
            if not interactive_y_n("Path already exists. Should we proceed?"):
                exit()
        else:
            proj["path"].mkdir(parents=True)
        # Replay the template's actions inside the new project folder.
        for action in template["actions"]:
            if action["type"] == "dir":
                (proj["path"] / Path(action["path"])).mkdir(
                    parents=False, exist_ok=True
                )
            elif action["type"] == "file":
                filepath = proj["path"] / Path(action["path"])
                filepath.touch()
            elif action["type"] == "requirements":
                # Create a venv with system site-packages, then optionally
                # pip-install the template's package list and record it.
                os.chdir(proj["path"])
                subprocess.call(
                    [
                        "python",
                        "-m",
                        "venv",
                        "--system-site-packages",
                        str(proj["path"] / Path("env")),
                    ]
                )
                if action["packages"]:
                    subprocess.call(
                        ["./env/bin/python", "-m", "pip", "install"]
                        + action["packages"]
                    )
                    filepath = proj["path"] / Path("requirements.txt")
                    with filepath.open("w+") as f:
                        f.write("\n".join(action["packages"]))
        settings["projects"].append({"name": proj["name"], "path": str(proj["path"])})
        save_settings()
    elif args.cmd == "run":
        # Launch a Jupyter notebook from the project's venv and block
        # until the user asks to stop.
        ns = lookup_in_list_of_dicts(settings["namespaces"], extra_args.namespace)
        proj = get_proj(extra_args, True, ns["path"])
        os.chdir(Path(proj["path"]))
        child = subprocess.Popen(
            ["./env/bin/python", "-m", "jupyter", "notebook", "--log-level=0"]
        )
        # ``children_pid`` is defined earlier in the file; collected PIDs
        # are killed on shutdown (see os.kill usage outside this chunk).
        children_pid.append(child.pid)
        time.sleep(2)
        while not interactive_y_n("Would you like to end?"):
            continue
| [
"os.kill",
"distutils.util.strtobool",
"argparse.ArgumentParser",
"pathlib.Path",
"subprocess.Popen",
"pathlib.Path.home",
"time.sleep",
"os.chdir",
"subprocess.call",
"json.load",
"pprint.pprint",
"json.dump"
] | [((320, 359), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'add_help': '(False)'}), '(add_help=False)\n', (343, 359), False, 'import argparse\n'), ((534, 545), 'pathlib.Path.home', 'Path.home', ([], {}), '()\n', (543, 545), False, 'from pathlib import Path\n'), ((548, 565), 'pathlib.Path', 'Path', (['""".tas.json"""'], {}), "('.tas.json')\n", (552, 565), False, 'from pathlib import Path\n'), ((271, 305), 'os.kill', 'os.kill', (['child_pid', 'signal.SIGTERM'], {}), '(child_pid, signal.SIGTERM)\n', (278, 305), False, 'import os\n'), ((1547, 1559), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1556, 1559), False, 'import json\n'), ((1635, 1687), 'json.dump', 'json.dump', (['settings', 'f'], {'ensure_ascii': '(False)', 'indent': '(4)'}), '(settings, f, ensure_ascii=False, indent=4)\n', (1644, 1687), False, 'import json\n'), ((2032, 2045), 'pathlib.Path', 'Path', (['ns_path'], {}), '(ns_path)\n', (2036, 2045), False, 'from pathlib import Path\n'), ((2048, 2063), 'pathlib.Path', 'Path', (['args.name'], {}), '(args.name)\n', (2052, 2063), False, 'from pathlib import Path\n'), ((2489, 2534), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'parents': '[cmd_parser]'}), '(parents=[cmd_parser])\n', (2512, 2534), False, 'import argparse\n'), ((2723, 2768), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'parents': '[cmd_parser]'}), '(parents=[cmd_parser])\n', (2746, 2768), False, 'import argparse\n'), ((4208, 4224), 'distutils.util.strtobool', 'strtobool', (['reply'], {}), '(reply)\n', (4217, 4224), False, 'from distutils.util import strtobool\n'), ((3099, 3144), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'parents': '[cmd_parser]'}), '(parents=[cmd_parser])\n', (3122, 3144), False, 'import argparse\n'), ((4269, 4309), 'pprint.pprint', 'pprint.pprint', (['"""Please enter yes or no!"""'], {}), "('Please enter yes or no!')\n", (4282, 4309), False, 'import pprint\n'), ((3306, 3351), 'argparse.ArgumentParser', 
'argparse.ArgumentParser', ([], {'parents': '[cmd_parser]'}), '(parents=[cmd_parser])\n', (3329, 3351), False, 'import argparse\n'), ((5631, 5668), 'pprint.pprint', 'pprint.pprint', (["settings['namespaces']"], {}), "(settings['namespaces'])\n", (5644, 5668), False, 'import pprint\n'), ((710, 721), 'pathlib.Path.home', 'Path.home', ([], {}), '()\n', (719, 721), False, 'from pathlib import Path\n'), ((724, 755), 'pathlib.Path', 'Path', (['"""Documents/tas_projects/"""'], {}), "('Documents/tas_projects/')\n", (728, 755), False, 'from pathlib import Path\n'), ((3780, 3825), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'parents': '[cmd_parser]'}), '(parents=[cmd_parser])\n', (3803, 3825), False, 'import argparse\n'), ((5718, 5753), 'pprint.pprint', 'pprint.pprint', (["settings['projects']"], {}), "(settings['projects'])\n", (5731, 5753), False, 'import pprint\n'), ((7813, 7901), 'subprocess.Popen', 'subprocess.Popen', (["['./env/bin/python', '-m', 'jupyter', 'notebook', '--log-level=0']"], {}), "(['./env/bin/python', '-m', 'jupyter', 'notebook',\n '--log-level=0'])\n", (7829, 7901), False, 'import subprocess\n'), ((7967, 7980), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (7977, 7980), False, 'import time\n'), ((5803, 5839), 'pprint.pprint', 'pprint.pprint', (["settings['templates']"], {}), "(settings['templates'])\n", (5816, 5839), False, 'import pprint\n'), ((7777, 7795), 'pathlib.Path', 'Path', (["proj['path']"], {}), "(proj['path'])\n", (7781, 7795), False, 'from pathlib import Path\n'), ((5889, 5912), 'pprint.pprint', 'pprint.pprint', (['settings'], {}), '(settings)\n', (5902, 5912), False, 'import pprint\n'), ((6657, 6677), 'pathlib.Path', 'Path', (["action['path']"], {}), "(action['path'])\n", (6661, 6677), False, 'from pathlib import Path\n'), ((6778, 6800), 'os.chdir', 'os.chdir', (["proj['path']"], {}), "(proj['path'])\n", (6786, 6800), False, 'import os\n'), ((6476, 6496), 'pathlib.Path', 'Path', (["action['path']"], {}), 
"(action['path'])\n", (6480, 6496), False, 'from pathlib import Path\n'), ((7158, 7245), 'subprocess.call', 'subprocess.call', (["(['./env/bin/python', '-m', 'pip', 'install'] + action['packages'])"], {}), "(['./env/bin/python', '-m', 'pip', 'install'] + action[\n 'packages'])\n", (7173, 7245), False, 'import subprocess\n'), ((7353, 7377), 'pathlib.Path', 'Path', (['"""requirements.txt"""'], {}), "('requirements.txt')\n", (7357, 7377), False, 'from pathlib import Path\n'), ((7045, 7056), 'pathlib.Path', 'Path', (['"""env"""'], {}), "('env')\n", (7049, 7056), False, 'from pathlib import Path\n')] |
# -*- coding: utf-8 -*-
import os
import datetime
import logging
import requests
import numpy
import cv2
import zbar
from Queue import Queue
from threading import Thread
from PIL import Image
# Module-level logger; handlers/levels are configured by the application.
logger = logging.getLogger(__name__)
# Scratch directory for pictures awaiting upload (created lazily by
# get_temp_dir()); relative to the process working directory.
TEMP_DIR = os.path.join(os.getcwd(), 'temp')
def get_temp_dir():
    """Return TEMP_DIR, creating the directory on first use."""
    if os.path.exists(TEMP_DIR):
        return TEMP_DIR
    os.mkdir(TEMP_DIR)
    return TEMP_DIR
def thumbnail(picture, size=0.50):
    """Shrink *picture* in place by the given ratio and return it."""
    width, height = picture.size
    new_dimensions = (int(width * size), int(height * size))
    picture.thumbnail(new_dimensions, Image.ANTIALIAS)
    return picture
def save_picture(picture, path, filename):
    """Write *picture* as an optimized JPEG under *path*; return the full path."""
    # A StringIO buffer was tried first but produced a different size than
    # a filesystem save, so the picture is persisted to disk directly.
    destination = os.path.join(path, filename)
    picture.save(destination, optimize=True, format='JPEG')
    return destination
def delete_picture(path):
    """Best-effort removal of the file at *path*.

    OS-level failures (file already gone, or Windows still holding a
    handle) are deliberately ignored.  Fix: the original used a bare
    ``except:``, which also hid programming errors such as passing a
    wrong type; only ``OSError`` is swallowed now.
    """
    try:
        os.remove(path)
    except OSError:
        # Deliberate best-effort: temp pictures may already be cleaned up.
        pass
def prepare_msg(qrcode, picture, timestamp):
    """Build the ``(filename, form-data, files)`` triple for the auth POST.

    The picture is written to the temp dir under a microsecond-precision
    timestamp name so concurrent uploads cannot collide.
    """
    stamp = datetime.datetime.strftime(timestamp, '%Y%m%d%H%M%S%f')
    filename = '{}.jpeg'.format(stamp)
    stored_at = save_picture(picture, get_temp_dir(), filename)
    return filename, dict(qrcode=qrcode, timestamp=stamp), {'picture': stored_at}
def server_auth(queue, url, qrcode, picture, timestamp, timeout=5):
    """POST an auth request for *qrcode* and push the reply onto *queue*.

    On network timeout a ``{'network_timeout': True}`` marker is queued;
    on any other failure ``None`` is queued.  The temporary picture file
    is always removed.
    """
    filename, data, files = prepare_msg(qrcode, picture, timestamp)
    response = None
    try:
        profiling = logger.getEffectiveLevel() >= logging.INFO
        if profiling:
            # Profile the request
            start = datetime.datetime.now()
        # NOTE(review): ``files`` maps to a *path string*, not an open
        # file object, so requests uploads the path text rather than the
        # JPEG bytes -- confirm against the server's expectations.
        r = requests.post(url, data=data, files=files, timeout=timeout)
        if profiling:
            # Fix: the original compared the *method object*
            # ``logger.getEffectiveLevel`` (missing parentheses) here.
            elapsed_time = (datetime.datetime.now() - start).total_seconds()
            logger.info('Elapsed time was {} seconds'.format(elapsed_time))
    except requests.exceptions.Timeout:
        response = dict(network_timeout=True)
    except Exception:
        # Any other failure (connection error, etc.): signal with None.
        response = None
    else:
        response = r.json()
    finally:
        delete_picture(os.path.join(get_temp_dir(), filename))
        queue.put(response)
class QRCodeScanner(object):
    """Detects QR codes in video frames via ZBar and optionally submits
    them to an auth server on a background thread.

    Per-code submissions are throttled so the same code is not re-sent
    every frame.  Subclasses may override is_valid_qrcode() and
    after_zbar().
    """

    def __init__(
        self,
        url=None,
        max_responses=2,
        timeout=5,
        ok_color=(0, 0, 255),
        not_ok_color=(255, 0, 0),
        box_width=1,
        debug=False,
        ok_throttle=10,
        not_ok_throttle=5,
        max_qrcode_size=0
    ):
        self.url = url
        self.timeout = timeout
        # Fix: the original line was a bare ``self.max_responses``
        # expression, which set nothing and left the attribute undefined
        # for auth().
        self.max_responses = max_responses
        self.thread = None
        self.queue = Queue()
        # Init zbar.
        self.scanner = zbar.ImageScanner()
        # Disable all zbar symbols.
        self.scanner.set_config(0, zbar.Config.ENABLE, 0)
        # Enable QRCodes.
        self.scanner.set_config(zbar.Symbol.QRCODE, zbar.Config.ENABLE, 1)
        # Highlight scanned QR Codes.
        self.ok_color = ok_color
        self.not_ok_color = not_ok_color
        self.box_width = box_width
        self.successes = 0
        self.debug = debug
        # Fix: the following state is read throughout the class but was
        # never initialised, causing AttributeError on first use.
        # Throttle windows are seconds -- TODO confirm sensible defaults.
        self.responses = []
        self.active_qrcode = None
        self.ok_throttle_dict = {}
        self.not_ok_throttle_dict = {}
        self.ok_throttle = ok_throttle
        self.not_ok_throttle = not_ok_throttle
        # Fraction of the frame a code may occupy; 0 disables the limit.
        self.max_qrcode_size = max_qrcode_size
        self.max_size_exceeded = False

    def main(self, frame, timestamp):
        """Process one frame: expire throttles, scan, auth, annotate."""
        self.before_zbar(timestamp)
        frame, qrcodes = self.zbar(frame)
        if len(qrcodes) > 0:
            self.auth(frame, qrcodes, timestamp)
        frame = self.after_zbar(frame, qrcodes, timestamp)
        self.process_results_from_queue(timestamp)
        return frame

    def auth(self, frame, qrcodes, timestamp):
        """Submit the next eligible QR code to the server."""
        if self.url is not None:
            qrcode = self.get_next_qrcode(frame, qrcodes)
            if qrcode is not None:
                # NOTE(review): converting here makes launch_thread's own
                # Image.fromarray() fail when responses pile up; it reads
                # like a rough back-pressure mechanism -- confirm intent
                # before changing.
                if len(self.responses) > self.max_responses:
                    frame = Image.fromarray(frame)
                self.launch_thread(self.url, qrcode, frame, timestamp)

    def get_next_qrcode(self, frame, qrcodes):
        """Returns the largest valid QR code, which is neither the
        active QR code nor throttled"""
        height, width = frame.shape[:2]
        frame_size = width * height
        target = None
        targets = [
            dict(
                qrcode=qrcode,
                # Fix: the original called self.qrcode_size(), which does
                # not exist; the method is get_qrcode_size().
                size=self.get_qrcode_size(qrcodes[qrcode])
            )
            for qrcode in qrcodes
        ]
        # Fix: sort descending so the *largest* code is tried first, as
        # the docstring promises (the original sorted ascending).
        targets = sorted(targets, key=lambda k: k['size'], reverse=True)
        for target in targets:
            qrcode = target['qrcode']
            qrcode_size = target['size'] / frame_size
            qrcode_size = round(qrcode_size, 4)
            if self.debug:
                logger.info('QRcode percent of frame: {}%'.format(
                    qrcode_size
                ))
            # Throttle requests for the same QR code.
            if self.active_qrcode != qrcode:
                # Throttle requests for cached QR codes.
                if not self.is_qrcode_throttled(qrcode):
                    # Ensure the QR code is valid.
                    is_valid = self.is_valid_qrcode(qrcode)
                    if self.debug:
                        logger.info('QRcode is valid: {}'.format(is_valid))
                    if is_valid:
                        if self.max_qrcode_size > 0:
                            if qrcode_size > self.max_qrcode_size:
                                self.max_size_exceeded = True
                                break
                        if not self.max_size_exceeded:
                            return qrcode

    def is_valid_qrcode(self, qrcode):
        """Intended to be overriden by subclass."""
        return True if qrcode is not None else False

    def is_qrcode_throttled(self, qrcode):
        """True when *qrcode* sits in either throttle dict."""
        for throttle in (self.ok_throttle_dict, self.not_ok_throttle_dict):
            if qrcode in throttle:
                return True

    def get_qrcode_size(self, qrcode):
        """Area (pixels) of the polygon spanned by the code's corners."""
        contour = numpy.array(qrcode, dtype=numpy.int32)
        return cv2.contourArea(contour)

    def before_zbar(self, timestamp):
        """Remove expired QR codes from throttle dict"""
        for throttle in (self.ok_throttle_dict, self.not_ok_throttle_dict):
            delete = []
            for qrcode in throttle:
                expired = (throttle[qrcode] <= datetime.datetime.now())
                if expired:
                    delete.append(qrcode)
            for qrcode in delete:
                del throttle[qrcode]

    def zbar(self, frame):
        """Scan frame using ZBar"""
        qrcodes = {}
        # Convert to grayscale, as binarization requires
        gray = cv2.cvtColor(frame, cv2.COLOR_RGB2GRAY)
        # Apply Otsu Binarization
        _, threshold = cv2.threshold(
            gray, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU
        )
        try:
            # Convert to string, as ZBar requires
            pil_image = Image.fromarray(threshold)
            width, height = pil_image.size
            raw = pil_image.tostring()
        except:
            logger.error('Error converting to PIL image')
        else:
            try:
                image = zbar.Image(width, height, 'Y800', raw)
            except:
                logger.error('Error converting to ZBar image')
            else:
                self.scanner.scan(image)
                for qrcode in image:
                    location = []
                    for point in qrcode.location:
                        location.append(point)
                    qrcodes[qrcode.data] = location
                    if self.debug:
                        self.successes += 1
        if self.debug:
            frame = cv2.cvtColor(threshold, cv2.COLOR_GRAY2RGB)
        return frame, qrcodes

    def after_zbar(self, frame, qrcodes, timestamp):
        """Intended to be overridden by subclass. Currently, draws boxes
        around QR codes"""
        frame = self.draw_boxes(qrcodes, frame)
        return frame

    def draw_boxes(self, qrcodes, frame):
        """Draw a box around every detected QR code.

        Fix: after_zbar() always called draw_boxes(), but no such method
        existed.  NOTE(review): pending codes are drawn with
        ``not_ok_color``; confirm the intended colour scheme.
        """
        for location in qrcodes.values():
            frame = self.draw_box(
                frame, location, self.not_ok_color, self.box_width
            )
        return frame

    def draw_box(self, frame, location, color, width):
        """Draw a box around around QR code"""
        for index in range(len(location)):
            if (index + 1) == len(location):
                next_index = 0
            else:
                next_index = index + 1
            # From OpenCV 3.0.0, cv2.LINE_AA was renamed cv2.CV_AA
            if cv2.__version__ >= '3.0.0':
                cv2.line(
                    frame,
                    location[index], location[next_index],
                    color,
                    width,
                    lineType=cv2.LINE_AA
                )
            else:
                cv2.line(
                    frame,
                    location[index], location[next_index],
                    color,
                    width,
                    cv2.CV_AA
                )
        return frame

    def is_thread_running(self):
        """Check if the thread is running"""
        # Is a thread active?
        if self.thread is not None:
            if self.thread.is_alive():
                return True

    def launch_thread(self, url, qrcode, frame, timestamp):
        """Launch a thread to auth against server with requests library"""
        try:
            # Fix: the original assigned ``Thread(...).start()`` -- which
            # is always None -- to self.thread, so is_thread_running()
            # could never report a live thread.
            self.thread = Thread(
                target=server_auth,
                args=(
                    self.queue,
                    url,
                    qrcode,
                    Image.fromarray(frame),
                    timestamp
                )
            )
            self.thread.start()
        except:
            logger.error('Thread failed to start')
        else:
            self.after_thread_started(qrcode, timestamp)

    def after_thread_started(self, qrcode, timestamp):
        """Runs after thread is started. Throttles not OK results"""
        # Throttle requests
        self.not_ok_throttle_dict[qrcode] = (
            timestamp + datetime.timedelta(seconds=self.not_ok_throttle)
        )
        self.active_qrcode = qrcode
        logger.info('Sent QRcode to server {}'.format(self.active_qrcode))

    def process_results_from_queue(self, timestamp):
        """Throttles OK results. Prepares response for GUI"""
        if not self.queue.empty():
            # Clear active qrcode
            self.active_qrcode = None
            response = self.queue.get()
            if response is not None:
                # Response is OK. Flag the QR code as OK, and throttle it
                if 'qrcode' in response:
                    qrcode = response['qrcode']
                    ok_throttle = datetime.timedelta(seconds=self.ok_throttle)
                    self.ok_throttle_dict[qrcode] = timestamp + ok_throttle
            self.responses.append(response)
| [
"logging.getLogger",
"requests.post",
"numpy.array",
"zbar.Image",
"datetime.timedelta",
"os.remove",
"os.path.exists",
"cv2.threshold",
"cv2.line",
"cv2.contourArea",
"os.mkdir",
"cv2.cvtColor",
"PIL.Image.fromarray",
"os.path.join",
"os.getcwd",
"zbar.ImageScanner",
"datetime.datet... | [((202, 229), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (219, 229), False, 'import logging\n'), ((254, 265), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (263, 265), False, 'import os\n'), ((865, 893), 'os.path.join', 'os.path.join', (['path', 'filename'], {}), '(path, filename)\n', (877, 893), False, 'import os\n'), ((1240, 1295), 'datetime.datetime.strftime', 'datetime.datetime.strftime', (['timestamp', '"""%Y%m%d%H%M%S%f"""'], {}), "(timestamp, '%Y%m%d%H%M%S%f')\n", (1266, 1295), False, 'import datetime\n'), ((354, 378), 'os.path.exists', 'os.path.exists', (['TEMP_DIR'], {}), '(TEMP_DIR)\n', (368, 378), False, 'import os\n'), ((388, 406), 'os.mkdir', 'os.mkdir', (['TEMP_DIR'], {}), '(TEMP_DIR)\n', (396, 406), False, 'import os\n'), ((1066, 1081), 'os.remove', 'os.remove', (['path'], {}), '(path)\n', (1075, 1081), False, 'import os\n'), ((1863, 1922), 'requests.post', 'requests.post', (['url'], {'data': 'data', 'files': 'files', 'timeout': 'timeout'}), '(url, data=data, files=files, timeout=timeout)\n', (1876, 1922), False, 'import requests\n'), ((2880, 2887), 'Queue.Queue', 'Queue', ([], {}), '()\n', (2885, 2887), False, 'from Queue import Queue\n'), ((2932, 2951), 'zbar.ImageScanner', 'zbar.ImageScanner', ([], {}), '()\n', (2949, 2951), False, 'import zbar\n'), ((6076, 6114), 'numpy.array', 'numpy.array', (['qrcode'], {'dtype': 'numpy.int32'}), '(qrcode, dtype=numpy.int32)\n', (6087, 6114), False, 'import numpy\n'), ((6130, 6154), 'cv2.contourArea', 'cv2.contourArea', (['contour'], {}), '(contour)\n', (6145, 6154), False, 'import cv2\n'), ((6757, 6796), 'cv2.cvtColor', 'cv2.cvtColor', (['frame', 'cv2.COLOR_RGB2GRAY'], {}), '(frame, cv2.COLOR_RGB2GRAY)\n', (6769, 6796), False, 'import cv2\n'), ((6854, 6918), 'cv2.threshold', 'cv2.threshold', (['gray', '(0)', '(255)', '(cv2.THRESH_BINARY + cv2.THRESH_OTSU)'], {}), '(gray, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)\n', (6867, 6918), False, 'import cv2\n'), 
((1827, 1850), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1848, 1850), False, 'import datetime\n'), ((2028, 2051), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2049, 2051), False, 'import datetime\n'), ((7028, 7054), 'PIL.Image.fromarray', 'Image.fromarray', (['threshold'], {}), '(threshold)\n', (7043, 7054), False, 'from PIL import Image\n'), ((7790, 7833), 'cv2.cvtColor', 'cv2.cvtColor', (['threshold', 'cv2.COLOR_GRAY2RGB'], {}), '(threshold, cv2.COLOR_GRAY2RGB)\n', (7802, 7833), False, 'import cv2\n'), ((9968, 10016), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': 'self.not_ok_throttle'}), '(seconds=self.not_ok_throttle)\n', (9986, 10016), False, 'import datetime\n'), ((7266, 7304), 'zbar.Image', 'zbar.Image', (['width', 'height', '"""Y800"""', 'raw'], {}), "(width, height, 'Y800', raw)\n", (7276, 7304), False, 'import zbar\n'), ((8492, 8586), 'cv2.line', 'cv2.line', (['frame', 'location[index]', 'location[next_index]', 'color', 'width'], {'lineType': 'cv2.LINE_AA'}), '(frame, location[index], location[next_index], color, width,\n lineType=cv2.LINE_AA)\n', (8500, 8586), False, 'import cv2\n'), ((8735, 8814), 'cv2.line', 'cv2.line', (['frame', 'location[index]', 'location[next_index]', 'color', 'width', 'cv2.CV_AA'], {}), '(frame, location[index], location[next_index], color, width, cv2.CV_AA)\n', (8743, 8814), False, 'import cv2\n'), ((3996, 4018), 'PIL.Image.fromarray', 'Image.fromarray', (['frame'], {}), '(frame)\n', (4011, 4018), False, 'from PIL import Image\n'), ((6434, 6457), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (6455, 6457), False, 'import datetime\n'), ((10635, 10679), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': 'self.ok_throttle'}), '(seconds=self.ok_throttle)\n', (10653, 10679), False, 'import datetime\n'), ((9513, 9535), 'PIL.Image.fromarray', 'Image.fromarray', (['frame'], {}), '(frame)\n', (9528, 9535), False, 'from PIL import Image\n')] |
from threading import *
from tkinter import *
from tkinter.filedialog import askopenfilename
import tkinter, tkinter.scrolledtext
import os
import sys
import urllib.request
import glob
import time
import hashlib
import quarantaene
from vta import vtapi
import argparse
# Platform detection: the quarantine folder lives under the current
# working directory; Windows paths use backslashes, everything else
# forward slashes.
# NOTE(review): ``"win" in sys.platform`` also matches "darwin" (macOS),
# which would take the Windows branch -- confirm intended platforms.
os_name = sys.platform
# Cached list of quarantined file paths (refreshed by quarantine()).
terminations = []
if "win" in os_name:
    if not os.path.exists("MultiAV\\Quarantine\\"):
        os.makedirs("MultiAV\\Quarantine\\")
    quarantine_folder = "MultiAV\\Quarantine\\*"
    file_to_quarantine = "MultiAV\\Quarantine\\"
    transfer = os. getcwd() + "\\Transfer\\*"
else:
    if not os.path.exists("MultiAV/Quarantine/"):
        os.makedirs("MultiAV/Quarantine/")
    quarantine_folder = "MultiAV/Quarantine/*"
    file_to_quarantine = "MultiAV/Quarantine/"
    transfer = os. getcwd() + "/Transfer/*"
# Tkinter widget handles; populated by gui_thread() and shared via
# ``global`` declarations throughout this module.
main = None
update_button = None
details_button = None
scan_button = None
quit_button = None
b_delete = None
b_delete_all = None
b_restore = None
b_restore_all = None
b_add_file = None
text_box = None
li = None
# NOTE(review): ``file`` shadows the (Python 2) builtin name; it is used
# as a module-level scratch variable.
file= None
def quarantine():
    """Refresh the quarantine listbox and log the quarantined files.

    Re-reads the quarantine folder from disk, repopulates the listbox
    and (re)binds the quarantine action buttons.
    """
    global text_box
    global terminations
    global li
    global b_delete
    global b_delete_all
    global b_restore
    global b_restore_all
    global b_add_file
    # Fix: the original cleared the listbox with a loop that used the
    # *string length* of each entry as a delete index range, which only
    # worked by accident; delete(0, END) is the supported way to clear.
    li.delete(0, END)
    li.update()
    terminations = glob.glob(quarantine_folder)
    if terminations == []:
        text_box.insert(END, "[ + ] No files in quarantine\n", "positive")
        text_box.tag_config('positive', foreground="green")
        text_box.see(END)
        text_box.update()
    else:
        text_box.insert(END, "[ + ] Files in quarantine:\n", "positive")
        text_box.tag_config('positive', foreground="green")
        text_box.see(END)
        text_box.update()
        for i in terminations:
            text_box.insert(END, "[ * ] " + i + "\n", "info")
            text_box.tag_config("info", background = "red")
            text_box.see(END)
            text_box.update()
            li.insert(END, i)
            li.update()
    # (Re)wire the quarantine management buttons to the dispatcher.
    b_delete_all["command"] = lambda: button_action_handler("delete_all")
    b_delete["command"] = lambda: button_action_handler("delete")
    b_restore["command"] = lambda: button_action_handler("restore")
    b_restore_all["command"] = lambda: button_action_handler("restore_all")
    b_add_file["command"] = lambda: button_action_handler("add_file")
def delete(file, ALL):
    """Delete quarantined file(s) from disk and the listbox.

    ``ALL == 1`` removes every file in ``terminations``; ``ALL == 0``
    removes only *file* (the listbox's active entry).  Note: the ``file``
    parameter shadows the builtin of the same name.
    """
    global li
    global text_box
    global terminations
    if len(terminations) != 0:
        if ALL == 1:
            for i in range(len(terminations)):
                os.remove(terminations[i])
                text_box.insert(END, "[ + ] Deletion successful: \n" + terminations[i] + "\n", "positive")
                text_box.tag_config("positive", foreground="green")
                text_box.see(END)
                text_box.update()
                # NOTE(review): delete(0, len(...)) uses the *string
                # length* of the path as a listbox index range; clearing
                # with delete(0, END) is likely what was meant.
                li.delete(0, len(terminations[i]))
                li.update()
        elif ALL == 0:
            os.remove(file)
            # NOTE(review): same string-length-as-index concern as above.
            li.delete(ACTIVE, len(file))
            li.update()
            text_box.insert(END, "[ + ] Deletion successful:\n" + file + "\n", "positive")
            text_box.tag_config("positive", foreground="green")
            text_box.see(END)
            text_box.update()
            # Refresh the cached quarantine listing and the listbox.
            terminations = glob.glob(quarantine_folder)
            for i in terminations:
                li.insert(END, i)
                li.update()
    else:
        text_box.insert(END, "[ - ] Unable to locate any files\n", "negative")
        text_box.tag_config("negative", foreground="red")
        text_box.see(END)
        text_box.update()
def restore(file, ALL):
    """Restore quarantined file(s) by base64-decoding them back in place.

    ``ALL == 1`` restores every file in ``terminations``; ``ALL == 0``
    restores only *file* (the listbox's active entry).  Decoding is
    delegated to the project-local ``quarantaene`` module.
    """
    global li
    global text_box
    global terminations
    if len(terminations) != 0:
        if ALL == 1:
            for i in range(len(terminations)):
                quarantaene.decode_base64(terminations[i])
                text_box.insert(END, "[ + ] Successfully restored\n" + terminations[i] + "\n", 'positive')
                text_box.tag_config('positive', foreground="green")
                text_box.see(END)
                text_box.update()
                # NOTE(review): uses the path's *string length* as a
                # listbox index range -- see the same pattern in delete().
                li.delete(0, len(terminations[i]))
                li.update()
        elif ALL == 0:
            quarantaene.decode_base64(file)
            # NOTE(review): same string-length-as-index concern as above.
            li.delete(ACTIVE, len(file))
            text_box.insert(END, "[ + ] Successfully restored\n" + file + "\n", "positive")
            text_box.tag_config("positive", foreground="green")
            text_box.see(END)
            text_box.update()
            # Refresh the cached quarantine listing and the listbox.
            terminations = glob.glob(quarantine_folder)
            for i in terminations:
                li.insert(END, i)
                li.update()
    else:
        text_box.insert(END, "[ - ] Unable to locate any files\n", "negative")
        text_box.tag_config("negative", foreground="red")
        text_box.see(END)
        text_box.update()
def add_file_to_quarantine():
    """Ask the user for a file, move it into quarantine, refresh the list."""
    global li
    global terminations
    file = askopenfilename()
    quarantaene.encode_base64(file, file_to_quarantine)
    text_box.insert(END, "[ + ] Moved to quarantine:\n" + file + "\n", "positive")
    text_box.tag_config("positive", foreground="green")
    text_box.see(END)
    text_box.update()
    li.update()
    # Fix: the original cleared the listbox with a get()/delete() loop
    # that used entry *string lengths* as index ranges; delete(0, END)
    # is the supported way to clear a Listbox before repopulating.
    li.delete(0, END)
    terminations = glob.glob(quarantine_folder)
    for i in terminations:
        li.insert(END, i)
    li.update()
def parse_options():
    """Build the command-line parser and return the parsed arguments."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-F", "--results-file", dest="sfile",
        help="Get report of previously scanned file. If the "
             "given filename cannot be found/opened, we'll assume "
             "it's a hash.")
    parser.add_argument("-f", "--file", dest="file", help="Scan file")
    parser.add_argument(
        "-v", "--verbose", default=False, action="store_true",
        dest="verbose", help="Print complete reply")
    return parser.parse_args()
def automatic_scan(path):
    """Scan the file at *path* with VirusTotal and quarantine on a hit.

    Uploads the file, polls for results, and moves the file into
    quarantine when any engine flags it.  Nearly identical to scan(),
    which differs only in prompting for the file -- candidates for
    sharing a helper.
    """
    global text_box
    global md5hash
    match = False
    file = path
    start = time.time()
    text_box.insert(END, "[ * ] Scanning " + file + "\n")
    text_box.see(END)
    text_box.update()
    arg = parse_options()
    arg.file=file
    vt = vtapi(arg.verbose)
    vt.sendfile(arg.file)
    try:
        # Hash the whole file in one read; large files can exhaust memory.
        f = open(file, "rb")
        content = f.read()
        f.close()
        content = create_md5(content)
        md5hash=content.decode("utf-8")
        text_box.insert(END, "MD5-Hash: " + md5hash + "\n")
        text_box.see(END)
        text_box.update()
    except MemoryError:
        text_box.insert(END, "[ - ] Unable to create MD5-Hash:\n----->MemoryError!\n", 'negative')
        text_box.insert(END, "[ ! ] Only select files under 1 GB\n", "negative")
        text_box.tag_config('negative', foreground="red")
        text_box.see(END)
        text_box.update()
        return None
    except Exception as e:
        text_box.insert(END, "[ ! ] Unable to handle problem\n[ ! ] Try again/file might be corrupted\n", "negative")
        text_box.tag_config('negative', foreground="red")
        text_box.see(END)
        text_box.update()
        return None
    # Busy-wait until VirusTotal returns a report; a non-zero result
    # counts the "Clean" verdicts to derive a detection percentage.
    # NOTE(review): there is no delay or retry cap here -- this loop can
    # hammer the API until a report appears.
    while(True):
        scan_result=vt.print_scan_results(vt.results("file", md5hash))
        if(scan_result!=0):
            un=scan_result.count("Clean")
            line_count=scan_result.count("\n")-2
            percent=100-((un/line_count)*100)
            if(percent!=0):
                match=True
            break
    text_box.insert(END, "[ * ] Scan duration: {0}\n".format(round(time.time()-start, 2)))
    text_box.see(END)
    text_box.update()
    if (match==True):
        # Any non-zero detection rate sends the file to quarantine.
        quarantaene.encode_base64(file, file_to_quarantine)
        text_box.insert(END, "[ ! ] Threat found: {0}%\n[ ! ] File was moved into quarantine\n".format(percent), "important")
        text_box.tag_config("important", foreground="red")
        text_box.see(END)
        text_box.update()
    else:
        text_box.insert(END, "[ + ] No threat was found\n", "positive")
        text_box.tag_config("positive", foreground="green")
        text_box.see(END)
        text_box.update()
def scan():
    """Prompt for a file, scan it with VirusTotal, quarantine on a hit.

    Identical to automatic_scan() except the file comes from a file
    dialog instead of a parameter -- candidates for sharing a helper.
    """
    global text_box
    global md5hash
    match = False
    file = askopenfilename()
    start = time.time()
    text_box.insert(END, "[ * ] Scanning " + file + "\n")
    text_box.see(END)
    text_box.update()
    arg = parse_options()
    arg.file=file
    vt = vtapi(arg.verbose)
    vt.sendfile(arg.file)
    try:
        # Hash the whole file in one read; large files can exhaust memory.
        f = open(file, "rb")
        content = f.read()
        f.close()
        content = create_md5(content)
        md5hash=content.decode("utf-8")
        text_box.insert(END, "MD5-Hash: " + md5hash + "\n")
        text_box.see(END)
        text_box.update()
    except MemoryError:
        text_box.insert(END, "[ - ] Unable to create MD5-Hash:\n----->MemoryError!\n", 'negative')
        text_box.insert(END, "[ ! ] Only select files under 1 GB\n", "negative")
        text_box.tag_config('negative', foreground="red")
        text_box.see(END)
        text_box.update()
        return None
    except Exception as e:
        text_box.insert(END, "[ ! ] Unable to handle problem\n[ ! ] Try again/file might be corrupted\n", "negative")
        text_box.tag_config('negative', foreground="red")
        text_box.see(END)
        text_box.update()
        return None
    # Busy-wait for the VirusTotal report; see the NOTE in
    # automatic_scan() about the missing delay/retry cap.
    while(True):
        scan_result=vt.print_scan_results(vt.results("file", md5hash))
        if(scan_result!=0):
            un=scan_result.count("Clean")
            line_count=scan_result.count("\n")-2
            percent=100-((un/line_count)*100)
            if(percent!=0):
                match=True
            break
    text_box.insert(END, "[ * ] Scan duration: {0}\n".format(round(time.time()-start, 2)))
    text_box.see(END)
    text_box.update()
    if (match==True):
        # Any non-zero detection rate sends the file to quarantine.
        quarantaene.encode_base64(file, file_to_quarantine)
        text_box.insert(END, "[ ! ] Threat found: {0}%\n[ ! ] File was moved into quarantine\n".format(percent), "important")
        text_box.tag_config("important", foreground="red")
        text_box.see(END)
        text_box.update()
    else:
        text_box.insert(END, "[ + ] No threat was found\n", "positive")
        text_box.tag_config("positive", foreground="green")
        text_box.see(END)
        text_box.update()
def create_md5(content):
    """Return the MD5 hex digest of *content* as UTF-8 bytes."""
    digest = hashlib.md5(content).hexdigest()
    return digest.encode("utf-8")
def detailedReport():
    """Print the full VirusTotal report for the last scanned file.

    Uses the module-level ``md5hash`` set by scan()/automatic_scan();
    calling this before a scan would query with an unset hash.
    """
    global text_box
    global md5hash
    arg = parse_options()
    arg.sfile=md5hash
    vt = vtapi(arg.verbose)
    scan_result1=vt.print_scan_results(vt.results("file", md5hash))
    text_box.insert(END, scan_result1)
    text_box.see(END)
    text_box.update()
def update():
    """Placeholder for the Update button; not implemented yet."""
    # The global declaration alone has no effect without an assignment.
    global text_box
def closing():
    """Destroy the main Tk window and terminate the process."""
    main.destroy()
    sys.exit()
def button_action_handler(s):
    """Dispatch a GUI button press *s* to its worker thread.

    Each action runs on a worker thread so the Tk mainloop stays
    responsive.  Fixes: the original contained the "details_button"
    branch twice (launching two report threads per click), and the
    quarantine toggle re-queried winfo_viewable() after place(), making
    the show/hide behaviour timing-dependent.
    """
    global text_box
    global b_delete
    global b_delete_all
    global b_restore
    global b_restore_all
    global b_add_file
    global li
    if s == "delete":
        Thread(target=delete, args=(li.get(ACTIVE), 0)).start()
    elif s == "delete_all":
        Thread(target=delete, args=(0, 1)).start()
    elif s == "restore":
        Thread(target=restore, args=(li.get(ACTIVE), 0)).start()
    elif s == "restore_all":
        Thread(target=restore, args=(0, 1)).start()
    elif s == "add_file":
        Thread(target=add_file_to_quarantine).start()
    elif s == "details_button":
        # Fix: this branch used to appear twice, starting two threads.
        Thread(target=detailedReport).start()
    elif s == "scan_button":
        Thread(target=scan).start()
    elif s == "update_button":
        Thread(target=update).start()
    elif s == "quarantine_button":
        # Toggle the quarantine widgets; query visibility once so the
        # just-placed widgets are not immediately hidden again.
        visible = li.winfo_viewable()
        if visible == 0:
            b_delete.place(x=605, y=109)
            b_delete_all.place(x=605, y=134)
            b_restore.place(x=605, y=159)
            b_restore_all.place(x=605, y=184)
            b_add_file.place(x=605, y=209)
            li.place(x=605, y=0)
            Thread(target=quarantine).start()
        if visible == 1:
            b_delete.place_forget()
            b_delete_all.place_forget()
            b_restore.place_forget()
            b_restore_all.place_forget()
            b_add_file.place_forget()
            li.place_forget()
    elif s == "quit_button":
        Thread(target=closing).start()
def gui_thread():
    """Build the MultiAV tkinter window, all of its widgets, then enter the
    tkinter event loop (this call blocks until the window is closed).

    On startup, if the transfer directory contains any files, the newest one
    is scanned automatically.
    """
    global main
    global update_button
    global details_button
    global scan_button
    global url_scan_button
    global url_scan_button
    global quit_button
    global text_box
    global li
    global b_delete
    global b_delete_all
    global b_restore
    global b_restore_all
    global b_add_file
    # --- main window ---
    main = tkinter.Tk()
    main.title("MultiAV")
    main.wm_iconbitmap("")
    main.geometry("800x240")
    main.resizable(False, False)
    # Shared button dimensions ("Hoehe"/"Breite" are German for height/width).
    hoehe = 2
    breite = 16
    # --- left-hand column of action buttons; all dispatch via button_action_handler ---
    scan_button = tkinter.Button(main,text = "Scan", command=lambda:button_action_handler("scan_button"), height = hoehe, width = breite)
    scan_button.grid(row = 0, column = 0)
    details_button = tkinter.Button(main,text = "Detailed Result", command=lambda:button_action_handler("details_button"), height = hoehe, width = breite)
    details_button.grid(row = 1, column = 0)
    update_button = tkinter.Button(main,text = "Update", command=lambda:button_action_handler("update_button"), height = hoehe, width = breite)
    update_button.grid(row = 2, column = 0)
    quarantine_button = tkinter.Button(main,text = "Quarantine", command=lambda:button_action_handler("quarantine_button"), height = hoehe, width = breite)
    quarantine_button.grid(row = 3, column = 0)
    quit_button = tkinter.Button(main,text = "Close", command=lambda:button_action_handler("quit_button"), height = hoehe, width = breite)
    quit_button.grid(row = 4, column = 0, sticky="w")
    # --- quarantine panel buttons (hidden until the Quarantine button toggles them) ---
    b_delete = tkinter.Button(main,text = "Remove current", height=0, width = 21, justify=CENTER)
    b_delete_all = tkinter.Button(main,text = "Remove all", height = 0, width = 21, justify=CENTER)
    b_restore = tkinter.Button(main,text = "Restore current", height=0, width = 21, justify=CENTER)
    b_restore_all = tkinter.Button(main,text = "Restore all", height = 0, width = 21, justify=CENTER)
    b_add_file = tkinter.Button(main,text = "Add file", height = 0, width = 21, justify=CENTER)
    # Place once so the widgets get their geometry, then hide them again.
    b_delete.place(x = 605, y = 109)
    b_delete_all.place(x = 605, y = 134)
    b_restore.place(x = 605, y = 159)
    b_restore_all.place(x = 605, y = 184)
    b_add_file.place(x = 605, y = 209)
    b_delete.place_forget()
    b_delete_all.place_forget()
    b_restore.place_forget()
    b_restore_all.place_forget()
    b_add_file.place_forget()
    # --- scrolling output area and (initially hidden) quarantine file list ---
    text_box = tkinter.scrolledtext.ScrolledText(main)
    text_box.place(height = 240, width = 454,x = 153, y = 0)
    li = tkinter.Listbox(main, height=6, width = 24)
    li.place(x = 605, y = 0)
    li.place_forget()
    text_box.insert(END, "Your System is Protected\n", "VIP")
    text_box.tag_config("VIP", background='yellow')
    text_box.insert(END, "[ + ] Preparing the program\n", 'positive')
    text_box.tag_config('positive', foreground='green')
    text_box.see(END)
    text_box.update()
    # Auto-scan the most recently created file in the transfer location.
    # NOTE(review): `transfer` is presumably a glob pattern defined elsewhere
    # in this module — confirm.
    list_of_files = glob.glob(transfer)
    if(len(list_of_files)>0):
        latest_file = max(list_of_files, key=os.path.getctime)
        automatic_scan(latest_file)
    main.mainloop()
# Run the GUI on a separate thread so the process' main thread stays free
# for the scanning/quarantine workers spawned by button_action_handler.
t_main = Thread(target=gui_thread)
t_main.start()
| [
"os.path.exists",
"quarantaene.encode_base64",
"argparse.ArgumentParser",
"hashlib.md5",
"os.makedirs",
"vta.vtapi",
"tkinter.Button",
"os.getcwd",
"quarantaene.decode_base64",
"tkinter.scrolledtext.ScrolledText",
"tkinter.Tk",
"glob.glob",
"sys.exit",
"time.time",
"tkinter.Listbox",
"... | [((1415, 1443), 'glob.glob', 'glob.glob', (['quarantine_folder'], {}), '(quarantine_folder)\n', (1424, 1443), False, 'import glob\n'), ((4976, 4993), 'tkinter.filedialog.askopenfilename', 'askopenfilename', ([], {}), '()\n', (4991, 4993), False, 'from tkinter.filedialog import askopenfilename\n'), ((4998, 5049), 'quarantaene.encode_base64', 'quarantaene.encode_base64', (['file', 'file_to_quarantine'], {}), '(file, file_to_quarantine)\n', (5023, 5049), False, 'import quarantaene\n'), ((5441, 5469), 'glob.glob', 'glob.glob', (['quarantine_folder'], {}), '(quarantine_folder)\n', (5450, 5469), False, 'import glob\n'), ((5578, 5603), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (5601, 5603), False, 'import argparse\n'), ((6249, 6260), 'time.time', 'time.time', ([], {}), '()\n', (6258, 6260), False, 'import time\n'), ((6416, 6434), 'vta.vtapi', 'vtapi', (['arg.verbose'], {}), '(arg.verbose)\n', (6421, 6434), False, 'from vta import vtapi\n'), ((8402, 8419), 'tkinter.filedialog.askopenfilename', 'askopenfilename', ([], {}), '()\n', (8417, 8419), False, 'from tkinter.filedialog import askopenfilename\n'), ((8432, 8443), 'time.time', 'time.time', ([], {}), '()\n', (8441, 8443), False, 'import time\n'), ((8599, 8617), 'vta.vtapi', 'vtapi', (['arg.verbose'], {}), '(arg.verbose)\n', (8604, 8617), False, 'from vta import vtapi\n'), ((10539, 10552), 'hashlib.md5', 'hashlib.md5', ([], {}), '()\n', (10550, 10552), False, 'import hashlib\n'), ((10738, 10756), 'vta.vtapi', 'vtapi', (['arg.verbose'], {}), '(arg.verbose)\n', (10743, 10756), False, 'from vta import vtapi\n'), ((10982, 10992), 'sys.exit', 'sys.exit', ([], {}), '()\n', (10990, 10992), False, 'import sys\n'), ((13097, 13109), 'tkinter.Tk', 'tkinter.Tk', ([], {}), '()\n', (13107, 13109), False, 'import tkinter, tkinter.scrolledtext\n'), ((14236, 14315), 'tkinter.Button', 'tkinter.Button', (['main'], {'text': '"""Remove current"""', 'height': '(0)', 'width': '(21)', 'justify': 'CENTER'}), 
"(main, text='Remove current', height=0, width=21, justify=CENTER)\n", (14250, 14315), False, 'import tkinter, tkinter.scrolledtext\n'), ((14338, 14413), 'tkinter.Button', 'tkinter.Button', (['main'], {'text': '"""Remove all"""', 'height': '(0)', 'width': '(21)', 'justify': 'CENTER'}), "(main, text='Remove all', height=0, width=21, justify=CENTER)\n", (14352, 14413), False, 'import tkinter, tkinter.scrolledtext\n'), ((14435, 14520), 'tkinter.Button', 'tkinter.Button', (['main'], {'text': '"""Restore current"""', 'height': '(0)', 'width': '(21)', 'justify': 'CENTER'}), "(main, text='Restore current', height=0, width=21, justify=CENTER\n )\n", (14449, 14520), False, 'import tkinter, tkinter.scrolledtext\n'), ((14539, 14615), 'tkinter.Button', 'tkinter.Button', (['main'], {'text': '"""Restore all"""', 'height': '(0)', 'width': '(21)', 'justify': 'CENTER'}), "(main, text='Restore all', height=0, width=21, justify=CENTER)\n", (14553, 14615), False, 'import tkinter, tkinter.scrolledtext\n'), ((14638, 14711), 'tkinter.Button', 'tkinter.Button', (['main'], {'text': '"""Add file"""', 'height': '(0)', 'width': '(21)', 'justify': 'CENTER'}), "(main, text='Add file', height=0, width=21, justify=CENTER)\n", (14652, 14711), False, 'import tkinter, tkinter.scrolledtext\n'), ((15082, 15121), 'tkinter.scrolledtext.ScrolledText', 'tkinter.scrolledtext.ScrolledText', (['main'], {}), '(main)\n', (15115, 15121), False, 'import tkinter, tkinter.scrolledtext\n'), ((15193, 15234), 'tkinter.Listbox', 'tkinter.Listbox', (['main'], {'height': '(6)', 'width': '(24)'}), '(main, height=6, width=24)\n', (15208, 15234), False, 'import tkinter, tkinter.scrolledtext\n'), ((15594, 15613), 'glob.glob', 'glob.glob', (['transfer'], {}), '(transfer)\n', (15603, 15613), False, 'import glob\n'), ((344, 383), 'os.path.exists', 'os.path.exists', (['"""MultiAV\\\\Quarantine\\\\"""'], {}), "('MultiAV\\\\Quarantine\\\\')\n", (358, 383), False, 'import os\n'), ((393, 429), 'os.makedirs', 'os.makedirs', 
(['"""MultiAV\\\\Quarantine\\\\"""'], {}), "('MultiAV\\\\Quarantine\\\\')\n", (404, 429), False, 'import os\n'), ((543, 554), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (552, 554), False, 'import os\n'), ((591, 628), 'os.path.exists', 'os.path.exists', (['"""MultiAV/Quarantine/"""'], {}), "('MultiAV/Quarantine/')\n", (605, 628), False, 'import os\n'), ((638, 672), 'os.makedirs', 'os.makedirs', (['"""MultiAV/Quarantine/"""'], {}), "('MultiAV/Quarantine/')\n", (649, 672), False, 'import os\n'), ((782, 793), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (791, 793), False, 'import os\n'), ((3368, 3396), 'glob.glob', 'glob.glob', (['quarantine_folder'], {}), '(quarantine_folder)\n', (3377, 3396), False, 'import glob\n'), ((4587, 4615), 'glob.glob', 'glob.glob', (['quarantine_folder'], {}), '(quarantine_folder)\n', (4596, 4615), False, 'import glob\n'), ((7838, 7889), 'quarantaene.encode_base64', 'quarantaene.encode_base64', (['file', 'file_to_quarantine'], {}), '(file, file_to_quarantine)\n', (7863, 7889), False, 'import quarantaene\n'), ((10021, 10072), 'quarantaene.encode_base64', 'quarantaene.encode_base64', (['file', 'file_to_quarantine'], {}), '(file, file_to_quarantine)\n', (10046, 10072), False, 'import quarantaene\n'), ((2665, 2691), 'os.remove', 'os.remove', (['terminations[i]'], {}), '(terminations[i])\n', (2674, 2691), False, 'import os\n'), ((3049, 3064), 'os.remove', 'os.remove', (['file'], {}), '(file)\n', (3058, 3064), False, 'import os\n'), ((3875, 3917), 'quarantaene.decode_base64', 'quarantaene.decode_base64', (['terminations[i]'], {}), '(terminations[i])\n', (3900, 3917), False, 'import quarantaene\n'), ((4275, 4306), 'quarantaene.decode_base64', 'quarantaene.decode_base64', (['file'], {}), '(file)\n', (4300, 4306), False, 'import quarantaene\n'), ((7740, 7751), 'time.time', 'time.time', ([], {}), '()\n', (7749, 7751), False, 'import time\n'), ((9923, 9934), 'time.time', 'time.time', ([], {}), '()\n', (9932, 9934), False, 'import time\n')] |
import tensorflow as tf
import numpy as np
import os
import time
from utils import random_batch, normalize, similarity, loss_cal, optim
from configuration import get_config
from tensorflow.contrib import rnn
# Global run configuration (N speakers, M utterances, lstm sizes, paths, ...)
# shared by train() and test().
config = get_config()
def train(path):
    """Train the GE2E speaker-verification LSTM and checkpoint it under *path*.

    Builds the TF1 graph (embedding LSTM, similarity matrix, GE2E loss),
    then runs ``config.iteration`` optimization steps, logging the loss to
    TensorBoard and saving a checkpoint every 10000 steps.

    path -- directory that will receive "Check_Point/" (model snapshots)
            and "logs/" (TensorBoard summaries).
    """
    tf.reset_default_graph() # reset graph
    # draw graph
    batch = tf.placeholder(shape= [None, config.N*config.M, 40], dtype=tf.float32)  # input batch (time x batch x n_mel)
    lr = tf.placeholder(dtype= tf.float32)  # learning rate (decayed manually below)
    global_step = tf.Variable(0, name='global_step', trainable=False)
    # learnable scale/offset applied to the cosine-similarity matrix
    w = tf.get_variable("w", initializer= np.array([10], dtype=np.float32))
    b = tf.get_variable("b", initializer= np.array([-5], dtype=np.float32))
    # embedding lstm (3-layer default)
    with tf.variable_scope("lstm"):
        lstm_cells = [tf.contrib.rnn.LSTMCell(num_units=config.hidden, num_proj=config.proj) for i in range(config.num_layer)]
        lstm = tf.contrib.rnn.MultiRNNCell(lstm_cells)    # define lstm op and variables
        outputs, _ = tf.nn.dynamic_rnn(cell=lstm, inputs=batch, dtype=tf.float32, time_major=True)   # for TI-VS must use dynamic rnn
        embedded = outputs[-1]              # the last output is the embedded d-vector
        embedded = normalize(embedded)     # normalize
    print("embedded size: ", embedded.shape)
    # loss
    sim_matrix = similarity(embedded, w, b)
    print("similarity matrix size: ", sim_matrix.shape)
    loss = loss_cal(sim_matrix, type=config.loss)
    # optimizer operation
    trainable_vars = tf.trainable_variables()   # get variable list
    optimizer = optim(lr)                    # optimizer type is determined by configuration
    # compute gradients of variables with respect to loss
    # (renamed from "vars" so the builtin is not shadowed)
    grads, variables = zip(*optimizer.compute_gradients(loss))
    grads_clip, _ = tf.clip_by_global_norm(grads, 3.0)    # l2 norm clipping by 3
    # smaller gradient scale for w, b (the first two variables)
    grads_rescale = [0.01*grad for grad in grads_clip[:2]] + grads_clip[2:]
    train_op = optimizer.apply_gradients(zip(grads_rescale, variables), global_step= global_step)  # gradient update operation
    # check variables memory
    variable_count = np.sum(np.array([np.prod(np.array(v.get_shape().as_list())) for v in trainable_vars]))
    print("total variables :", variable_count)
    # record loss
    loss_summary = tf.summary.scalar("loss", loss)
    merged = tf.summary.merge_all()
    saver = tf.train.Saver()
    # training session
    with tf.Session() as sess:
        tf.global_variables_initializer().run()
        os.makedirs(os.path.join(path, "Check_Point"), exist_ok=True)  # make folder to save model
        os.makedirs(os.path.join(path, "logs"), exist_ok=True)          # make folder to save log
        writer = tf.summary.FileWriter(os.path.join(path, "logs"), sess.graph)
        lr_factor = 1   # lr decay factor ( 1/2 per 10000 iteration)
        loss_acc = 0    # accumulated loss ( for running average of loss)
        # loop variable renamed from "iter" (shadowed the builtin); the unused
        # "epoch" counter from the original was removed.
        for step in range(config.iteration):
            # run forward and backward propagation and update parameters
            _, loss_cur, summary = sess.run([train_op, loss, merged],
                                  feed_dict={batch: random_batch(), lr: config.lr*lr_factor})
            loss_acc += loss_cur    # accumulated loss for each 100 iteration
            if step % 10 == 0:
                writer.add_summary(summary, step)   # write at tensorboard
            if (step+1) % 100 == 0:
                print("(iter : %d) loss: %.4f" % ((step+1),loss_acc/100))
                loss_acc = 0    # reset accumulated loss
            if (step+1) % 10000 == 0:
                lr_factor /= 2  # lr decay
                print("learning rate is decayed! current lr : ", config.lr*lr_factor)
            if (step+1) % 10000 == 0:
                saver.save(sess, os.path.join(path, "./Check_Point/model.ckpt"), global_step=step//10000)
                print("model is saved!")
# Test Session
def test(path):
    """Evaluate a trained checkpoint: build the enrollment/verification graph,
    restore the checkpoint matching ``config.model_num`` from *path*, compute
    the N x M x N similarity matrix, and sweep thresholds to report the EER.

    path -- directory containing the "Check_Point" folder written by train().
    Raises AssertionError if no checkpoint matches config.model_num.
    """
    tf.reset_default_graph()
    # draw graph
    enroll = tf.placeholder(shape=[None, config.N*config.M, 40], dtype=tf.float32)  # enrollment batch (time x batch x n_mel)
    verif = tf.placeholder(shape=[None, config.N*config.M, 40], dtype=tf.float32)  # verification batch (time x batch x n_mel)
    batch = tf.concat([enroll, verif], axis=1)   # run both batches through the LSTM in one pass
    # embedding lstm (3-layer default)
    with tf.variable_scope("lstm"):
        lstm_cells = [tf.contrib.rnn.LSTMCell(num_units=config.hidden, num_proj=config.proj) for i in range(config.num_layer)]
        lstm = tf.contrib.rnn.MultiRNNCell(lstm_cells)    # make lstm op and variables
        outputs, _ = tf.nn.dynamic_rnn(cell=lstm, inputs=batch, dtype=tf.float32, time_major=True)   # for TI-VS must use dynamic rnn
        embedded = outputs[-1]              # the last output is the embedded d-vector
        embedded = normalize(embedded)      # normalize
    print("embedded size: ", embedded.shape)
    # enrollment embedded vectors (speaker model): per-speaker mean of the first N*M embeddings
    enroll_embed = normalize(tf.reduce_mean(tf.reshape(embedded[:config.N*config.M, :], shape= [config.N, config.M, -1]), axis=1))
    # verification embedded vectors
    verif_embed = embedded[config.N*config.M:, :]
    # fixed w=1, b=0 at test time so the raw cosine similarity is compared against thresholds
    similarity_matrix = similarity(embedded=verif_embed, w=1., b=0., center=enroll_embed)
    saver = tf.train.Saver(var_list=tf.global_variables())
    with tf.Session() as sess:
        tf.global_variables_initializer().run()
        # load model
        print("model path :", path)
        ckpt = tf.train.get_checkpoint_state(checkpoint_dir=os.path.join(path, "Check_Point"))
        ckpt_list = ckpt.all_model_checkpoint_paths
        loaded = 0
        for model in ckpt_list:
            if config.model_num == int(model.split('-')[-1]):  # find ckpt file which matches configuration model number
                print("ckpt file is loaded !", model)
                loaded = 1
                saver.restore(sess, model)  # restore variables from selected ckpt file
                break
        if loaded == 0:
            raise AssertionError("ckpt file does not exist! Check config.model_num or config.model_path.")
        print("test file path : ", config.test_path)
        # return similarity matrix after enrollment and verification
        time1 = time.time() # for check inference time
        # TD-SV uses fixed keyword utterances with different noise; TI-SV uses
        # disjoint utterance ranges of the same speakers.
        if config.tdsv:
            S = sess.run(similarity_matrix, feed_dict={enroll:random_batch(shuffle=False, noise_filenum=1),
                                                      verif:random_batch(shuffle=False, noise_filenum=2)})
        else:
            S = sess.run(similarity_matrix, feed_dict={enroll:random_batch(shuffle=False),
                                                      verif:random_batch(shuffle=False, utter_start=config.M)})
        S = S.reshape([config.N, config.M, -1])   # (enrolled speaker, utterance, verification speaker)
        time2 = time.time()
        np.set_printoptions(precision=2)
        print("inference time for %d utterences : %0.2fs"%(2*config.M*config.N, time2-time1))
        print(S)    # print similarity matrix
        # calculating EER
        diff = 1; EER=0; EER_thres = 0; EER_FAR=0; EER_FRR=0
        # through thresholds calculate false acceptance ratio (FAR) and false reject ratio (FRR)
        for thres in [0.01*i+0.5 for i in range(50)]:
            S_thres = S>thres
            # False acceptance ratio = false acceptance / mismatched population (enroll speaker != verification speaker)
            FAR = sum([np.sum(S_thres[i])-np.sum(S_thres[i,:,i]) for i in range(config.N)])/(config.N-1)/config.M/config.N
            # False reject ratio = false reject / matched population (enroll speaker = verification speaker)
            FRR = sum([config.M-np.sum(S_thres[i][:,i]) for i in range(config.N)])/config.M/config.N
            # Save threshold when FAR = FRR (=EER)
            if diff> abs(FAR-FRR):
                diff = abs(FAR-FRR)
                EER = (FAR+FRR)/2
                EER_thres = thres
                EER_FAR = FAR
                EER_FRR = FRR
        print("\nEER : %0.2f (thres:%0.2f, FAR:%0.2f, FRR:%0.2f)"%(EER,EER_thres,EER_FAR,EER_FRR))
| [
"utils.random_batch",
"numpy.array",
"tensorflow.contrib.rnn.LSTMCell",
"tensorflow.clip_by_global_norm",
"tensorflow.placeholder",
"tensorflow.Session",
"tensorflow.nn.dynamic_rnn",
"tensorflow.concat",
"utils.similarity",
"tensorflow.summary.scalar",
"tensorflow.trainable_variables",
"tensor... | [((226, 238), 'configuration.get_config', 'get_config', ([], {}), '()\n', (236, 238), False, 'from configuration import get_config\n'), ((266, 290), 'tensorflow.reset_default_graph', 'tf.reset_default_graph', ([], {}), '()\n', (288, 290), True, 'import tensorflow as tf\n'), ((341, 412), 'tensorflow.placeholder', 'tf.placeholder', ([], {'shape': '[None, config.N * config.M, 40]', 'dtype': 'tf.float32'}), '(shape=[None, config.N * config.M, 40], dtype=tf.float32)\n', (355, 412), True, 'import tensorflow as tf\n'), ((460, 492), 'tensorflow.placeholder', 'tf.placeholder', ([], {'dtype': 'tf.float32'}), '(dtype=tf.float32)\n', (474, 492), True, 'import tensorflow as tf\n'), ((530, 581), 'tensorflow.Variable', 'tf.Variable', (['(0)'], {'name': '"""global_step"""', 'trainable': '(False)'}), "(0, name='global_step', trainable=False)\n", (541, 581), True, 'import tensorflow as tf\n'), ((1418, 1444), 'utils.similarity', 'similarity', (['embedded', 'w', 'b'], {}), '(embedded, w, b)\n', (1428, 1444), False, 'from utils import random_batch, normalize, similarity, loss_cal, optim\n'), ((1514, 1552), 'utils.loss_cal', 'loss_cal', (['sim_matrix'], {'type': 'config.loss'}), '(sim_matrix, type=config.loss)\n', (1522, 1552), False, 'from utils import random_batch, normalize, similarity, loss_cal, optim\n'), ((1603, 1627), 'tensorflow.trainable_variables', 'tf.trainable_variables', ([], {}), '()\n', (1625, 1627), True, 'import tensorflow as tf\n'), ((1679, 1688), 'utils.optim', 'optim', (['lr'], {}), '(lr)\n', (1684, 1688), False, 'from utils import random_batch, normalize, similarity, loss_cal, optim\n'), ((1914, 1948), 'tensorflow.clip_by_global_norm', 'tf.clip_by_global_norm', (['grads', '(3.0)'], {}), '(grads, 3.0)\n', (1936, 1948), True, 'import tensorflow as tf\n'), ((2443, 2474), 'tensorflow.summary.scalar', 'tf.summary.scalar', (['"""loss"""', 'loss'], {}), "('loss', loss)\n", (2460, 2474), True, 'import tensorflow as tf\n'), ((2489, 2511), 
'tensorflow.summary.merge_all', 'tf.summary.merge_all', ([], {}), '()\n', (2509, 2511), True, 'import tensorflow as tf\n'), ((2525, 2541), 'tensorflow.train.Saver', 'tf.train.Saver', ([], {}), '()\n', (2539, 2541), True, 'import tensorflow as tf\n'), ((4181, 4205), 'tensorflow.reset_default_graph', 'tf.reset_default_graph', ([], {}), '()\n', (4203, 4205), True, 'import tensorflow as tf\n'), ((4240, 4311), 'tensorflow.placeholder', 'tf.placeholder', ([], {'shape': '[None, config.N * config.M, 40]', 'dtype': 'tf.float32'}), '(shape=[None, config.N * config.M, 40], dtype=tf.float32)\n', (4254, 4311), True, 'import tensorflow as tf\n'), ((4365, 4436), 'tensorflow.placeholder', 'tf.placeholder', ([], {'shape': '[None, config.N * config.M, 40]', 'dtype': 'tf.float32'}), '(shape=[None, config.N * config.M, 40], dtype=tf.float32)\n', (4379, 4436), True, 'import tensorflow as tf\n'), ((4493, 4527), 'tensorflow.concat', 'tf.concat', (['[enroll, verif]'], {'axis': '(1)'}), '([enroll, verif], axis=1)\n', (4502, 4527), True, 'import tensorflow as tf\n'), ((5478, 5545), 'utils.similarity', 'similarity', ([], {'embedded': 'verif_embed', 'w': '(1.0)', 'b': '(0.0)', 'center': 'enroll_embed'}), '(embedded=verif_embed, w=1.0, b=0.0, center=enroll_embed)\n', (5488, 5545), False, 'from utils import random_batch, normalize, similarity, loss_cal, optim\n'), ((788, 813), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""lstm"""'], {}), "('lstm')\n", (805, 813), True, 'import tensorflow as tf\n'), ((959, 998), 'tensorflow.contrib.rnn.MultiRNNCell', 'tf.contrib.rnn.MultiRNNCell', (['lstm_cells'], {}), '(lstm_cells)\n', (986, 998), True, 'import tensorflow as tf\n'), ((1055, 1132), 'tensorflow.nn.dynamic_rnn', 'tf.nn.dynamic_rnn', ([], {'cell': 'lstm', 'inputs': 'batch', 'dtype': 'tf.float32', 'time_major': '(True)'}), '(cell=lstm, inputs=batch, dtype=tf.float32, time_major=True)\n', (1072, 1132), True, 'import tensorflow as tf\n'), ((1289, 1308), 'utils.normalize', 'normalize', 
(['embedded'], {}), '(embedded)\n', (1298, 1308), False, 'from utils import random_batch, normalize, similarity, loss_cal, optim\n'), ((2578, 2590), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (2588, 2590), True, 'import tensorflow as tf\n'), ((4580, 4605), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""lstm"""'], {}), "('lstm')\n", (4597, 4605), True, 'import tensorflow as tf\n'), ((4751, 4790), 'tensorflow.contrib.rnn.MultiRNNCell', 'tf.contrib.rnn.MultiRNNCell', (['lstm_cells'], {}), '(lstm_cells)\n', (4778, 4790), True, 'import tensorflow as tf\n'), ((4845, 4922), 'tensorflow.nn.dynamic_rnn', 'tf.nn.dynamic_rnn', ([], {'cell': 'lstm', 'inputs': 'batch', 'dtype': 'tf.float32', 'time_major': '(True)'}), '(cell=lstm, inputs=batch, dtype=tf.float32, time_major=True)\n', (4862, 4922), True, 'import tensorflow as tf\n'), ((5079, 5098), 'utils.normalize', 'normalize', (['embedded'], {}), '(embedded)\n', (5088, 5098), False, 'from utils import random_batch, normalize, similarity, loss_cal, optim\n'), ((5616, 5628), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (5626, 5628), True, 'import tensorflow as tf\n'), ((6549, 6560), 'time.time', 'time.time', ([], {}), '()\n', (6558, 6560), False, 'import time\n'), ((7118, 7129), 'time.time', 'time.time', ([], {}), '()\n', (7127, 7129), False, 'import time\n'), ((7141, 7173), 'numpy.set_printoptions', 'np.set_printoptions', ([], {'precision': '(2)'}), '(precision=2)\n', (7160, 7173), True, 'import numpy as np\n'), ((625, 657), 'numpy.array', 'np.array', (['[10]'], {'dtype': 'np.float32'}), '([10], dtype=np.float32)\n', (633, 657), True, 'import numpy as np\n'), ((702, 734), 'numpy.array', 'np.array', (['[-5]'], {'dtype': 'np.float32'}), '([-5], dtype=np.float32)\n', (710, 734), True, 'import numpy as np\n'), ((838, 908), 'tensorflow.contrib.rnn.LSTMCell', 'tf.contrib.rnn.LSTMCell', ([], {'num_units': 'config.hidden', 'num_proj': 'config.proj'}), '(num_units=config.hidden, num_proj=config.proj)\n', 
(861, 908), True, 'import tensorflow as tf\n'), ((2670, 2703), 'os.path.join', 'os.path.join', (['path', '"""Check_Point"""'], {}), "(path, 'Check_Point')\n", (2682, 2703), False, 'import os\n'), ((2770, 2796), 'os.path.join', 'os.path.join', (['path', '"""logs"""'], {}), "(path, 'logs')\n", (2782, 2796), False, 'import os\n'), ((2888, 2914), 'os.path.join', 'os.path.join', (['path', '"""logs"""'], {}), "(path, 'logs')\n", (2900, 2914), False, 'import os\n'), ((4630, 4700), 'tensorflow.contrib.rnn.LSTMCell', 'tf.contrib.rnn.LSTMCell', ([], {'num_units': 'config.hidden', 'num_proj': 'config.proj'}), '(num_units=config.hidden, num_proj=config.proj)\n', (4653, 4700), True, 'import tensorflow as tf\n'), ((5276, 5353), 'tensorflow.reshape', 'tf.reshape', (['embedded[:config.N * config.M, :]'], {'shape': '[config.N, config.M, -1]'}), '(embedded[:config.N * config.M, :], shape=[config.N, config.M, -1])\n', (5286, 5353), True, 'import tensorflow as tf\n'), ((5583, 5604), 'tensorflow.global_variables', 'tf.global_variables', ([], {}), '()\n', (5602, 5604), True, 'import tensorflow as tf\n'), ((2609, 2642), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (2640, 2642), True, 'import tensorflow as tf\n'), ((5647, 5680), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (5678, 5680), True, 'import tensorflow as tf\n'), ((5809, 5842), 'os.path.join', 'os.path.join', (['path', '"""Check_Point"""'], {}), "(path, 'Check_Point')\n", (5821, 5842), False, 'import os\n'), ((4024, 4070), 'os.path.join', 'os.path.join', (['path', '"""./Check_Point/model.ckpt"""'], {}), "(path, './Check_Point/model.ckpt')\n", (4036, 4070), False, 'import os\n'), ((3338, 3352), 'utils.random_batch', 'random_batch', ([], {}), '()\n', (3350, 3352), False, 'from utils import random_batch, normalize, similarity, loss_cal, optim\n'), ((6676, 6720), 'utils.random_batch', 'random_batch', ([], {'shuffle': '(False)', 
'noise_filenum': '(1)'}), '(shuffle=False, noise_filenum=1)\n', (6688, 6720), False, 'from utils import random_batch, normalize, similarity, loss_cal, optim\n'), ((6784, 6828), 'utils.random_batch', 'random_batch', ([], {'shuffle': '(False)', 'noise_filenum': '(2)'}), '(shuffle=False, noise_filenum=2)\n', (6796, 6828), False, 'from utils import random_batch, normalize, similarity, loss_cal, optim\n'), ((6909, 6936), 'utils.random_batch', 'random_batch', ([], {'shuffle': '(False)'}), '(shuffle=False)\n', (6921, 6936), False, 'from utils import random_batch, normalize, similarity, loss_cal, optim\n'), ((7000, 7049), 'utils.random_batch', 'random_batch', ([], {'shuffle': '(False)', 'utter_start': 'config.M'}), '(shuffle=False, utter_start=config.M)\n', (7012, 7049), False, 'from utils import random_batch, normalize, similarity, loss_cal, optim\n'), ((7986, 8010), 'numpy.sum', 'np.sum', (['S_thres[i][:, i]'], {}), '(S_thres[i][:, i])\n', (7992, 8010), True, 'import numpy as np\n'), ((7741, 7759), 'numpy.sum', 'np.sum', (['S_thres[i]'], {}), '(S_thres[i])\n', (7747, 7759), True, 'import numpy as np\n'), ((7760, 7784), 'numpy.sum', 'np.sum', (['S_thres[i, :, i]'], {}), '(S_thres[i, :, i])\n', (7766, 7784), True, 'import numpy as np\n')] |