code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
#coding:utf-8
'''
author : linkin
e-mail : <EMAIL>
date : 2018-11-15
'''
import amipy
from amipy.BaseClass import Hub
from amipy.middlewares import MiddleWareManager
from amipy.util.load import load_py
from amipy.log import getLogger
class SpiderHub(Hub):
    """Singleton hub that owns all bound spiders and the shared request queue."""
    def __new__(cls, *args, **kwargs):
        # Classic singleton: cache and reuse one instance per class.
        if not hasattr(cls,'_instance'):
            cls._instance = super(SpiderHub, cls).__new__(cls)
        return cls._instance
    def __init__(self,settings,crawler):
        """Store settings/crawler, reset counters, and build the request queue."""
        super(SpiderHub, self).__init__()
        self.settings = settings
        # Per-outcome counters; reported by __str__.
        self._success_counter = 0
        self._failed_counter = 0
        self._exception_counter = 0
        self.active = False
        self.looper = None
        self._crawler = crawler
        self.logger = getLogger(__name__)
        self._set_queue()
    def _set_queue(self):
        # The queue class is configurable: load it by its dotted path from
        # the settings and instantiate it.
        _queue = self.settings.gets('PROJECT_REQUESTS_QUEUE')
        self.requests = load_py(_queue)()
        self.logger.debug(f'Loaded Requests Queue:{type(self.requests).__name__}')
    def start(self,looper):
        """Collect start requests from every bound spider into the queue.

        Raises StopAsyncIteration when no spider yields any request; the
        caller treats that as the shutdown signal.
        """
        self.looper = looper
        self.active = True
        for i in self.spiders:
            for seed in i.start_requests():
                i.status = 'RUNNING'
                if isinstance(seed, amipy.Request):
                    self.requests.put_nowait(seed)
        if self.requests.empty():
            print(f'* No start requests.Shutting down Amipy.\r\n')
            raise StopAsyncIteration
        self.logger.info(f'Got {self.requests.qsize()} start requests.')
    def takeover(self,spiders):
        """Adopt *spiders* and bind each of them to this hub."""
        self.spiders =spiders
        self.logger.debug(f'Takeover:{[i.name+":"+i.__class__.__name__ for i in spiders]}')
        self._binding()
    def _binding(self):
        # NOTE(review): self.priorities is never initialized in __init__;
        # presumably the Hub base class provides it -- confirm.
        for spider in self.spiders:
            spider.binding_hub = self
            spider.status = 'BOUND'
            self.priorities += spider.priority
    def accept(self,request):
        """Normalize a callback result into a list of amipy.Request objects."""
        _all_req = []
        if isinstance(request,list):
            for req in request:
                if not isinstance(req, amipy.Request):
                    continue
                else:
                    _all_req.append(req)
        elif isinstance(request, amipy.Request):
            _all_req.append(request)
        return _all_req
    @MiddleWareManager.handle_resp
    def delegate(self,response):
        """Route a finished response to the right spider handler.

        status == 200 -> callback, status == -1 -> excback (exception),
        anything else -> errback.  Requests returned by the handler are
        queued for crawling.
        """
        _res = []
        req = response.request
        spider = response.spider
        if response.status == 200:
            self._success_counter += 1
            spider._success += 1
            self.logger.info(f'[Success]{spider.name} {req.method}-{req.url}')
            a = self.accept(response.callback(response))
        elif response.status == -1:
            self._exception_counter += 1
            spider._exc +=1
            self.logger.info(f'[{response.exception.__class__.__name__}] {spider.name}'
                             f' {req.method}-{req.url} ')
            a = self.accept(response.excback(response))
        else:
            self._failed_counter += 1
            spider._fail += 1
            self.logger.info(f'[{response.status} Error]{spider.name} {req.method}-{req.url}')
            a = self.accept(response.errback(response))
        _res.extend(a)
        # Queue every non-falsy follow-up request.
        [self.requests.put_nowait(i) for i in _res if i]
    def __str__(self):
        return f'<SpiderHub obj at {hex(id(self))} active:{self.active}' \
               f' [spiders:{len(self.spiders)} success:{self._success_counter} ' \
               f'fail:{self._failed_counter} exc:{self._exception_counter}]>'
| [
"amipy.log.getLogger",
"amipy.util.load.load_py"
] | [((773, 792), 'amipy.log.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (782, 792), False, 'from amipy.log import getLogger\n'), ((932, 947), 'amipy.util.load.load_py', 'load_py', (['_queue'], {}), '(_queue)\n', (939, 947), False, 'from amipy.util.load import load_py\n')] |
import numpy as np
def ratios(pops1, pops2):
    """Derive transition ratios between two (male, female) population snapshots.

    Returns (per-bin survival ratios as a list, birth ratio, tail ratio).
    """
    totals_start = np.asarray(pops1[0]) + np.asarray(pops1[1])
    totals_end = np.asarray(pops2[0]) + np.asarray(pops2[1])
    # Each bin at the end divided by the previous bin at the start.
    survival = np.delete(totals_end, 0) / np.delete(totals_start, -1)
    survival = np.delete(survival, -1)
    # Newborns relative to women in the child-bearing bins (indices 3..9).
    births = totals_end[0] / np.sum(np.asarray(pops1[1])[3:10])
    # Oldest bin at the end vs the two oldest bins at the start.
    tail = totals_end[-1] / np.sum(totals_start[-2:])
    return survival.tolist(), births, tail
def simulate(pops, change_ratio, baby_ratio, tail_ratio):
    """Project one population step forward using the precomputed ratios.

    Returns [male_estimates, female_estimates], each the same length as
    the input age-bin lists.
    """
    males, females = np.array(pops[0]), np.array(pops[1])
    mothers = np.sum(females[3:10])
    newborns = mothers * baby_ratio
    # Sex ratio at birth of 105 boys per 100 girls.
    boy_share = 105 / (105 + 100)
    girl_share = 100 / (105 + 100)
    aged_males = (males[:-2] * np.array(change_ratio)).tolist()
    aged_females = (females[:-2] * np.array(change_ratio)).tolist()
    est_males = [newborns * boy_share] + aged_males + [np.sum(pops[0][-2:]) * tail_ratio]
    est_females = [newborns * girl_share] + aged_females + [np.sum(pops[1][-2:]) * tail_ratio]
    return [est_males, est_females]
| [
"numpy.delete",
"numpy.array",
"numpy.sum"
] | [((241, 268), 'numpy.delete', 'np.delete', (['change_ratio', '(-1)'], {}), '(change_ratio, -1)\n', (250, 268), True, 'import numpy as np\n'), ((61, 79), 'numpy.array', 'np.array', (['pops1[0]'], {}), '(pops1[0])\n', (69, 79), True, 'import numpy as np\n'), ((82, 100), 'numpy.array', 'np.array', (['pops1[1]'], {}), '(pops1[1])\n', (90, 100), True, 'import numpy as np\n'), ((115, 133), 'numpy.array', 'np.array', (['pops2[0]'], {}), '(pops2[0])\n', (123, 133), True, 'import numpy as np\n'), ((136, 154), 'numpy.array', 'np.array', (['pops2[1]'], {}), '(pops2[1])\n', (144, 154), True, 'import numpy as np\n'), ((175, 196), 'numpy.delete', 'np.delete', (['totals2', '(0)'], {}), '(totals2, 0)\n', (184, 196), True, 'import numpy as np\n'), ((199, 221), 'numpy.delete', 'np.delete', (['totals1', '(-1)'], {}), '(totals1, -1)\n', (208, 221), True, 'import numpy as np\n'), ((363, 383), 'numpy.sum', 'np.sum', (['totals1[-2:]'], {}), '(totals1[-2:])\n', (369, 383), True, 'import numpy as np\n'), ((549, 566), 'numpy.array', 'np.array', (['pops[1]'], {}), '(pops[1])\n', (557, 566), True, 'import numpy as np\n'), ((934, 954), 'numpy.sum', 'np.sum', (['pops[0][-2:]'], {}), '(pops[0][-2:])\n', (940, 954), True, 'import numpy as np\n'), ((993, 1013), 'numpy.sum', 'np.sum', (['pops[1][-2:]'], {}), '(pops[1][-2:])\n', (999, 1013), True, 'import numpy as np\n'), ((306, 324), 'numpy.array', 'np.array', (['pops1[1]'], {}), '(pops1[1])\n', (314, 324), True, 'import numpy as np\n'), ((749, 771), 'numpy.array', 'np.array', (['change_ratio'], {}), '(change_ratio)\n', (757, 771), True, 'import numpy as np\n'), ((822, 844), 'numpy.array', 'np.array', (['change_ratio'], {}), '(change_ratio)\n', (830, 844), True, 'import numpy as np\n'), ((724, 741), 'numpy.array', 'np.array', (['pops[0]'], {}), '(pops[0])\n', (732, 741), True, 'import numpy as np\n'), ((797, 814), 'numpy.array', 'np.array', (['pops[1]'], {}), '(pops[1])\n', (805, 814), True, 'import numpy as np\n')] |
#!/usr/bin/python
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
# Google's Python Class
# http://code.google.com/edu/languages/google-python-class/
import sys
import re
import os
import shutil
import subprocess
import zipfile
"""
Copy Special exercise
"""
# +++your code here+++
# Write functions and modify main() to call them
def get_special_paths(directory):
    """Return absolute paths of the 'special' files in *directory*.

    A file is special when its name contains ``__w__`` somewhere, where
    *w* is one or more word characters.
    """
    pattern = re.compile(r'__\w+__')
    return [
        os.path.abspath(os.path.join(directory, name))
        for name in os.listdir(directory)
        if pattern.search(name)
    ]
def copy_to(paths, directory):
    """Copy every file in *paths* into *directory*, creating it on demand."""
    if not os.path.exists(directory):
        os.mkdir(directory)
    for source in paths:
        shutil.copy(source, directory)
def zip_to(paths, zip_path):
    """Write every file in *paths* into a new zip archive at *zip_path*."""
    with zipfile.ZipFile(zip_path, 'w') as archive:
        for entry in paths:
            archive.write(entry)
def zip_to_command(paths, zip_path):
    """Shell out to the external ``zip`` tool to archive *paths*.

    The ``-j`` flag junks directory components, storing bare file names.
    """
    command = ['zip', '-j', zip_path]
    subprocess.run(command + list(paths))
def main():
    """Command-line entry point.

    usage: [--todir dir][--tozip zipfile] dir [dir ...]
    Prints the special paths found in each dir, or copies/zips them when
    --todir / --tozip is given.
    """
    # Make a list of command line arguments, omitting the [0] element
    # which is the script itself.
    args = sys.argv[1:]
    if not args:
        print("usage: [--todir dir][--tozip zipfile] dir [dir ...]")
        sys.exit(1)
    # todir and tozip are either set from command line
    # or left as the empty string.
    # The args array is left just containing the dirs.
    to_dir = ''
    if args[0] == '--todir':
        to_dir = args[1]
        del args[0:2]
    to_zip = ''
    # BUGFIX: guard against an exhausted args list -- '--todir d' with no
    # dirs used to raise IndexError here instead of reaching the
    # "must specify one or more dirs" message below.
    if args and args[0] == '--tozip':
        to_zip = args[1]
        del args[0:2]
    if len(args) == 0:
        print("error: must specify one or more dirs")
        sys.exit(1)
    for directory in args:
        paths = get_special_paths(directory)
        if to_dir:
            copy_to(paths, to_dir)
        elif to_zip:
            zip_to_command(paths, to_zip)
        else:
            print('\n'.join(paths))
if __name__ == "__main__":
    main()
| [
"os.path.exists",
"os.listdir",
"zipfile.ZipFile",
"subprocess.run",
"os.path.join",
"os.mkdir",
"shutil.copy",
"sys.exit",
"re.search"
] | [((707, 728), 'os.listdir', 'os.listdir', (['directory'], {}), '(directory)\n', (717, 728), False, 'import os\n'), ((1541, 1588), 'subprocess.run', 'subprocess.run', (["(['zip', '-j', zip_path] + paths)"], {}), "(['zip', '-j', zip_path] + paths)\n", (1555, 1588), False, 'import subprocess\n'), ((789, 820), 're.search', 're.search', (['"""__\\\\w+__"""', 'filename'], {}), "('__\\\\w+__', filename)\n", (798, 820), False, 'import re\n'), ((1061, 1086), 'os.path.exists', 'os.path.exists', (['directory'], {}), '(directory)\n', (1075, 1086), False, 'import os\n'), ((1096, 1115), 'os.mkdir', 'os.mkdir', (['directory'], {}), '(directory)\n', (1104, 1115), False, 'import os\n'), ((1148, 1176), 'shutil.copy', 'shutil.copy', (['path', 'directory'], {}), '(path, directory)\n', (1159, 1176), False, 'import shutil\n'), ((1339, 1369), 'zipfile.ZipFile', 'zipfile.ZipFile', (['zip_path', '"""w"""'], {}), "(zip_path, 'w')\n", (1354, 1369), False, 'import zipfile\n'), ((1936, 1947), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1944, 1947), False, 'import sys\n'), ((2365, 2376), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2373, 2376), False, 'import sys\n'), ((901, 934), 'os.path.join', 'os.path.join', (['directory', 'filename'], {}), '(directory, filename)\n', (913, 934), False, 'import os\n')] |
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
# Build configuration for the "giraffe" package: compiles the two Cython
# extension modules (graph, graph_mixin) via Cython's build_ext command,
# e.g. `python setup.py build_ext --inplace`.
setup(
    name="giraffe",
    version="0.1",
    package_dir={"giraffe" : ""},
    packages=["giraffe"],
    cmdclass = {'build_ext': build_ext},
    ext_modules = [
        Extension("graph", ["giraffe/graph.pyx"]),
        Extension("graph_mixin", ["giraffe/graph_mixin.pyx"]),
    ]
)
| [
"distutils.extension.Extension"
] | [((315, 356), 'distutils.extension.Extension', 'Extension', (['"""graph"""', "['giraffe/graph.pyx']"], {}), "('graph', ['giraffe/graph.pyx'])\n", (324, 356), False, 'from distutils.extension import Extension\n'), ((379, 432), 'distutils.extension.Extension', 'Extension', (['"""graph_mixin"""', "['giraffe/graph_mixin.pyx']"], {}), "('graph_mixin', ['giraffe/graph_mixin.pyx'])\n", (388, 432), False, 'from distutils.extension import Extension\n')] |
import click
from modules.processor import build_report, print_report
# Root command group: prompts for (or accepts) the data-file path and
# stores it in the click context so subcommands (driver, ls) can read it.
# NOTE: a docstring is deliberately avoided here because click would show
# it as help text, changing the CLI output.
@click.group(invoke_without_command=True)
@click.option('--files', '-f', required=True, type=str, prompt="Provide the path to data files")
@click.pass_context
def cli_root(ctx, files):
    ctx.meta['files'] = files
# Print the report filtered to the single driver *name*, using the
# data-file path stored on the context by cli_root.
@cli_root.command()
@click.argument('name', type=str)
@click.pass_context
def driver(ctx, name):
    files = ctx.meta['files']
    report = build_report(files, driver=name)
    print_report(report)
# Print the full report, sorted ascending or descending.
@cli_root.command()
@click.argument('order', type=click.Choice(["asc", "desc"]), default="asc")
@click.pass_context
def ls(ctx, order):
    files = ctx.meta['files']
    # BUGFIX/cleanup: the former manual re-check of `order` was dead code
    # (click.Choice rejects anything but "asc"/"desc" before this callback
    # runs) and raised IOError, a misleading exception type, if it ever fired.
    report = build_report(files, order=order)
    # TODO(review): a missing error_log should be treated as an error;
    # decide where that validation belongs. (Translated from the original
    # Russian note.)
    print_report(report)
| [
"click.Choice",
"click.argument",
"modules.processor.build_report",
"click.group",
"click.option",
"modules.processor.print_report"
] | [((73, 113), 'click.group', 'click.group', ([], {'invoke_without_command': '(True)'}), '(invoke_without_command=True)\n', (84, 113), False, 'import click\n'), ((115, 215), 'click.option', 'click.option', (['"""--files"""', '"""-f"""'], {'required': '(True)', 'type': 'str', 'prompt': '"""Provide the path to data files"""'}), "('--files', '-f', required=True, type=str, prompt=\n 'Provide the path to data files')\n", (127, 215), False, 'import click\n'), ((310, 342), 'click.argument', 'click.argument', (['"""name"""'], {'type': 'str'}), "('name', type=str)\n", (324, 342), False, 'import click\n'), ((429, 461), 'modules.processor.build_report', 'build_report', (['files'], {'driver': 'name'}), '(files, driver=name)\n', (441, 461), False, 'from modules.processor import build_report, print_report\n'), ((466, 486), 'modules.processor.print_report', 'print_report', (['report'], {}), '(report)\n', (478, 486), False, 'from modules.processor import build_report, print_report\n'), ((755, 787), 'modules.processor.build_report', 'build_report', (['files'], {'order': 'order'}), '(files, order=order)\n', (767, 787), False, 'from modules.processor import build_report, print_report\n'), ((900, 920), 'modules.processor.print_report', 'print_report', (['report'], {}), '(report)\n', (912, 920), False, 'from modules.processor import build_report, print_report\n'), ((539, 568), 'click.Choice', 'click.Choice', (["['asc', 'desc']"], {}), "(['asc', 'desc'])\n", (551, 568), False, 'import click\n')] |
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
# Set fontsize larger for latex plots
matplotlib.rcParams.update({'font.size': 20})
# Load the fitted data: columns are x, y.  The last row holds the fitted
# slope m and intercept n rather than a data point.
x, y = np.genfromtxt("bin/python_Aufgabe2.txt", unpack=True)
m, n = x[-1], y[-1]
# Plotting
plt.figure(figsize=(12, 7))
plt.grid()
plt.xlabel("x")
plt.ylabel("y")
# Extend the fit line half a standard deviation beyond the data range.
x_new = np.linspace(min(x) - x[:-1].std() / 2, max(x) + x[:-1].std() / 2)
# BUGFIX: the original passed alpha=2, which is outside matplotlib's valid
# [0, 1] alpha range and raises ValueError on current matplotlib; fully
# opaque markers (the default) are what was intended.
plt.plot(x[:-1], y[:-1], "x", mew=2., label="Datenpunkte")
plt.plot(x_new, m * x_new + n, "-", linewidth=3, label="Ausgleichsgerade")
plt.legend()
plt.tight_layout()
plt.savefig("bin/figure.pdf", dpi=1200)
| [
"matplotlib.pyplot.grid",
"matplotlib.pyplot.savefig",
"matplotlib.rcParams.update",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.tight_layout",
"numpy.genfromtxt",
"matplotlib.pyplot.legend"
] | [((108, 153), 'matplotlib.rcParams.update', 'matplotlib.rcParams.update', (["{'font.size': 20}"], {}), "({'font.size': 20})\n", (134, 153), False, 'import matplotlib\n'), ((188, 241), 'numpy.genfromtxt', 'np.genfromtxt', (['"""bin/python_Aufgabe2.txt"""'], {'unpack': '(True)'}), "('bin/python_Aufgabe2.txt', unpack=True)\n", (201, 241), True, 'import numpy as np\n'), ((275, 302), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 7)'}), '(figsize=(12, 7))\n', (285, 302), True, 'import matplotlib.pyplot as plt\n'), ((302, 312), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (310, 312), True, 'import matplotlib.pyplot as plt\n'), ((313, 328), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""x"""'], {}), "('x')\n", (323, 328), True, 'import matplotlib.pyplot as plt\n'), ((329, 344), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""y"""'], {}), "('y')\n", (339, 344), True, 'import matplotlib.pyplot as plt\n'), ((411, 479), 'matplotlib.pyplot.plot', 'plt.plot', (['x[:-1]', 'y[:-1]', '"""x"""'], {'mew': '(2.0)', 'alpha': '(2)', 'label': '"""Datenpunkte"""'}), "(x[:-1], y[:-1], 'x', mew=2.0, alpha=2, label='Datenpunkte')\n", (419, 479), True, 'import matplotlib.pyplot as plt\n'), ((479, 553), 'matplotlib.pyplot.plot', 'plt.plot', (['x_new', '(m * x_new + n)', '"""-"""'], {'linewidth': '(3)', 'label': '"""Ausgleichsgerade"""'}), "(x_new, m * x_new + n, '-', linewidth=3, label='Ausgleichsgerade')\n", (487, 553), True, 'import matplotlib.pyplot as plt\n'), ((550, 562), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (560, 562), True, 'import matplotlib.pyplot as plt\n'), ((563, 581), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (579, 581), True, 'import matplotlib.pyplot as plt\n'), ((582, 621), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""bin/figure.pdf"""'], {'dpi': '(1200)'}), "('bin/figure.pdf', dpi=1200)\n", (593, 621), True, 'import matplotlib.pyplot as plt\n')] |
#!/usr/bin/env python3
# This script assumes that the non-numerical column headers
# in train and predi files are identical.
# Thus the sm header(s) in the train file must be numeric (day/month/year).
import sys
import numpy as np
import pandas as pd
from sklearn.decomposition import PCA #TruncatedSVD as SVD
from sklearn.preprocessing import StandardScaler
from sklearn.pipeline import Pipeline
def mask(df, f):
    """Return the rows of *df* selected by the boolean mask ``f(df)``."""
    selector = f(df)
    return df[selector]
def is_int(val):
    """Return True when *val* converts to int, False otherwise."""
    try:
        int(val)
    except Exception:
        return False
    return True
def remove_sparse_rows(data, error=-99999.0):
    """Drop the rows of *data* that contain the sentinel *error* value.

    Parameters
    ----------
    data : pd.DataFrame
    error : float
        Marker used in the raw data for a missing measurement.

    Returns
    -------
    pd.DataFrame with only the error-free rows; columns are preserved.
    """
    # BUGFIX: the original kept exactly the rows *containing* the error
    # marker ("if error in row"), the opposite of what the name promises.
    # Also, DataFrame.as_matrix() was removed in pandas 1.0; to_numpy()
    # is the supported replacement.
    rows = [row for row in data.to_numpy() if error not in row]
    return pd.DataFrame(rows, columns=data.columns)
def fit_data(train_data, num_comps="mle"):
    """Fit a standardize-then-PCA pipeline on *train_data* and return it."""
    # svd_solver="full" works around the MLE solver issue:
    # https://github.com/scikit-learn/scikit-learn/issues/9884
    steps = [
        ("scaler", StandardScaler()),
        ("pca", PCA(n_components=num_comps, svd_solver="full")),
    ]
    pipeline = Pipeline(steps)
    pipeline.fit(train_data)
    return pipeline
# Select the target number of principal components using the Average
# Eigenvalue technique: http://pubs.acs.org/doi/pdf/10.1021/ie990110i
def choose_num_comps(train_data, bound=1):
    """Count the PCA eigenvalues of *train_data* that reach *bound*."""
    model = fit_data(train_data)
    eigenvalues = model.named_steps['pca'].explained_variance_
    return sum(1 for ev in eigenvalues if ev >= bound)
# Assumes the first two columns are x/y-coordinates and that
# integer-headed columns carry sm data, not covariates.
def get_params(data):
    """Return the non-integer column names of *data*, skipping the first two."""
    def _looks_like_int(name):
        try:
            int(name)
        except Exception:
            return False
        return True
    return [col for col in list(data.columns)[2:] if not _looks_like_int(col)]
# Apply the fitted pca transformation `model` to `df`, replacing the
# `params`-headed columns with `num_comps` new component columns.
def apply_model(df, model, params, num_comps):
    """Transform the *params* columns of *df* through *model*; keep the rest."""
    transformed = model.transform(df[params])
    component_cols = [f"Component{i}" for i in range(num_comps)]
    components = pd.DataFrame(transformed, columns=component_cols)
    remainder = df.drop(params, axis=1)
    # Realign indices so the horizontal concat pairs rows positionally.
    components.reset_index(drop=True, inplace=True)
    remainder.reset_index(drop=True, inplace=True)
    return pd.concat([remainder, components], axis=1)
def joint_pca(train_data, predi_data, params):
    """Fit PCA on the training covariates and apply it to both data sets.

    Returns (transformed train frame, transformed prediction frame,
    component matrix of the fitted model).
    """
    # The dimension-reduction model is learned from the training data only...
    covariates = train_data[params]
    num_comps = choose_num_comps(covariates)
    model = fit_data(covariates, num_comps)
    # ...but the identical transformation is applied to both frames.
    train_out = apply_model(train_data, model, params, num_comps)
    predi_out = apply_model(predi_data, model, params, num_comps)
    components = model.named_steps["pca"].components_
    return train_out, predi_out, components
if __name__ == "__main__":
    # Positional CLI arguments: train CSV in, prediction CSV in, the two
    # transformed CSVs out, and a log file for the component dump.
    train_in = sys.argv[1]
    predi_in = sys.argv[2]
    train_out = sys.argv[3]
    predi_out = sys.argv[4]
    log_file = sys.argv[5]
    # Read in data files.
    train_data = pd.read_csv(train_in, header=0)
    predi_data = pd.read_csv(predi_in, header=0)
    # Find param names.
    params = get_params(train_data)
    # Do that pca stuff.
    train_pca, predi_pca, components = joint_pca(train_data, predi_data, params)
    # Write the results to specified files.
    train_pca.to_csv(path_or_buf=train_out, index=False)
    predi_pca.to_csv(path_or_buf=predi_out, index=False)
    # Log the pca components (one eigenvector entry per input param).
    with open(log_file, "a") as log:
        log.write("Component Eigenvalues:\n")
        for i in range(len(params)):
            log.write(f"{params[i]}:\n{[c[i] for c in components]}\n")
| [
"pandas.read_csv",
"sklearn.pipeline.Pipeline",
"sklearn.decomposition.PCA",
"sklearn.preprocessing.StandardScaler",
"pandas.DataFrame",
"pandas.concat"
] | [((694, 741), 'pandas.DataFrame', 'pd.DataFrame', (['data_matrix'], {'columns': 'data.columns'}), '(data_matrix, columns=data.columns)\n', (706, 741), True, 'import pandas as pd\n'), ((849, 865), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (863, 865), False, 'from sklearn.preprocessing import StandardScaler\n'), ((939, 985), 'sklearn.decomposition.PCA', 'PCA', ([], {'n_components': 'num_comps', 'svd_solver': '"""full"""'}), "(n_components=num_comps, svd_solver='full')\n", (942, 985), False, 'from sklearn.decomposition import PCA\n'), ((1001, 1045), 'sklearn.pipeline.Pipeline', 'Pipeline', (["[('scaler', scaler), ('pca', pca)]"], {}), "([('scaler', scaler), ('pca', pca)])\n", (1009, 1045), False, 'from sklearn.pipeline import Pipeline\n'), ((2046, 2088), 'pandas.DataFrame', 'pd.DataFrame', (['post_model'], {'columns': 'new_cols'}), '(post_model, columns=new_cols)\n', (2058, 2088), True, 'import pandas as pd\n'), ((2362, 2403), 'pandas.concat', 'pd.concat', (['[pre_base, post_model]'], {'axis': '(1)'}), '([pre_base, post_model], axis=1)\n', (2371, 2403), True, 'import pandas as pd\n'), ((3686, 3717), 'pandas.read_csv', 'pd.read_csv', (['train_in'], {'header': '(0)'}), '(train_in, header=0)\n', (3697, 3717), True, 'import pandas as pd\n'), ((3735, 3766), 'pandas.read_csv', 'pd.read_csv', (['predi_in'], {'header': '(0)'}), '(predi_in, header=0)\n', (3746, 3766), True, 'import pandas as pd\n')] |
# Copyright (C) 2015 <NAME>
#
# This program is released under the "MIT License".
# Please see the file COPYING in this distribution for
# license terms.
import datetime
from flask import Blueprint, request, jsonify
from webargs import Arg
from webargs.flaskparser import use_args
import geoalchemy2.functions as func
from app import app, db
from app.models import Active, Locations
mod_api = Blueprint('api', __name__, url_prefix='/api')
@mod_api.errorhandler(422)
def handle_bad_request(err):
    """Return a JSON error body for webargs validation failures (HTTP 422)."""
    # BUGFIX: getattr without a default raises AttributeError when the
    # error carries no 'data' attribute, so the 'Invalid request' fallback
    # below could never run.
    data = getattr(err, 'data', None)
    if data:
        message = data['message']
    else:
        message = 'Invalid request'
    return jsonify({'error': message}), 422
@mod_api.route('/')
def index():
    # Plain-text marker for the blueprint root; useful as a liveness check.
    return "api module"
# Request schema for POST /api/active.
active_post_args = {
    'vendor_id': Arg(str, required=True),
    'active': Arg(str, required=True)
}
@mod_api.route("/active", methods=["POST"])
@use_args(active_post_args)
def active_post(args):
    """Record an active/inactive status change for a vendor."""
    response = dict(**args)
    response["success"] = True
    try:
        record = Active(**args)
        db.session.add(record)
        db.session.commit()
    except Exception as e:
        response["success"] = False
        # BUGFIX: this was misspelled `resonse`, which raised NameError in
        # the handler and masked the original database error.
        response["exception"] = str(e)
    return jsonify(response)
def query_vendor_status(vendor_id):
    """Return the newest Active row for *vendor_id*, or None if never seen."""
    query = Active.query.filter_by(vendor_id=vendor_id)
    return query.order_by(Active.tstamp.desc()).first()
# Request schema for GET /api/active.
active_get_args = {
    'vendor_id': Arg(str, required=True)
}
@mod_api.route('/active', methods=['GET'])
@use_args(active_get_args)
def active_get(args):
    """Report whether the given vendor is currently active."""
    status = query_vendor_status(args["vendor_id"])
    response = dict(**args)
    if status:
        # Latest record found: echo its active flag back.
        response["success"] = True
        response["active"] = status.active
    else:
        # Vendor has never posted a status.
        response["success"] = False
        response["msg"] = "no matching vendor_id"
    return jsonify(response)
# Request schema for POST /api/location.
location_post_args = {
    'vendor_id': Arg(str, required=True),
    # BUGFIX: 'requred' was a typo for 'required', so lat/lon were
    # effectively optional and webargs never enforced their presence.
    'lat': Arg(str, required=True),
    'lon': Arg(str, required=True)
}
@mod_api.route("/location", methods=["POST"])
@use_args(location_post_args)
def location_post(args):
    """Store a location update for a currently active vendor."""
    status = query_vendor_status(args["vendor_id"])
    response = dict(vendor_id=args["vendor_id"], success=False)
    # NOTE(review): the original called .format(vendor_id) on the messages
    # below, but none contains a placeholder, so the calls were no-ops;
    # the message text is kept byte-identical here.
    if not status:
        response["msg"] = "vendor does not exist"
    elif not status.active:
        response["msg"] = "vendor is not currently active"
    else:
        try:
            location = Locations(**args)
            db.session.add(location)
            db.session.commit()
            response["success"] = True
            response["msg"] = "location updated"
        except Exception as e:
            response["exception"] = str(e)
    return jsonify(response)
def query_vendor_location(vendor_id):
    """Return the newest (vendor_id, tstamp, lon, lat) row for the vendor."""
    columns = (
        Locations.vendor_id,
        Locations.tstamp,
        # Unpack the PostGIS point into plain lon/lat floats.
        func.ST_X(Locations.geom).label("lon"),
        func.ST_Y(Locations.geom).label("lat"),
    )
    query = db.session.query(*columns).filter_by(vendor_id=vendor_id)
    return query.order_by(Locations.tstamp.desc()).first()
# Request schema for GET /api/location.
location_get_args = {
    'vendor_id': Arg(str, required=True)
}
@mod_api.route('/location', methods=['GET'])
# BUGFIX: the decorator referenced active_get_args instead of the
# location_get_args defined just above for this endpoint (the two dicts
# happen to be identical, so observable behavior is unchanged).
@use_args(location_get_args)
def location_get(args):
    """Return the most recent coordinates for a currently active vendor."""
    status = query_vendor_status(args["vendor_id"])
    response = dict(vendor_id=args["vendor_id"], success=False)
    # NOTE(review): the original called .format(vendor_id) on the messages
    # below, but none contains a placeholder, so the calls were no-ops;
    # the message text is kept byte-identical here.
    if not status:
        # Unknown vendor.
        response["msg"] = "vendor does not exist"
    elif not status.active:
        # Known vendor, but currently switched off.
        response["msg"] = "vendor is not currently active"
    else:
        # Active vendor: fetch the latest recorded position.
        location = query_vendor_location(args["vendor_id"])
        if location:
            response = dict(
                success=True,
                vendor_id=location.vendor_id,
                tstamp=str(location.tstamp),
                lat=location.lat,
                lon=location.lon,
            )
        else:
            response["msg"] = "retrieving coordinates failed"
    return jsonify(response)
| [
"app.db.session.commit",
"app.models.Locations.tstamp.desc",
"app.models.Active.tstamp.desc",
"webargs.Arg",
"app.models.Active",
"geoalchemy2.functions.ST_X",
"geoalchemy2.functions.ST_Y",
"app.models.Active.query.filter_by",
"app.db.session.add",
"webargs.flaskparser.use_args",
"app.models.Loc... | [((400, 445), 'flask.Blueprint', 'Blueprint', (['"""api"""', '__name__'], {'url_prefix': '"""/api"""'}), "('api', __name__, url_prefix='/api')\n", (409, 445), False, 'from flask import Blueprint, request, jsonify\n'), ((866, 892), 'webargs.flaskparser.use_args', 'use_args', (['active_post_args'], {}), '(active_post_args)\n', (874, 892), False, 'from webargs.flaskparser import use_args\n'), ((1474, 1499), 'webargs.flaskparser.use_args', 'use_args', (['active_get_args'], {}), '(active_get_args)\n', (1482, 1499), False, 'from webargs.flaskparser import use_args\n'), ((2104, 2132), 'webargs.flaskparser.use_args', 'use_args', (['location_post_args'], {}), '(location_post_args)\n', (2112, 2132), False, 'from webargs.flaskparser import use_args\n'), ((3359, 3384), 'webargs.flaskparser.use_args', 'use_args', (['active_get_args'], {}), '(active_get_args)\n', (3367, 3384), False, 'from webargs.flaskparser import use_args\n'), ((756, 779), 'webargs.Arg', 'Arg', (['str'], {'required': '(True)'}), '(str, required=True)\n', (759, 779), False, 'from webargs import Arg\n'), ((794, 817), 'webargs.Arg', 'Arg', (['str'], {'required': '(True)'}), '(str, required=True)\n', (797, 817), False, 'from webargs import Arg\n'), ((1187, 1204), 'flask.jsonify', 'jsonify', (['response'], {}), '(response)\n', (1194, 1204), False, 'from flask import Blueprint, request, jsonify\n'), ((1403, 1426), 'webargs.Arg', 'Arg', (['str'], {'required': '(True)'}), '(str, required=True)\n', (1406, 1426), False, 'from webargs import Arg\n'), ((1903, 1920), 'flask.jsonify', 'jsonify', (['response'], {}), '(response)\n', (1910, 1920), False, 'from flask import Blueprint, request, jsonify\n'), ((1962, 1985), 'webargs.Arg', 'Arg', (['str'], {'required': '(True)'}), '(str, required=True)\n', (1965, 1985), False, 'from webargs import Arg\n'), ((1997, 2019), 'webargs.Arg', 'Arg', (['str'], {'requred': '(True)'}), '(str, requred=True)\n', (2000, 2019), False, 'from webargs import Arg\n'), ((2031, 
2053), 'webargs.Arg', 'Arg', (['str'], {'requred': '(True)'}), '(str, requred=True)\n', (2034, 2053), False, 'from webargs import Arg\n'), ((2890, 2907), 'flask.jsonify', 'jsonify', (['response'], {}), '(response)\n', (2897, 2907), False, 'from flask import Blueprint, request, jsonify\n'), ((3286, 3309), 'webargs.Arg', 'Arg', (['str'], {'required': '(True)'}), '(str, required=True)\n', (3289, 3309), False, 'from webargs import Arg\n'), ((4303, 4320), 'flask.jsonify', 'jsonify', (['response'], {}), '(response)\n', (4310, 4320), False, 'from flask import Blueprint, request, jsonify\n'), ((624, 651), 'flask.jsonify', 'jsonify', (["{'error': message}"], {}), "({'error': message})\n", (631, 651), False, 'from flask import Blueprint, request, jsonify\n'), ((1001, 1015), 'app.models.Active', 'Active', ([], {}), '(**args)\n', (1007, 1015), False, 'from app.models import Active, Locations\n'), ((1024, 1046), 'app.db.session.add', 'db.session.add', (['record'], {}), '(record)\n', (1038, 1046), False, 'from app import app, db\n'), ((1055, 1074), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (1072, 1074), False, 'from app import app, db\n'), ((1321, 1341), 'app.models.Active.tstamp.desc', 'Active.tstamp.desc', ([], {}), '()\n', (1339, 1341), False, 'from app.models import Active, Locations\n'), ((3204, 3227), 'app.models.Locations.tstamp.desc', 'Locations.tstamp.desc', ([], {}), '()\n', (3225, 3227), False, 'from app.models import Active, Locations\n'), ((1254, 1297), 'app.models.Active.query.filter_by', 'Active.query.filter_by', ([], {'vendor_id': 'vendor_id'}), '(vendor_id=vendor_id)\n', (1276, 1297), False, 'from app.models import Active, Locations\n'), ((2582, 2599), 'app.models.Locations', 'Locations', ([], {}), '(**args)\n', (2591, 2599), False, 'from app.models import Active, Locations\n'), ((2612, 2636), 'app.db.session.add', 'db.session.add', (['location'], {}), '(location)\n', (2626, 2636), False, 'from app import app, db\n'), ((2649, 2668), 
'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (2666, 2668), False, 'from app import app, db\n'), ((3052, 3077), 'geoalchemy2.functions.ST_X', 'func.ST_X', (['Locations.geom'], {}), '(Locations.geom)\n', (3061, 3077), True, 'import geoalchemy2.functions as func\n'), ((3104, 3129), 'geoalchemy2.functions.ST_Y', 'func.ST_Y', (['Locations.geom'], {}), '(Locations.geom)\n', (3113, 3129), True, 'import geoalchemy2.functions as func\n')] |
'''
Created on April 15, 2018
@author: <NAME>
'''
import numpy as np
import warnings
from scipy.stats import gamma, lognorm
from sklearn.linear_model import ElasticNet
from spn.structure.leaves.conditional.Conditional import Conditional_Gaussian, Conditional_Poisson, \
Conditional_Bernoulli
import statsmodels.api as sm
from os.path import dirname
path = dirname(__file__) + "/"
def update_glm_parameters_mle(node, data, scope):  # assume data is tuple (output np array, conditional np array)
    """Fit the GLM weights of a conditional leaf node by regularized MLE.

    Parameters
    ----------
    node : Conditional_Gaussian | Conditional_Poisson | Conditional_Bernoulli
        Leaf whose ``weights`` attribute is updated in place.
    data : np.ndarray
        First len(scope) columns are the output variable, the remaining
        columns are the conditioning inputs.
    scope : list
        Must contain exactly one output variable.

    Raises
    ------
    Exception
        If *node* is not one of the supported conditional leaf types.
    """
    assert len(scope) == 1, 'more than one output variable in scope?'
    data = data[~np.isnan(data)].reshape(data.shape)
    dataOut = data[:, :len(scope)]
    dataIn = data[:, len(scope):]
    assert dataOut.shape[1] == 1, 'more than one output variable in scope?'
    if dataOut.shape[0] == 0:
        # Nothing to fit on an empty data slice.
        return
    # Append a constant column so the last weight acts as the intercept.
    dataIn = np.c_[dataIn, np.ones(dataIn.shape[0])]
    if isinstance(node, Conditional_Gaussian):
        # Fast path: elastic-net linear regression; fall through to the
        # statsmodels GLM fit only if it failed to converge.
        reg = ElasticNet(random_state=0, alpha=0.01, max_iter=2000, fit_intercept=False)
        reg.fit(dataIn, dataOut)
        if reg.n_iter_ < reg.max_iter:
            node.weights = reg.coef_.tolist()
            return
        family = sm.families.Gaussian()
    elif isinstance(node, Conditional_Poisson):
        family = sm.families.Poisson()
    elif isinstance(node, Conditional_Bernoulli):
        family = sm.families.Binomial()
    else:
        raise Exception("Unknown conditional " + str(type(node)))
    glmfit = sm.GLM(dataOut, dataIn, family=family).fit_regularized(alpha=0.0001, maxiter=5)
    node.weights = glmfit.params.tolist()
    # NOTE: the original contained a TensorFlow-Probability fitting path
    # (plus an np.savez debug dump) after an unconditional `return` here;
    # it was unreachable dead code and has been removed.
| [
"statsmodels.api.families.Poisson",
"numpy.savez",
"sklearn.linear_model.ElasticNet",
"numpy.ones",
"statsmodels.api.families.Gaussian",
"tensorflow_probability.glm.Poisson",
"tensorflow.Session",
"statsmodels.api.families.Binomial",
"os.path.dirname",
"tensorflow.constant",
"numpy.isnan",
"st... | [((364, 381), 'os.path.dirname', 'dirname', (['__file__'], {}), '(__file__)\n', (371, 381), False, 'from os.path import dirname\n'), ((939, 1013), 'sklearn.linear_model.ElasticNet', 'ElasticNet', ([], {'random_state': '(0)', 'alpha': '(0.01)', 'max_iter': '(2000)', 'fit_intercept': '(False)'}), '(random_state=0, alpha=0.01, max_iter=2000, fit_intercept=False)\n', (949, 1013), False, 'from sklearn.linear_model import ElasticNet\n'), ((1171, 1193), 'statsmodels.api.families.Gaussian', 'sm.families.Gaussian', ([], {}), '()\n', (1191, 1193), True, 'import statsmodels.api as sm\n'), ((849, 873), 'numpy.ones', 'np.ones', (['dataIn.shape[0]'], {}), '(dataIn.shape[0])\n', (856, 873), True, 'import numpy as np\n'), ((1259, 1280), 'statsmodels.api.families.Poisson', 'sm.families.Poisson', ([], {}), '()\n', (1278, 1280), True, 'import statsmodels.api as sm\n'), ((1461, 1499), 'statsmodels.api.GLM', 'sm.GLM', (['dataOut', 'dataIn'], {'family': 'family'}), '(dataOut, dataIn, family=family)\n', (1467, 1499), True, 'import statsmodels.api as sm\n'), ((2031, 2051), 'tensorflow.constant', 'tf.constant', (['dataOut'], {}), '(dataOut)\n', (2042, 2051), True, 'import tensorflow as tf\n'), ((2084, 2096), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (2094, 2096), True, 'import tensorflow as tf\n'), ((2818, 2885), 'numpy.savez', 'np.savez', (["(path + 'tmp_glm_mle_data')"], {'dataIn': 'dataIn', 'dataOut': 'dataOut'}), "(path + 'tmp_glm_mle_data', dataIn=dataIn, dataOut=dataOut)\n", (2826, 2885), True, 'import numpy as np\n'), ((1348, 1370), 'statsmodels.api.families.Binomial', 'sm.families.Binomial', ([], {}), '()\n', (1368, 1370), True, 'import statsmodels.api as sm\n'), ((1843, 1862), 'tensorflow.constant', 'tf.constant', (['dataIn'], {}), '(dataIn)\n', (1854, 1862), True, 'import tensorflow as tf\n'), ((1885, 1905), 'tensorflow.constant', 'tf.constant', (['dataOut'], {}), '(dataOut)\n', (1896, 1905), True, 'import tensorflow as tf\n'), ((1925, 1942), 
'tensorflow_probability.glm.Poisson', 'tfp.glm.Poisson', ([], {}), '()\n', (1940, 1942), True, 'import tensorflow_probability as tfp\n'), ((2004, 2021), 'tensorflow_probability.glm.Poisson', 'tfp.glm.Poisson', ([], {}), '()\n', (2019, 2021), True, 'import tensorflow_probability as tfp\n'), ((592, 606), 'numpy.isnan', 'np.isnan', (['data'], {}), '(data)\n', (600, 606), True, 'import numpy as np\n'), ((2261, 2281), 'tensorflow.constant', 'tf.constant', (['dataOut'], {}), '(dataOut)\n', (2272, 2281), True, 'import tensorflow as tf\n'), ((2648, 2686), 'statsmodels.api.GLM', 'sm.GLM', (['dataOut', 'dataIn'], {'family': 'family'}), '(dataOut, dataIn, family=family)\n', (2654, 2686), True, 'import statsmodels.api as sm\n')] |
# Copyright (c) 2019 Science and Technology Facilities Council
# All rights reserved.
# Modifications made as part of the fparser project are distributed
# under the following license:
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''Tests for a Fortran 2003 R1011 control edit descriptor.'''
import pytest
from fparser.two.Fortran2003 import Control_Edit_Desc
from fparser.two.utils import NoMatchError, InternalError
def test_descriptors_match(f2003_create):
    '''Valid control edit descriptors handled directly by the match method
    ('/', ':' and 'P', optionally preceded by a repeat count and padded
    with spaces) parse correctly and stringify without blanks.

    '''
    inputs = ("/", " / ", "2/", " 2 / ", ":", " : ", "2P", " 2 P ",
              "2p", " 2 p ")
    for text in inputs:
        result = Control_Edit_Desc(text)
        expected = text.upper().replace(" ", "")
        assert str(result) == expected
def test_descriptors_subclass(f2003_create):
    '''Control edit descriptors delegated to subclasses parse correctly.
    One representative value per subclass suffices here, as the subclass
    tests exercise the full range of options.

    '''
    for text in ("T2", "SS", "BN", "RU", "DC"):
        result = Control_Edit_Desc(text)
        assert str(result) == text.upper()
def test_dollar_valid(f2003_create, monkeypatch):
    '''A '$' format specification (with or without surrounding spaces)
    parses correctly when the dollar-descriptor extension is enabled.

    '''
    from fparser.two import utils
    monkeypatch.setattr(utils, "EXTENSIONS", ["dollar-descriptor"])
    for text in ("$", " $ "):
        result = Control_Edit_Desc(text)
        assert str(result) == text.upper().replace(" ", "")
def test_dollar_invalid(f2003_create, monkeypatch):
    '''Without 'dollar-descriptor' in utils.EXTENSIONS, a '$' format
    specification must fail to match and raise NoMatchError.

    '''
    from fparser.two import utils
    monkeypatch.setattr(utils, "EXTENSIONS", [])
    for text in ("$", " $ "):
        with pytest.raises(NoMatchError):
            _ = Control_Edit_Desc(text)
def test_invalid_format_errors(f2003_create):
    '''Inputs that are not valid control edit descriptors (None, blanks,
    doubled or misplaced characters) make the match method raise
    NoMatchError.

    '''
    bad_inputs = (None, "", " ", "//", "a /", "/ a", "::", "a :",
                  ": a", "pp", "a p", "p a")
    for text in bad_inputs:
        with pytest.raises(NoMatchError):
            _ = Control_Edit_Desc(text)
def test_internal_error1(f2003_create, monkeypatch):
    '''The str() method assumes the items list has exactly 2 entries;
    patching items to a 3-element list must raise an InternalError.

    '''
    ast = Control_Edit_Desc("3P")
    monkeypatch.setattr(ast, "items", [None, None, None])
    with pytest.raises(InternalError) as excinfo:
        str(ast)
    assert "has '3' items, but expecting 2." in str(excinfo.value)
def test_internal_error2(f2003_create, monkeypatch):
    '''The str() method assumes the descriptor-name entry of items is a
    non-empty string; patching it to None or "" must raise an
    InternalError.

    '''
    ast = Control_Edit_Desc("3P")
    for bad_name in (None, ""):
        monkeypatch.setattr(ast, "items", [ast.items[0], bad_name])
        with pytest.raises(InternalError) as excinfo:
            str(ast)
        assert "should be an edit descriptor name but is empty or None" \
            in str(excinfo.value)
| [
"pytest.raises",
"fparser.two.Fortran2003.Control_Edit_Desc"
] | [((4275, 4302), 'fparser.two.Fortran2003.Control_Edit_Desc', 'Control_Edit_Desc', (['my_input'], {}), '(my_input)\n', (4292, 4302), False, 'from fparser.two.Fortran2003 import Control_Edit_Desc\n'), ((4768, 4795), 'fparser.two.Fortran2003.Control_Edit_Desc', 'Control_Edit_Desc', (['my_input'], {}), '(my_input)\n', (4785, 4795), False, 'from fparser.two.Fortran2003 import Control_Edit_Desc\n'), ((2282, 2309), 'fparser.two.Fortran2003.Control_Edit_Desc', 'Control_Edit_Desc', (['my_input'], {}), '(my_input)\n', (2299, 2309), False, 'from fparser.two.Fortran2003 import Control_Edit_Desc\n'), ((2757, 2784), 'fparser.two.Fortran2003.Control_Edit_Desc', 'Control_Edit_Desc', (['my_input'], {}), '(my_input)\n', (2774, 2784), False, 'from fparser.two.Fortran2003 import Control_Edit_Desc\n'), ((3203, 3230), 'fparser.two.Fortran2003.Control_Edit_Desc', 'Control_Edit_Desc', (['my_input'], {}), '(my_input)\n', (3220, 3230), False, 'from fparser.two.Fortran2003 import Control_Edit_Desc\n'), ((4370, 4398), 'pytest.raises', 'pytest.raises', (['InternalError'], {}), '(InternalError)\n', (4383, 4398), False, 'import pytest\n'), ((3624, 3651), 'pytest.raises', 'pytest.raises', (['NoMatchError'], {}), '(NoMatchError)\n', (3637, 3651), False, 'import pytest\n'), ((3669, 3696), 'fparser.two.Fortran2003.Control_Edit_Desc', 'Control_Edit_Desc', (['my_input'], {}), '(my_input)\n', (3686, 3696), False, 'from fparser.two.Fortran2003 import Control_Edit_Desc\n'), ((3977, 4004), 'pytest.raises', 'pytest.raises', (['NoMatchError'], {}), '(NoMatchError)\n', (3990, 4004), False, 'import pytest\n'), ((4022, 4049), 'fparser.two.Fortran2003.Control_Edit_Desc', 'Control_Edit_Desc', (['my_input'], {}), '(my_input)\n', (4039, 4049), False, 'from fparser.two.Fortran2003 import Control_Edit_Desc\n'), ((4907, 4935), 'pytest.raises', 'pytest.raises', (['InternalError'], {}), '(InternalError)\n', (4920, 4935), False, 'import pytest\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division
from __future__ import print_function
from NumPyNet.activations import Activations
from NumPyNet.utils import _check_activation
from NumPyNet.utils import check_is_fitted
import numpy as np
from NumPyNet.layers.base import BaseLayer
__author__ = ['<NAME>', '<NAME>']
__email__ = ['<EMAIL>', '<EMAIL>']
class Activation_layer(BaseLayer):
  '''
  Activation layer

  Applies an element-wise activation function in `forward` and its
  gradient (as a multiplicative factor on the incoming delta) in
  `backward`. The layer has no trainable parameters.

  Parameters
  ----------
    input_shape : tuple (default=None)
      Input dimensions as tuple of 4 integers

    activation : str or Activation object
      Activation function to apply into the layer.

  Example
  -------
  >>> import os
  >>> import pylab as plt
  >>> from PIL import Image
  >>> from NumPyNet import activations
  >>>
  >>> activation_func = activations.Relu()
  >>>
  >>> img_2_float = lambda im : ((im - im.min()) * (1./(im.max() - im.min()) * 1.)).astype(float)
  >>> float_2_img = lambda im : ((im - im.min()) * (1./(im.max() - im.min()) * 255.)).astype(np.uint8)
  >>>
  >>> filename = os.path.join(os.path.dirname(__file__), '..', '..', 'data', 'dog.jpg')
  >>> inpt = np.asarray(Image.open(filename), dtype=float)
  >>> inpt.setflags(write=1)
  >>> inpt = img_2_float(inpt)
  >>> # Relu activation constrain
  >>> inpt = inpt * 2 - 1
  >>>
  >>> # add batch = 1
  >>> inpt = np.expand_dims(inpt, axis=0)
  >>>
  >>> layer = Activation_layer(input_shape=inpt.shape, activation=activation_func)
  >>>
  >>> # FORWARD
  >>>
  >>> layer.forward(inpt)
  >>> forward_out = layer.output
  >>> print(layer)
  >>>
  >>> # BACKWARD
  >>>
  >>> layer.delta = np.ones(shape=inpt.shape, dtype=float)
  >>> delta = np.zeros(shape=inpt.shape, dtype=float)
  >>> layer.backward(delta, copy=True)
  >>>
  >>> # Visualizations
  >>>
  >>> fig, (ax1, ax2, ax3) = plt.subplots(nrows=1, ncols=3, figsize=(10, 5))
  >>> fig.subplots_adjust(left=0.1, right=0.95, top=0.95, bottom=0.15)
  >>>
  >>> fig.suptitle('Activation Layer : {}'.format(activation_func.name))
  >>>
  >>> ax1.imshow(float_2_img(inpt[0]))
  >>> ax1.set_title('Original image')
  >>> ax1.axis('off')
  >>>
  >>> ax2.imshow(float_2_img(forward_out[0]))
  >>> ax2.set_title("Forward")
  >>> ax2.axis("off")
  >>>
  >>> ax3.imshow(float_2_img(delta[0]))
  >>> ax3.set_title('Backward')
  >>> ax3.axis('off')
  >>>
  >>> fig.tight_layout()
  >>> plt.show()

  .. image:: ../../../NumPyNet/images/activation_relu.png

  References
  ----------
  - TODO
  '''
  def __init__(self, input_shape=None, activation=Activations, **kwargs):
    # resolve the given activation (string or Activations subclass) to a
    # concrete object and keep its forward function and its gradient
    activation = _check_activation(self, activation)
    self.activation = activation.activate
    self.gradient = activation.gradient
    super(Activation_layer, self).__init__(input_shape=input_shape)
  def __str__(self):
    '''
    Printer

    Returns a one-line summary; input and output shapes are identical
    since the activation is applied element-wise.
    '''
    batch, out_width, out_height, out_channels = self.out_shape
    return 'activ {0:>4d} x{1:>4d} x{2:>4d} x{3:>4d} -> {0:>4d} x{1:>4d} x{2:>4d} x{3:>4d}'.format(
           batch, out_width, out_height, out_channels)
  def forward(self, inpt, copy=True):
    '''
    Forward of the activation layer, apply the selected activation function to
    the input.

    Parameters
    ----------
    inpt: array-like
      Input array to activate.

    copy: bool (default=True)
      If True make a copy of the input before applying the activation.

    Returns
    -------
    self
    '''
    self._check_dims(shape=self.out_shape, arr=inpt, func='Forward')
    self.output = self.activation(inpt, copy=copy)
    # delta is reset here and accumulated by the following layer
    self.delta = np.zeros(shape=self.out_shape, dtype=float)
    return self
  def backward(self, delta, copy=False):
    '''
    Compute the backward of the activation layer

    Parameters
    ----------
    delta : array-like
      Global error to be backpropagated. Updated in place.

    copy : bool (default=False)
      If True the gradient works on a copy of the stored output.

    Returns
    -------
    self
    '''
    # forward must have been called first, so that self.delta exists
    check_is_fitted(self, 'delta')
    self._check_dims(shape=self.out_shape, arr=delta, func='Backward')
    # chain rule: scale the stored delta by the activation gradient,
    # then write the result back into the caller's delta buffer
    self.delta *= self.gradient(self.output, copy=copy)
    delta[:] = self.delta
    return self
if __name__ == '__main__':
  # Demo: run a Hardtan activation layer forward/backward on a sample image
  # and plot the original image, the activation output and the gradient.
  import os
  import pylab as plt
  from PIL import Image
  from NumPyNet import activations
  activation_func = activations.Hardtan()
  # rescale image values to [0, 1] (float) or [0, 255] (uint8)
  img_2_float = lambda im : ((im - im.min()) * (1./(im.max() - im.min()) * 1.)).astype(float)
  float_2_img = lambda im : ((im - im.min()) * (1./(im.max() - im.min()) * 255.)).astype(np.uint8)
  filename = os.path.join(os.path.dirname(__file__), '..', '..', 'data', 'dog.jpg')
  inpt = np.asarray(Image.open(filename), dtype=float)
  inpt.setflags(write=1)
  inpt = img_2_float(inpt)
  # Relu activation constrain
  inpt = inpt * 2 - 1
  # add batch = 1
  inpt = np.expand_dims(inpt, axis=0)
  layer = Activation_layer(input_shape=inpt.shape, activation=activation_func)
  # FORWARD
  layer.forward(inpt)
  forward_out = layer.output
  print(layer)
  # BACKWARD
  layer.delta = np.ones(shape=inpt.shape, dtype=float)
  delta = np.zeros(shape=inpt.shape, dtype=float)
  layer.backward(delta, copy=True)
  # Visualizations
  fig, (ax1, ax2, ax3) = plt.subplots(nrows=1, ncols=3, figsize=(10, 5))
  fig.subplots_adjust(left=0.1, right=0.95, top=0.95, bottom=0.15)
  fig.suptitle('Activation Layer\nfunction : {}'.format(activation_func.name))
  ax1.imshow(float_2_img(inpt[0]))
  ax1.set_title('Original image')
  ax1.axis('off')
  ax2.imshow(float_2_img(forward_out[0]))
  ax2.set_title("Forward")
  ax2.axis("off")
  ax3.imshow(float_2_img(delta[0]))
  ax3.set_title('Backward')
  ax3.axis('off')
  fig.tight_layout()
  plt.show()
| [
"PIL.Image.open",
"numpy.ones",
"NumPyNet.utils._check_activation",
"NumPyNet.utils.check_is_fitted",
"os.path.dirname",
"numpy.zeros",
"NumPyNet.activations.Hardtan",
"numpy.expand_dims",
"pylab.subplots",
"pylab.show"
] | [((4263, 4284), 'NumPyNet.activations.Hardtan', 'activations.Hardtan', ([], {}), '()\n', (4282, 4284), False, 'from NumPyNet import activations\n'), ((4751, 4779), 'numpy.expand_dims', 'np.expand_dims', (['inpt'], {'axis': '(0)'}), '(inpt, axis=0)\n', (4765, 4779), True, 'import numpy as np\n'), ((4971, 5009), 'numpy.ones', 'np.ones', ([], {'shape': 'inpt.shape', 'dtype': 'float'}), '(shape=inpt.shape, dtype=float)\n', (4978, 5009), True, 'import numpy as np\n'), ((5020, 5059), 'numpy.zeros', 'np.zeros', ([], {'shape': 'inpt.shape', 'dtype': 'float'}), '(shape=inpt.shape, dtype=float)\n', (5028, 5059), True, 'import numpy as np\n'), ((5141, 5188), 'pylab.subplots', 'plt.subplots', ([], {'nrows': '(1)', 'ncols': '(3)', 'figsize': '(10, 5)'}), '(nrows=1, ncols=3, figsize=(10, 5))\n', (5153, 5188), True, 'import pylab as plt\n'), ((5619, 5629), 'pylab.show', 'plt.show', ([], {}), '()\n', (5627, 5629), True, 'import pylab as plt\n'), ((2613, 2648), 'NumPyNet.utils._check_activation', '_check_activation', (['self', 'activation'], {}), '(self, activation)\n', (2630, 2648), False, 'from NumPyNet.utils import _check_activation\n'), ((3607, 3650), 'numpy.zeros', 'np.zeros', ([], {'shape': 'self.out_shape', 'dtype': 'float'}), '(shape=self.out_shape, dtype=float)\n', (3615, 3650), True, 'import numpy as np\n'), ((3915, 3945), 'NumPyNet.utils.check_is_fitted', 'check_is_fitted', (['self', '"""delta"""'], {}), "(self, 'delta')\n", (3930, 3945), False, 'from NumPyNet.utils import check_is_fitted\n'), ((4506, 4531), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (4521, 4531), False, 'import os\n'), ((4584, 4604), 'PIL.Image.open', 'Image.open', (['filename'], {}), '(filename)\n', (4594, 4604), False, 'from PIL import Image\n')] |
from oled import TrackerOled
from color_tracker import ColorTracker
import cv2
from threading import Thread
tracker_oled = TrackerOled()
color_tracker = ColorTracker()


def write_fps():
    """Render the tracker's current FPS centered on the OLED (currently unused)."""
    tracker_oled.writeTextCenter("FPS: {:.2f}".format(color_tracker.fps.fps()))


tracker_oled.writeTextCenter("READY")

# Main loop: process camera frames and mirror the tracked point on the OLED.
while True:
    color_tracker.processFrame()
    # t = Thread(target=write_fps, args=(), daemon=True)
    # t.start()
    (frame_height, frame_width, frame_channels) = color_tracker.frame.shape
    if color_tracker.center is not None:
        # scale the tracked point from camera coordinates to OLED coordinates
        (point_x, point_y) = color_tracker.center
        draw_x = int(round(point_x * tracker_oled.oled.width / frame_width))
        draw_y = int(round(point_y * tracker_oled.oled.height / frame_height))
        tracker_oled.drawPoint(draw_x, draw_y)
    # else:
    #     tracker_oled.writeTextCenter("NOT FOUND")
    key = cv2.waitKey(1) & 0xFF
    # if the 'q' key is pressed, stop the loop
    if key == ord("q"):
        color_tracker.release()
        # BUG FIX: the original called out.release() here, but no `out`
        # (e.g. a cv2.VideoWriter) is ever created, so quitting raised
        # a NameError. The call has been removed.
        tracker_oled.clearDisplay()
        break
| [
"color_tracker.ColorTracker",
"oled.TrackerOled",
"cv2.waitKey"
] | [((124, 137), 'oled.TrackerOled', 'TrackerOled', ([], {}), '()\n', (135, 137), False, 'from oled import TrackerOled\n'), ((154, 168), 'color_tracker.ColorTracker', 'ColorTracker', ([], {}), '()\n', (166, 168), False, 'from color_tracker import ColorTracker\n'), ((871, 885), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (882, 885), False, 'import cv2\n')] |
"""
This script shows the usage of scikit-learns linear regression functionality.
"""
# %% [markdown]
# # Linear Regression using Scikit-Learn #
# %% [markdown]
# ## Ice Cream Dataset ##
# | Temperature C° | Ice Cream Sales |
# |:--------------:|:---------------:|
# | 15 | 34 |
# | 24 | 587 |
# | 34 | 1200 |
# | 31 | 1080 |
# | 29 | 989 |
# | 26 | 734 |
# | 17 | 80 |
# | 11 | 1 |
# | 23 | 523 |
# | 25 | 651 |
# %% [markdown]
# ### Dependencies ###
# Install Numpy for number crunching and Matplotlib for plotting graphs:
# ```bash
# pip install sklearn
# ```
# %% [markdown]
# ### Imports ###
import numpy as np
import matplotlib.pyplot as plt
from sklearn import linear_model
from sklearn.model_selection import train_test_split
from sklearn.metrics import mean_squared_error
# %% [markdown]
# ### Ice Cream Dataset as Numpy Array ###
# Each row is one observation: [temperature C°, ice cream sales].
# NOTE(review): the last three rows are not in the table above —
# presumably extra low-temperature anchor points; confirm.
data = np.array([[15, 34],
                 [24, 587],
                 [34, 1200],
                 [31, 1080],
                 [29, 989],
                 [26, 734],
                 [17, 80],
                 [11, 1],
                 [23, 523],
                 [25, 651],
                 [0, 0],
                 [2, 0],
                 [12, 5]])
# %% [markdown]
# ### Plotting the Dataset ###
# transpose so the first row is temperatures (x) and the second is sales (y)
x_values, y_values = data.T
plt.style.use('ggplot')
plt.scatter(x_values, y_values)
plt.show()
# %% [markdown]
# ### Prepare Train and Test Data ###
# NOTE(review): no random_state is set, so the split (and the metrics
# below) differ between runs.
x_train, x_test, y_train, y_test = train_test_split(
    x_values, y_values, test_size=1/3)
# reshape to column vectors: scikit-learn expects 2D feature arrays
x_train = x_train.reshape(-1, 1)
x_test = x_test.reshape(-1, 1)
y_train = y_train.reshape(-1, 1)
y_test = y_test.reshape(-1, 1)
# %% [markdown]
# ### Train model ###
regression = linear_model.LinearRegression()
regression.fit(x_train, y_train)
# %% [markdown]
# ### Predict ###
y_prediction = regression.predict(x_test)
# %% [markdown]
# ### Plot Predicted Results ###
plt.scatter(x_test, y_test)
plt.plot(x_test, y_prediction, color='blue')
plt.show()
# %% [markdown]
# ### Print Metrics ###
print('Coefficient: \n', regression.coef_)
print('Intercept: \n', regression.intercept_)
print('Mean Squared Error: %.2f' % mean_squared_error(y_test, y_prediction))
| [
"sklearn.model_selection.train_test_split",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.style.use",
"sklearn.metrics.mean_squared_error",
"numpy.array",
"matplotlib.pyplot.scatter",
"sklearn.linear_model.LinearRegression",
"matplotlib.pyplot.show"
] | [((1100, 1247), 'numpy.array', 'np.array', (['[[15, 34], [24, 587], [34, 1200], [31, 1080], [29, 989], [26, 734], [17, 80\n ], [11, 1], [23, 523], [25, 651], [0, 0], [2, 0], [12, 5]]'], {}), '([[15, 34], [24, 587], [34, 1200], [31, 1080], [29, 989], [26, 734],\n [17, 80], [11, 1], [23, 523], [25, 651], [0, 0], [2, 0], [12, 5]])\n', (1108, 1247), True, 'import numpy as np\n'), ((1526, 1549), 'matplotlib.pyplot.style.use', 'plt.style.use', (['"""ggplot"""'], {}), "('ggplot')\n", (1539, 1549), True, 'import matplotlib.pyplot as plt\n'), ((1550, 1581), 'matplotlib.pyplot.scatter', 'plt.scatter', (['x_values', 'y_values'], {}), '(x_values, y_values)\n', (1561, 1581), True, 'import matplotlib.pyplot as plt\n'), ((1582, 1592), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1590, 1592), True, 'import matplotlib.pyplot as plt\n'), ((1685, 1738), 'sklearn.model_selection.train_test_split', 'train_test_split', (['x_values', 'y_values'], {'test_size': '(1 / 3)'}), '(x_values, y_values, test_size=1 / 3)\n', (1701, 1738), False, 'from sklearn.model_selection import train_test_split\n'), ((1925, 1956), 'sklearn.linear_model.LinearRegression', 'linear_model.LinearRegression', ([], {}), '()\n', (1954, 1956), False, 'from sklearn import linear_model\n'), ((2121, 2148), 'matplotlib.pyplot.scatter', 'plt.scatter', (['x_test', 'y_test'], {}), '(x_test, y_test)\n', (2132, 2148), True, 'import matplotlib.pyplot as plt\n'), ((2149, 2193), 'matplotlib.pyplot.plot', 'plt.plot', (['x_test', 'y_prediction'], {'color': '"""blue"""'}), "(x_test, y_prediction, color='blue')\n", (2157, 2193), True, 'import matplotlib.pyplot as plt\n'), ((2194, 2204), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2202, 2204), True, 'import matplotlib.pyplot as plt\n'), ((2372, 2412), 'sklearn.metrics.mean_squared_error', 'mean_squared_error', (['y_test', 'y_prediction'], {}), '(y_test, y_prediction)\n', (2390, 2412), False, 'from sklearn.metrics import mean_squared_error\n')] |
# create this file
# rerouting all requests that have ‘api’ in the url to the <code>apps.core.urls
from django.conf.urls import url
from django.urls import path
from rest_framework import routers
from base.src import views
from base.src.views import InitViewSet
#from base.src.views import UploadFileForm
#upload stuff
from django.conf import settings
from django.conf.urls.static import static
# DRF router exposing the users/groups viewsets under auto-generated routes
router = routers.DefaultRouter()
router.register(r'users', views.UserViewSet)
router.register(r'groups', views.GroupViewSet)
#router.register(r'titles', TitlesViewSet, base_name='titles')
# explicit routes for the pawsc views (init page, file upload, home)
urlpatterns = [
    path(r'pawsc', InitViewSet.as_view()),
    path(r'pawsc/upload', views.simple_upload, name='simple_upload'),
    path(r'pawsc/home', views.home, name='home')
]
# append the router-generated routes after the explicit ones
urlpatterns += router.urls
| [
"django.urls.path",
"rest_framework.routers.DefaultRouter",
"base.src.views.InitViewSet.as_view"
] | [((408, 431), 'rest_framework.routers.DefaultRouter', 'routers.DefaultRouter', ([], {}), '()\n', (429, 431), False, 'from rest_framework import routers\n'), ((653, 716), 'django.urls.path', 'path', (['"""pawsc/upload"""', 'views.simple_upload'], {'name': '"""simple_upload"""'}), "('pawsc/upload', views.simple_upload, name='simple_upload')\n", (657, 716), False, 'from django.urls import path\n'), ((724, 767), 'django.urls.path', 'path', (['"""pawsc/home"""', 'views.home'], {'name': '"""home"""'}), "('pawsc/home', views.home, name='home')\n", (728, 767), False, 'from django.urls import path\n'), ((625, 646), 'base.src.views.InitViewSet.as_view', 'InitViewSet.as_view', ([], {}), '()\n', (644, 646), False, 'from base.src.views import InitViewSet\n')] |
#!/usr/local/bin/python
# -*- coding: utf-8 -*-
'''test_Rainbow_pen
'''
import sys, os
import numpy as np
from matplotlib import pyplot as plt
from PIL import Image
FN_OUT = 'rainbow_pen_320x240.png'
def mk_col(w, h, x, y):
  '''Return (band, alpha, base RGB, upper tweak, lower tweak) for the
  rainbow band (7 horizontal stripes) that row y of an h-pixel image
  falls into. Alpha is always 255; w and x are unused here.'''
  bands = (
    ((192, 0, 0), (32, 0, 0), (0, 32, 0)),      # R
    ((192, 96, 0), (0, -32, 0), (0, 32, 0)),    # O-
    ((192, 192, 0), (0, -32, 0), (-32, 0, 0)),  # Y
    ((0, 192, 0), (64, 0, 0), (0, 0, 64)),      # G
    ((0, 192, 192), (0, 0, -64), (0, -64, 0)),  # C
    ((0, 0, 192), (0, 64, 0), (32, 0, 0)),      # B
    ((96, 0, 192), (-32, 0, 0), (32, 0, 0)))    # M-
  band = int(7 * y / h)
  base, upper, lower = bands[band]
  return (band, 255, base, upper, lower)
def mk_dum(w, h, x, y):
  '''Fully opaque RGBA pixel using only the band's base colour (no shading).'''
  _, alpha, (red, green, blue), _, _ = mk_col(w, h, x, y)
  return (red, green, blue, alpha)
def mk_rainbow(w, h, x, y):
  '''Return the RGBA value for pixel (x, y) of a w x h rainbow image:
  each of the 7 bands is shaded (top/bottom thirds tweaked), and the
  left quarter is carved into a pen-nib shape (transparent background,
  coloured tip, light-brown body).'''
  # return (x % 256, y % 256, 128, 255)
  i, a, (r, g, b), u, v = mk_col(w, h, x, y)
  d = h / 7.0                  # band height in pixels
  z = int(y - i * d)           # row offset within the current band
  e = d / 3.0
  # f selects the band third: +1 top third, -1 bottom third, 0 middle
  f = 1 if z < e else (-1 if z > 2*e else 0)
  rgb = np.array((r, g, b))
  if f > 0: rgb += np.array(u)
  if f < 0: rgb += np.array(v)
  r, g, b = rgb
  if x < w / 4:
    # left quarter: draw the pen nib as a wedge within the band
    j, k = 2.0 * d * x / w, d / 2.0
    t = z + j < k or z - j > k   # outside the wedge opening
    if x < w / 36 or t: return (255, 255, 255, 0) # transparent
    if x < w / 12: return (r, g, b, a)
    else: return (224, 128, 0, 255) # light brown
  return (r, g, b, a)
def rainbow_pen(w, h):
  '''Build a w x h rainbow-pen RGBA image pixel by pixel, save it to
  FN_OUT as PNG and show six variants (colour, grayscale, heatmap,
  YCbCr and two plain-colour placeholders) in a matplotlib grid.'''
  fig = plt.figure(figsize=(6, 4), dpi=96)
  # "dum" image: flat band colours only, no shading or pen-nib shape
  dm = np.ndarray((h, w, 4), dtype=np.uint8)
  for y in range(h):
    for x in range(w):
      dm[y][x] = mk_dum(w, h, x, y)
  dum = Image.fromarray(dm[::-1,:,:], 'RGBA')
  # full rainbow-pen image (rows flipped for display; saved un-flipped)
  im = np.ndarray((h, w, 4), dtype=np.uint8)
  for y in range(h):
    for x in range(w):
      im[y][x] = mk_rainbow(w, h, x, y)
  img = Image.fromarray(im[::-1,:,:], 'RGBA')
  Image.fromarray(im, 'RGBA').save(FN_OUT, 'PNG')
  ax = fig.add_subplot(231)
  ax.imshow(img)
  ax = fig.add_subplot(232)
  ax.imshow(img.convert('L'), cmap='gray', vmin=0, vmax=255)
  ax = fig.add_subplot(233)
  ax.imshow(img.convert('L')) # auto heatmap
  ax = fig.add_subplot(234)
  ax.imshow(img.convert('YCbCr')) # ok ?
  ax = fig.add_subplot(235)
  ax.imshow(dum) # img.convert('LAB')) # not supported on PIL <= py 2.5 ?
  ax = fig.add_subplot(236)
  ax.imshow(dum) # img.convert('HSV')) # not supported on PIL <= py 2.5 ?
  plt.show()
if __name__ == '__main__':
  # generate, save and display the 320x240 demo image
  rainbow_pen(320, 240)
| [
"PIL.Image.fromarray",
"numpy.array",
"matplotlib.pyplot.figure",
"numpy.ndarray",
"matplotlib.pyplot.show"
] | [((1087, 1106), 'numpy.array', 'np.array', (['(r, g, b)'], {}), '((r, g, b))\n', (1095, 1106), True, 'import numpy as np\n'), ((1475, 1509), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(6, 4)', 'dpi': '(96)'}), '(figsize=(6, 4), dpi=96)\n', (1485, 1509), True, 'from matplotlib import pyplot as plt\n'), ((1518, 1555), 'numpy.ndarray', 'np.ndarray', (['(h, w, 4)'], {'dtype': 'np.uint8'}), '((h, w, 4), dtype=np.uint8)\n', (1528, 1555), True, 'import numpy as np\n'), ((1644, 1683), 'PIL.Image.fromarray', 'Image.fromarray', (['dm[::-1, :, :]', '"""RGBA"""'], {}), "(dm[::-1, :, :], 'RGBA')\n", (1659, 1683), False, 'from PIL import Image\n'), ((1690, 1727), 'numpy.ndarray', 'np.ndarray', (['(h, w, 4)'], {'dtype': 'np.uint8'}), '((h, w, 4), dtype=np.uint8)\n', (1700, 1727), True, 'import numpy as np\n'), ((1820, 1859), 'PIL.Image.fromarray', 'Image.fromarray', (['im[::-1, :, :]', '"""RGBA"""'], {}), "(im[::-1, :, :], 'RGBA')\n", (1835, 1859), False, 'from PIL import Image\n'), ((2393, 2403), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2401, 2403), True, 'from matplotlib import pyplot as plt\n'), ((1126, 1137), 'numpy.array', 'np.array', (['u'], {}), '(u)\n', (1134, 1137), True, 'import numpy as np\n'), ((1157, 1168), 'numpy.array', 'np.array', (['v'], {}), '(v)\n', (1165, 1168), True, 'import numpy as np\n'), ((1860, 1887), 'PIL.Image.fromarray', 'Image.fromarray', (['im', '"""RGBA"""'], {}), "(im, 'RGBA')\n", (1875, 1887), False, 'from PIL import Image\n')] |
# k3d.py
#
# Copyright 2020 <NAME>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import os
import ssl
import subprocess
import time
import urllib.error
import urllib.request
import datetime
from dateutil.parser import parse
from typing import Dict, Iterator, List
from typing import Optional, Tuple, Callable
from gi.repository import GObject
from .config import APP_ENV_PREFIX
from .config import ApplicationSettings
from .config import (DEFAULT_EXTRA_PATH,
DEFAULT_API_SERVER_PORT_RANGE,
DEFAULT_K3D_WAIT_TIME)
from .docker import DockerController
from .helm import HelmChart, cleanup_for_owner
from .utils import (call_in_main_thread,
find_unused_port_in_range,
parse_or_get_address,
find_executable,
run_command_stdout)
from .utils_ui import show_notification
# the header/footer length in the "k3d list" output
K3D_LIST_HEADER_LEN = 3
K3D_LIST_FOOTER_LEN = 1
# directory in the K3s conatiner where we should put manifests for being automatically loaded
K3D_DOCKER_MANIFESTS_DIR = "/var/lib/rancher/k3s/server/manifests/"
###############################################################################
k3d_exe = find_executable("k3d", extra_paths=DEFAULT_EXTRA_PATH)
logging.debug(f"k3d found at {k3d_exe}")
def run_k3d_command(*args, **kwargs) -> Iterator[str]:
    """
    Invoke the k3d executable with the given arguments, yielding its
    standard output line by line.
    """
    logging.debug(f"[K3D] Running k3d command: {args}")
    for line in run_command_stdout(k3d_exe, *args, **kwargs):
        yield line
###############################################################################
# errors
###############################################################################
class K3dError(Exception):
    """Root of the k3d exception hierarchy; all k3d errors derive from it."""
class EmptyClusterNameError(K3dError):
    """Raised when a cluster is given an empty name."""
class InvalidNumWorkersError(K3dError):
    """Raised when the requested number of workers is invalid."""
class ClusterCreationError(K3dError):
    """Raised when creating a cluster fails."""
class ClusterDestructionError(K3dError):
    """Raised when destroying a cluster fails."""
class ClusterNotFoundError(K3dError):
    """Raised when the requested cluster cannot be found."""
class NoKubeconfigObtainedError(K3dError):
    """Raised when no kubeconfig could be obtained for a cluster."""
class NoServerError(K3dError):
    """Raised when no Docker server is available."""
###############################################################################
# k3d clusters
###############################################################################
class K3dCluster(GObject.GObject):
    """
    One K3s cluster managed through the `k3d` command line tool.

    An instance wraps a named cluster: it can create, destroy, start and
    stop it, and lazily derives information about it (kubeconfig, Docker
    server container, dashboard URL).  The GObject signals "created" and
    "destroyed" are emitted on the main thread once the corresponding
    operation has finished.
    """

    # Cluster attributes; per-instance values are injected via **kwargs in
    # __init__.  NOTE(review): `volumes` and `charts` defaults are mutable
    # class-level containers shared by every instance that does not override
    # them -- confirm callers always pass their own copies.
    name: str = ""
    status: str = "running"
    num_workers: int = 0
    use_registry: bool = False
    registry_name: str = None
    registry_port: str = None
    registry_volume: str = None
    cache_hub: bool = False
    api_server: str = None
    image: str = None
    volumes: Dict[str, str] = {}
    charts: List[HelmChart] = []
    server_args: str = None

    __gsignals__ = {
        # a signal emitted when the cluster has been created
        "created": (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, (str,)),
        # a signal emitted when the cluster has been destroyed
        "destroyed": (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, (str,))
    }

    def __init__(self, settings: ApplicationSettings, docker: DockerController, **kwargs):
        """
        Args:
            settings: application-wide settings store.
            docker: controller used to inspect this cluster's containers.
            **kwargs: any of the class attributes above (name, num_workers, ...).

        Raises:
            EmptyClusterNameError: if no cluster name was given.
            InvalidNumWorkersError: if num_workers is negative.
        """
        super().__init__()
        self._docker = docker
        self._settings = settings
        self._kubeconfig = None
        self._docker_created: Optional[datetime.datetime] = None
        self._docker_server_ip = None
        self._destroyed = False
        self._status = kwargs.pop("status", "running")
        self.__dict__.update(kwargs)
        # TODO: check the name is valid
        if len(self.name) == 0:
            # BUGFIX: an empty name used to raise InvalidNumWorkersError
            raise EmptyClusterNameError
        if self.num_workers < 0:
            raise InvalidNumWorkersError

    def __str__(self) -> str:
        return f"{self.name}"

    def __eq__(self, other) -> bool:
        # clusters compare equal by name, against either another cluster
        # or a plain string
        if other is None:
            return False
        if isinstance(other, K3dCluster):
            return self.name == other.name
        if isinstance(other, str):
            return self.name == other
        logging.warning(f"Comparing cluster {self.name} to incompatible type {other}")
        return NotImplemented

    def __ne__(self, other) -> bool:
        if other is None:
            return True
        if isinstance(other, K3dCluster):
            return self.name != other.name
        if isinstance(other, str):
            return self.name != other
        logging.warning(f"Comparing cluster {self.name} to incompatible type {other}")
        return NotImplemented

    def quit(self):
        pass

    def create(self, wait=True) -> None:
        """
        Create the cluster by invoking `k3d create`.

        Args:
            wait: when True, pass `--wait` so k3d blocks until the cluster is up.

        Raises:
            EmptyClusterNameError: if the cluster has no name.
            ClusterCreationError: if k3d reports a fatal error.
        """
        args = []
        kwargs = {}
        if not self.name:
            raise EmptyClusterNameError()
        args += [f"--name={self.name}"]
        if self.use_registry:
            args += ["--enable-registry"]
        if self.cache_hub:
            args += ["--enable-registry-cache"]
        if self.registry_volume:
            args += [f"--registry-volume={self.registry_volume}"]
        if self.registry_name:
            args += [f"--registry-name={self.registry_name}"]
        if self.registry_port:
            args += [f"--registry-port={self.registry_port}"]
        if wait:
            args += [f"--wait={DEFAULT_K3D_WAIT_TIME}"]
        if self.num_workers > 0:
            args += [f"--workers={self.num_workers}"]
        if self.image:
            args += [f"--image={self.image}"]
        # create some k3s server arguments
        # by default, we add a custom DNS domain with the same name as the cluster
        args += [f"--server-arg=--cluster-domain={self.name}.local"]
        if self.server_args:
            # NOTE(review): server_args is annotated as `str`; iterating a
            # string yields single characters -- confirm it is really a list.
            args += [f"--server-arg={arg}" for arg in self.server_args if len(arg) > 0]
        # append any extra volumes
        for vol_k, vol_v in self.volumes.items():
            args += [f"--volume={vol_k}:{vol_v}"]
        # append any extra Charts as volumes too, dropped in the manifests
        # directory so K3s auto-installs them at boot
        for chart in self.charts:
            src = chart.generate(self)
            dst = f"{K3D_DOCKER_MANIFESTS_DIR}/{chart.name}.yaml"
            args += [f"--volume={src}:{dst}"]
        # use the given API port or find an unused one
        self.api_server = parse_or_get_address(self.api_server, *DEFAULT_API_SERVER_PORT_RANGE)
        logging.info(f"[K3D] Using API address {self.api_server}")
        args += [f"--api-port={self.api_server}"]
        # check if we must use an env variable for the DOCKER_HOST
        docker_host = self._docker.docker_host
        default_docker_host = self._docker.default_docker_host
        if docker_host != self._docker.default_docker_host:
            logging.debug(f"[K3D] Overriding DOCKER_HOST={docker_host} (!= {default_docker_host})")
            new_env = os.environ.copy()
            new_env["DOCKER_HOST"] = docker_host
            kwargs["env"] = new_env
        try:
            logging.info(f"[K3D] Creating cluster (with {args})")
            # BUGFIX: iterate a single generator.  The previous code called
            # next(run_k3d_command(...)) inside a `while` loop, which built a
            # brand-new generator -- i.e. spawned a fresh `k3d create`
            # process -- on every iteration and read only its first line.
            for line in run_k3d_command("create", *args, **kwargs):
                logging.debug(f"[K3D] {line}")
                # detect errors in the output
                if "level=fatal" in line:
                    raise ClusterCreationError(line.strip())
        except Exception as e:
            logging.error(f"Could not create cluster: {e}. Cleaning up...")
            self._cleanup()
            self._destroyed = True
            raise e
        logging.info("[K3D] The cluster has been created")
        self._status = "running"
        call_in_main_thread(lambda: self.emit("created", self.name))

    def destroy(self) -> None:
        """
        Destroy this cluster with `k3d delete`.

        Raises:
            EmptyClusterNameError: if the cluster has no name.
            ClusterDestructionError: if the cluster was already destroyed.
        """
        logging.info("[K3D] Destroying cluster")
        if not self.name:
            raise EmptyClusterNameError()
        if self._destroyed:
            raise ClusterDestructionError("Trying to destroy an already destroyed cluster")
        args = []
        args += [f"--name={self.name}"]
        args += ["--keep-registry-volume"]
        # BUGFIX: iterate one `k3d delete` invocation instead of spawning a
        # new process per next() call (same fix as in create()).
        for line in run_k3d_command("delete", *args):
            logging.debug(f"[K3D] {line}")
        self._cleanup()
        self._destroyed = True
        call_in_main_thread(lambda: self.emit("destroyed", self.name))

    def _cleanup(self) -> None:
        """
        Cleanup any remaining things after destroying a cluster
        """
        logging.debug(f"[K3D] Cleaning up for {self.name}")
        cleanup_for_owner(self.name)

    @property
    def kubeconfig(self) -> Optional[str]:
        """
        Get the kubeconfig file path for this cluster, or None if the
        cluster has been destroyed or no kubeconfig could be obtained.
        """
        if self._destroyed:
            return None
        # cache the kubeconfig: once obtained, it will not change
        if not self._kubeconfig:
            # re-invoke `k3d get-kubeconfig` up to 20 times: right after
            # creation the kubeconfig may not be ready yet (here the
            # re-invocation per iteration is intentional, unlike create()).
            for _ in range(0, 20):
                try:
                    line = next(run_k3d_command("get-kubeconfig", f"--name={self.name}"))
                except StopIteration:
                    break
                except subprocess.CalledProcessError:
                    logging.debug(f"[K3D] ... KUBECONFIG for {self.name} not ready yet...")
                    time.sleep(1)
                else:
                    logging.debug(f"[K3D] ... obtained KUBECONFIG for {self.name} at {line}")
                    self._kubeconfig = line
                    break
        return self._kubeconfig

    @property
    def running(self) -> bool:
        """True while the cluster's tracked status is "running"."""
        return self._status == "running"

    def start(self) -> None:
        """Start a previously stopped cluster with `k3d start` (no-op if running)."""
        if not self.running:
            args = []
            args += [f"--name={self.name}"]
            logging.debug(f"[K3D] Starting {self.name}...")
            # BUGFIX: iterate one invocation (same fix as in create()).
            for line in run_k3d_command("start", *args):
                logging.debug(f"[K3D] {line}")

    def stop(self) -> None:
        """Stop a running cluster with `k3d stop` (no-op if not running)."""
        if self.running:
            args = []
            args += [f"--name={self.name}"]
            logging.debug(f"[K3D] Stopping {self.name}...")
            # BUGFIX: iterate one invocation (same fix as in create()).
            for line in run_k3d_command("stop", *args):
                logging.debug(f"[K3D] {line}")

    @property
    def docker_created(self) -> Optional[datetime.datetime]:
        """Creation time of the server container, parsed lazily and cached."""
        if self._destroyed:
            return None
        if self._docker_created is None:
            c = self._docker.get_container_by_name(self.docker_server_name)
            if c:
                t = self._docker.get_container_created(c)
                if t:
                    try:
                        self._docker_created = parse(t)
                    except Exception as e:
                        logging.error(f"[K3D] could not parse time string {t}: {e}")
        return self._docker_created

    @property
    def docker_server_name(self) -> Optional[str]:
        """Name of the Docker container running the K3s server."""
        if self._destroyed:
            return None
        return f"k3d-{self.name}-server"

    @property
    def docker_network_name(self) -> Optional[str]:
        """Name of the Docker network created for this cluster."""
        if self._destroyed:
            return None
        return f"k3d-{self.name}"

    @property
    def docker_server_ip(self) -> Optional[str]:
        """
        IP of the server container in the cluster network (cached).

        Raises:
            NoServerError: if the container exists but no IP can be obtained.
        """
        if self._destroyed:
            return None
        if not self._docker_server_ip:
            c = self._docker.get_container_by_name(self.docker_server_name)
            if c:
                ip = self._docker.get_container_ip(c, self.docker_network_name)
                if ip is None:
                    raise NoServerError(
                        f"could not obtain server IP for {self.docker_server_name} in network {self.docker_network_name}")
                self._docker_server_ip = ip
        return self._docker_server_ip

    @property
    def dashboard_url(self) -> Optional[str]:
        """URL of the cluster dashboard, or None if no server IP is known."""
        if self._destroyed:
            return None
        ip = self.docker_server_ip
        if ip:
            return f"https://{self.docker_server_ip}/"

    def check_dashboard(self, *args) -> bool:
        """
        Check that the Dashboard is ready.

        Returns a truthy HTTP status code on success, False otherwise.
        NOTE(review): annotated `-> bool` but returns an int status code on
        success; callers relying on truthiness are unaffected.
        """
        try:
            # self-signed certificate: skip verification deliberately
            context = ssl._create_unverified_context()
            return urllib.request.urlopen(self.dashboard_url, context=context).getcode()
        except urllib.error.URLError as e:
            logging.info(f"Error when checking {self.dashboard_url}: {e}")
            return False

    def open_dashboard(self, *args) -> None:
        """Open the dashboard URL in the default web browser, if available."""
        import webbrowser
        u = self.dashboard_url
        if u is not None:
            logging.debug(f"[K3D] Opening '{u}' in default web browser")
            webbrowser.open(u)
        else:
            logging.warning(f"[K3D] No URL to open")

    @property
    def script_environment(self) -> Dict[str, str]:
        """
        Return a dictionary with env variables for running scripts for this cluster
        """
        # Note: make sure we do not return any non-string value or subprocess.run will throw an exception.
        env = {
            f"{APP_ENV_PREFIX}_CLUSTER_NAME": str(self.name),
        }
        if not self._destroyed:
            env.update({
                f"{APP_ENV_PREFIX}_REGISTRY_ENABLED": "1" if self.use_registry else "",
                f"{APP_ENV_PREFIX}_REGISTRY_NAME": str(self.registry_name) if self.registry_name else "",
                f"{APP_ENV_PREFIX}_REGISTRY_PORT": str(self.registry_port) if self.registry_port else "",
                f"{APP_ENV_PREFIX}_MASTER_IP": str(self.docker_server_ip) if self.docker_server_ip is not None else "",
                f"{APP_ENV_PREFIX}_KUBECONFIG": self.kubeconfig if self.kubeconfig is not None else "",
            })
        return env
GObject.type_register(K3dCluster)
| [
"dateutil.parser.parse",
"logging.debug",
"logging.warning",
"ssl._create_unverified_context",
"os.environ.copy",
"webbrowser.open",
"time.sleep",
"logging.error",
"logging.info",
"gi.repository.GObject.type_register"
] | [((1906, 1946), 'logging.debug', 'logging.debug', (['f"""k3d found at {k3d_exe}"""'], {}), "(f'k3d found at {k3d_exe}')\n", (1919, 1946), False, 'import logging\n'), ((14769, 14802), 'gi.repository.GObject.type_register', 'GObject.type_register', (['K3dCluster'], {}), '(K3dCluster)\n', (14790, 14802), False, 'from gi.repository import GObject\n'), ((2046, 2097), 'logging.debug', 'logging.debug', (['f"""[K3D] Running k3d command: {args}"""'], {}), "(f'[K3D] Running k3d command: {args}')\n", (2059, 2097), False, 'import logging\n'), ((4817, 4895), 'logging.warning', 'logging.warning', (['f"""Comparing cluster {self.name} to incompatible type {other}"""'], {}), "(f'Comparing cluster {self.name} to incompatible type {other}')\n", (4832, 4895), False, 'import logging\n'), ((5181, 5259), 'logging.warning', 'logging.warning', (['f"""Comparing cluster {self.name} to incompatible type {other}"""'], {}), "(f'Comparing cluster {self.name} to incompatible type {other}')\n", (5196, 5259), False, 'import logging\n'), ((7131, 7189), 'logging.info', 'logging.info', (['f"""[K3D] Using API address {self.api_server}"""'], {}), "(f'[K3D] Using API address {self.api_server}')\n", (7143, 7189), False, 'import logging\n'), ((8380, 8430), 'logging.info', 'logging.info', (['"""[K3D] The cluster has been created"""'], {}), "('[K3D] The cluster has been created')\n", (8392, 8430), False, 'import logging\n'), ((8645, 8685), 'logging.info', 'logging.info', (['"""[K3D] Destroying cluster"""'], {}), "('[K3D] Destroying cluster')\n", (8657, 8685), False, 'import logging\n'), ((9437, 9488), 'logging.debug', 'logging.debug', (['f"""[K3D] Cleaning up for {self.name}"""'], {}), "(f'[K3D] Cleaning up for {self.name}')\n", (9450, 9488), False, 'import logging\n'), ((7490, 7582), 'logging.debug', 'logging.debug', (['f"""[K3D] Overriding DOCKER_HOST={docker_host} (!= {default_docker_host})"""'], {}), "(\n f'[K3D] Overriding DOCKER_HOST={docker_host} (!= {default_docker_host})')\n", (7503, 7582), 
False, 'import logging\n'), ((7600, 7617), 'os.environ.copy', 'os.environ.copy', ([], {}), '()\n', (7615, 7617), False, 'import os\n'), ((7729, 7782), 'logging.info', 'logging.info', (['f"""[K3D] Creating cluster (with {args})"""'], {}), "(f'[K3D] Creating cluster (with {args})')\n", (7741, 7782), False, 'import logging\n'), ((10658, 10705), 'logging.debug', 'logging.debug', (['f"""[K3D] Starting {self.name}..."""'], {}), "(f'[K3D] Starting {self.name}...')\n", (10671, 10705), False, 'import logging\n'), ((11064, 11111), 'logging.debug', 'logging.debug', (['f"""[K3D] Stopping {self.name}..."""'], {}), "(f'[K3D] Stopping {self.name}...')\n", (11077, 11111), False, 'import logging\n'), ((13215, 13247), 'ssl._create_unverified_context', 'ssl._create_unverified_context', ([], {}), '()\n', (13245, 13247), False, 'import ssl\n'), ((13621, 13681), 'logging.debug', 'logging.debug', (['f"""[K3D] Opening \'{u}\' in default web browser"""'], {}), '(f"[K3D] Opening \'{u}\' in default web browser")\n', (13634, 13681), False, 'import logging\n'), ((13694, 13712), 'webbrowser.open', 'webbrowser.open', (['u'], {}), '(u)\n', (13709, 13712), False, 'import webbrowser\n'), ((13739, 13779), 'logging.warning', 'logging.warning', (['f"""[K3D] No URL to open"""'], {}), "(f'[K3D] No URL to open')\n", (13754, 13779), False, 'import logging\n'), ((8224, 8287), 'logging.error', 'logging.error', (['f"""Could not create cluster: {e}. Cleaning up..."""'], {}), "(f'Could not create cluster: {e}. 
Cleaning up...')\n", (8237, 8287), False, 'import logging\n'), ((9094, 9124), 'logging.debug', 'logging.debug', (['f"""[K3D] {line}"""'], {}), "(f'[K3D] {line}')\n", (9107, 9124), False, 'import logging\n'), ((13392, 13454), 'logging.info', 'logging.info', (['f"""Error when checking {self.dashboard_url}: {e}"""'], {}), "(f'Error when checking {self.dashboard_url}: {e}')\n", (13404, 13454), False, 'import logging\n'), ((7924, 7954), 'logging.debug', 'logging.debug', (['f"""[K3D] {line}"""'], {}), "(f'[K3D] {line}')\n", (7937, 7954), False, 'import logging\n'), ((10256, 10329), 'logging.debug', 'logging.debug', (['f"""[K3D] ... obtained KUBECONFIG for {self.name} at {line}"""'], {}), "(f'[K3D] ... obtained KUBECONFIG for {self.name} at {line}')\n", (10269, 10329), False, 'import logging\n'), ((10836, 10866), 'logging.debug', 'logging.debug', (['f"""[K3D] {line}"""'], {}), "(f'[K3D] {line}')\n", (10849, 10866), False, 'import logging\n'), ((11241, 11271), 'logging.debug', 'logging.debug', (['f"""[K3D] {line}"""'], {}), "(f'[K3D] {line}')\n", (11254, 11271), False, 'import logging\n'), ((10108, 10179), 'logging.debug', 'logging.debug', (['f"""[K3D] ... KUBECONFIG for {self.name} not ready yet..."""'], {}), "(f'[K3D] ... KUBECONFIG for {self.name} not ready yet...')\n", (10121, 10179), False, 'import logging\n'), ((10200, 10213), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (10210, 10213), False, 'import time\n'), ((11752, 11760), 'dateutil.parser.parse', 'parse', (['t'], {}), '(t)\n', (11757, 11760), False, 'from dateutil.parser import parse\n'), ((11828, 11888), 'logging.error', 'logging.error', (['f"""[K3D] could not parse time string {t}: {e}"""'], {}), "(f'[K3D] could not parse time string {t}: {e}')\n", (11841, 11888), False, 'import logging\n')] |
import os
from utility import write_to_output, print_board, color_is_black, board_to_list, print_results
from board import Board
import time
from algorithm import minimax, minimax_alpha_beta, minimax_alpha_beta_final, minimax_alpha_beta_rand
from math import sqrt, floor
# Time the initial input parse + board construction.
# NOTE(review): these results are overwritten by the identical per-game parse
# inside the benchmark loop below -- this pre-loop parse looks redundant.
start = time.time()
# parse input file
with open("input.txt", "r") as input_file:
    game_mode = input_file.readline().rstrip()  # 1st line: game mode
    color = input_file.readline().rstrip()  # 2nd line: player color
    time_left = float(input_file.readline().rstrip())  # 3rd line: remaining time
    board_list = []
    for i in range(8):  # next 8 lines: 2-d list representing the board
        board_list.append(list(input_file.readline().rstrip()))
# create initial board object
is_black = color_is_black(color)
start = time.time()
board = Board(board_list, is_black)
end = time.time()
print("time to make board object =", end - start)
# Benchmark harness: plays repeated checkers games between two minimax
# configurations (black: minimax_alpha_beta_final depth 4/4, white:
# minimax_alpha_beta_rand depth 2/2) and writes aggregate win/loss/timeout
# counts to test.txt.
with open('test.txt', 'w') as output:
    # print description of game
    print("d_b = 4; d_w = 4; simple heuristic for both b/w", file = output)
    #print("v3 changes: changed king weight from 30 to 20, added delta weight to small opp piece case", file = output)
    # play the games and accumulate results
    # NOTE(review): the loop below runs 10 games, although earlier comments
    # in this file mention 100 games -- confirm which count is intended.
    black_wins = 0
    white_wins = 0
    timeouts = 0
    for i in range(10):
        start = time.time()
        # parse input file (re-read every game so each game starts fresh)
        with open("input.txt", "r") as input_file:
            game_mode = input_file.readline().rstrip()  # 1st line: game mode
            color = input_file.readline().rstrip()  # 2nd line: player color
            time_left = float(input_file.readline().rstrip())  # 3rd line: remaining time
            board_list = []
            # next 8 lines: 2-d list representing the board
            # NOTE(review): this loop variable `i` shadows the outer game
            # counter; harmless because `for` rebinds it next game, but
            # confusing -- consider renaming.
            for i in range(8):
                board_list.append(list(input_file.readline().rstrip()))
        # create initial board object
        is_black = color_is_black(color)
        start = time.time()
        board = Board(board_list, is_black)
        end = time.time()
        print("time to make board object =", end - start)
        max_iterations = 100
        iteration_count = 1
        total_time_black = 0
        total_time_white = 0
        # loop until someone wins or maximum iterations exceeded
        while True:
            start = time.time()
            # reset per-move node counters on the search algorithms
            minimax_alpha_beta_rand.count = 0
            minimax_alpha_beta_final.count = 0
            # full moves completed so far (two plies per move)
            move_count = floor(iteration_count/2)
            if board.active_player: # black's turn
                # NOTE(review): both branches are currently identical
                # (leftover from A/B-testing different depths per parity).
                if move_count%2 == 0:
                    value, result, new_board = minimax_alpha_beta_final(board, board, board.active_player, 4, 4, float("-inf"), float("inf"), True, (), board)
                else:
                    value, result, new_board = minimax_alpha_beta_final(board, board, board.active_player, 4, 4, float("-inf"), float("inf"), True, (), board)
            else: # white's turn
                # NOTE(review): both branches identical here as well.
                if move_count%2 == 0:
                    value, result, new_board = minimax_alpha_beta_rand(board, board, board.active_player, 2, 2, float("-inf"), float("inf"), True, (), board)
                else:
                    value, result, new_board = minimax_alpha_beta_rand(board, board, board.active_player, 2, 2, float("-inf"), float("inf"), True, (), board)
            end = time.time()
            runtime = end - start
            # if we run into a blocked board with lots of pieces left (i.e. it wasn't caught in game_over method):
            if result == None:
                print("total time black =", total_time_black)
                print("total time white =", total_time_white)
                if board.num_pieces_black == 0:
                    white_wins += 1
                elif board.num_pieces_white == 0:
                    black_wins += 1
                else:
                    timeouts += 1
                break
            # set up new board
            board = new_board
            # create new board_list (for printing later)
            board_list = board_to_list(board)
            # print result to game_output.txt
            print_results(board, result, board_list, iteration_count, runtime)
            # accumulate total runtime (active_player is still the player who
            # just moved; it is switched right below)
            if board.active_player: # black's total time
                total_time_black += runtime
            else: # white's total time
                total_time_white += runtime
            # switch player
            board.active_player = not board.active_player
            # break loop if someone won or exceeded max iterations
            if board.game_over() or iteration_count >= max_iterations:
                print("total time black =", total_time_black)
                print("total time white =", total_time_white)
                if board.num_pieces_black == 0:
                    white_wins += 1
                elif board.num_pieces_white == 0:
                    black_wins += 1
                else:
                    timeouts += 1
                break
            iteration_count += 1
    # print final results to file
    print("black wins =", black_wins, file = output)
    print("white wins =", white_wins, file = output)
    print("timeouts =", timeouts, file = output)
# def print_results(board, result, board_list, iteration_count, runtime):
# if board.active_player == True:
# player = "black"
# else:
# player = "white"
# print("iteration:", iteration_count)
# print("runtime:", runtime)
# print("player:", player)
# print("move:", result)
# for row in board_list:
# print(row) | [
"utility.board_to_list",
"math.floor",
"utility.print_results",
"utility.color_is_black",
"board.Board",
"time.time"
] | [((281, 292), 'time.time', 'time.time', ([], {}), '()\n', (290, 292), False, 'import time\n'), ((770, 791), 'utility.color_is_black', 'color_is_black', (['color'], {}), '(color)\n', (784, 791), False, 'from utility import write_to_output, print_board, color_is_black, board_to_list, print_results\n'), ((800, 811), 'time.time', 'time.time', ([], {}), '()\n', (809, 811), False, 'import time\n'), ((820, 847), 'board.Board', 'Board', (['board_list', 'is_black'], {}), '(board_list, is_black)\n', (825, 847), False, 'from board import Board\n'), ((854, 865), 'time.time', 'time.time', ([], {}), '()\n', (863, 865), False, 'import time\n'), ((1375, 1386), 'time.time', 'time.time', ([], {}), '()\n', (1384, 1386), False, 'import time\n'), ((1944, 1965), 'utility.color_is_black', 'color_is_black', (['color'], {}), '(color)\n', (1958, 1965), False, 'from utility import write_to_output, print_board, color_is_black, board_to_list, print_results\n'), ((1982, 1993), 'time.time', 'time.time', ([], {}), '()\n', (1991, 1993), False, 'import time\n'), ((2010, 2037), 'board.Board', 'Board', (['board_list', 'is_black'], {}), '(board_list, is_black)\n', (2015, 2037), False, 'from board import Board\n'), ((2052, 2063), 'time.time', 'time.time', ([], {}), '()\n', (2061, 2063), False, 'import time\n'), ((2343, 2354), 'time.time', 'time.time', ([], {}), '()\n', (2352, 2354), False, 'import time\n'), ((2473, 2499), 'math.floor', 'floor', (['(iteration_count / 2)'], {}), '(iteration_count / 2)\n', (2478, 2499), False, 'from math import sqrt, floor\n'), ((5099, 5110), 'time.time', 'time.time', ([], {}), '()\n', (5108, 5110), False, 'import time\n'), ((5811, 5831), 'utility.board_to_list', 'board_to_list', (['board'], {}), '(board)\n', (5824, 5831), False, 'from utility import write_to_output, print_board, color_is_black, board_to_list, print_results\n'), ((5890, 5956), 'utility.print_results', 'print_results', (['board', 'result', 'board_list', 'iteration_count', 'runtime'], {}), '(board, 
result, board_list, iteration_count, runtime)\n', (5903, 5956), False, 'from utility import write_to_output, print_board, color_is_black, board_to_list, print_results\n')] |
from __future__ import print_function
from os import getenv
from datetime import datetime
def vprint(*a, **k):
    """Print *a / **k prefixed with a timestamp, but only when the
    VERBOSE environment variable is set (non-empty)."""
    if getenv('VERBOSE'):
        print(datetime.now(), ' ', end='')
        print(*a, **k)
| [
"datetime.datetime.now",
"os.getenv"
] | [((124, 141), 'os.getenv', 'getenv', (['"""VERBOSE"""'], {}), "('VERBOSE')\n", (130, 141), False, 'from os import getenv\n'), ((168, 182), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (180, 182), False, 'from datetime import datetime\n')] |
from talon import Context, Module
# Talon context and module for null-related code commands.
ctx = Context()
mod = Module()
# Tag gates these commands so they can be enabled selectively
# (e.g. per programming language context).
mod.tag("code_data_null", desc="Tag for enabling commands relating to null")
@mod.action_class
class Actions:
    # Action declarations only -- presumably implemented in
    # language-specific context overrides elsewhere; confirm against the
    # rest of the Talon user scripts.
    def code_insert_null():
        """Inserts null"""
    def code_insert_is_null():
        """Inserts check for null"""
    def code_insert_is_not_null():
        """Inserts check for non-null"""
| [
"talon.Module",
"talon.Context"
] | [((41, 50), 'talon.Context', 'Context', ([], {}), '()\n', (48, 50), False, 'from talon import Context, Module\n'), ((57, 65), 'talon.Module', 'Module', ([], {}), '()\n', (63, 65), False, 'from talon import Context, Module\n')] |
from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, \
amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan
import yaml
from seir_model import SEIR_matrix
from common import Window, get_datetime, timesteps_between_dates, get_datetime_array, timesteps_over_timedelta_weeks
from sys import exit
def epidemiology_model():
with open(r'common_params.yaml') as file:
common_params = yaml.full_load(file)
with open(r'regions.yaml') as file:
regions = yaml.full_load(file)
with open(r'seir_params.yaml') as file:
seir_params_multivar = yaml.full_load(file)
nvars=len(seir_params_multivar) # (var=1 is baseline model, var=2 is delta variant)
nregions = len(regions)
epi = []
intl_visitors = []
between_region_mobility_rate = []
between_locality_mobility_rate = []
beds_per_1000 = []
baseline_hosp = []
for rgn in regions:
beds_per_1000.append(rgn['initial']['beds per 1000'])
baseline_hosp.append(rgn['initial']['population'] * rgn['initial']['beds per 1000']/1000)
epivar=[]
for var in seir_params_multivar:
epivar.append(SEIR_matrix(rgn, var, common_params))
if 'international travel' in rgn:
intl_visitors.append(rgn['international travel']['daily arrivals'] * rgn['international travel']['duration of stay'])
else:
intl_visitors.append(0.0)
between_locality_mobility_rate.append(rgn['between locality mobility rate'])
between_region_mobility_rate.append(rgn['between region mobility rate'])
epi.append(epivar) # contains objects with following order: [[rgn1/var1, rgn2/var1], [rgn1/var2, rgn2/var2]]
proportion_total = [e.proportion_global_infected for e in epi[0]]
test1=np_sum(proportion_total,axis=0)
if any(test1<0.999) or any(test1>1.001):
print('Error test1: aborted')
print('proportions of global infections across variants do not sum to 1')
exit()
start_datetime = get_datetime(common_params['time']['COVID start'])
start_time = timesteps_between_dates(common_params['time']['start date'], common_params['time']['COVID start'])
end_time = timesteps_between_dates(common_params['time']['start date'], common_params['time']['end date'])
epi_datetime_array = get_datetime_array(common_params['time']['COVID start'], common_params['time']['end date'])
ntimesteps = end_time - start_time
# All the epidemiological regional models will give the same values for these parameters
epi_invisible_fraction = epi[0][0].invisible_fraction_1stinfection
total_population=0
for i in range(0,len(epi[:][0])):
total_population += epi[i][0].N
normal_bed_occupancy_fraction = common_params['bed occupancy']['normal']
max_reduction_in_normal_bed_occupancy = common_params['bed occupancy']['max reduction']
if 'vaccinate at risk first' in common_params['vaccination']:
vaccinate_at_risk = common_params['vaccination']['vaccinate at risk first']
else:
vaccinate_at_risk = False
avoid_elective_operations= common_params['avoid elective operations']
# Global infection rate per person
global_infection_points = common_params['global infection rate']
global_infection_npoints = len(global_infection_points)
global_infection_traj_start = global_infection_points[0][0]
if get_datetime(global_infection_traj_start) > start_datetime:
global_infection_traj_start = common_params['time']['COVID start']
global_infection_traj_timesteps_array = np_array(range(0,timesteps_between_dates(global_infection_traj_start, common_params['time']['end date']) + 1))
global_infection_ts = np_empty(global_infection_npoints)
global_infection_val = np_empty(global_infection_npoints)
for i in range(0,global_infection_npoints):
global_infection_ts[i] = timesteps_between_dates(global_infection_traj_start, global_infection_points[i][0])
global_infection_val[i] = global_infection_points[i][1]/1000 # Values are entered per 1000
global_infection_rate = np_interp(global_infection_traj_timesteps_array, global_infection_ts, global_infection_val)
# Trunctate at start as necessary
ntrunc = timesteps_between_dates(global_infection_traj_start, common_params['time']['COVID start'])
global_infection_rate = global_infection_rate[ntrunc:]
# Maximum vaccination rate
vaccination_points = common_params['vaccination']['maximum doses per day']
vaccination_delay = timesteps_over_timedelta_weeks(common_params['vaccination']['time to efficacy'])
vaccination_npoints = len(vaccination_points)
vaccination_timesteps_array = np_array(range(0,timesteps_between_dates(common_params['time']['COVID start'], common_params['time']['end date']) + 1))
vaccination_ts = np_empty(vaccination_npoints)
vaccination_val = np_empty(vaccination_npoints)
for i in range(0,vaccination_npoints):
vaccination_ts[i] = timesteps_between_dates(common_params['time']['COVID start'], vaccination_points[i][0]) + vaccination_delay
vaccination_val[i] = vaccination_points[i][1]
vaccination_max_doses = np_interp(vaccination_timesteps_array, vaccination_ts, vaccination_val)
isolate_symptomatic_cases_windows = []
if 'isolate symptomatic cases' in common_params:
for window in common_params['isolate symptomatic cases']:
if window['apply']:
isolate_symptomatic_cases_windows.append(Window((get_datetime(window['start date']) - start_datetime).days,
(get_datetime(window['end date']) - start_datetime).days,
window['ramp up for'],
window['ramp down for'],
(1 - epi_invisible_fraction) * window['fraction of cases isolated']))
isolate_at_risk_windows = []
if 'isolate at risk' in common_params:
for window in common_params['isolate at risk']:
if window['apply']:
isolate_at_risk_windows.append(Window((get_datetime(window['start date']) - start_datetime).days,
(get_datetime(window['end date']) - start_datetime).days,
window['ramp up for'],
window['ramp down for'],
window['fraction of population isolated']))
test_and_trace_windows = []
if 'test and trace' in common_params:
for window in common_params['test and trace']:
if window['apply']:
test_and_trace_windows.append(Window((get_datetime(window['start date']) - start_datetime).days,
(get_datetime(window['end date']) - start_datetime).days,
window['ramp up for'],
window['ramp down for'],
window['fraction of infectious cases isolated']))
soc_dist_windows = []
if 'social distance' in common_params:
for window in common_params['social distance']:
if window['apply']:
soc_dist_windows.append(Window((get_datetime(window['start date']) - start_datetime).days,
(get_datetime(window['end date']) - start_datetime).days,
window['ramp up for'],
window['ramp down for'],
window['effectiveness']))
travel_restrictions_windows = []
if 'international travel restrictions' in common_params:
for window in common_params['international travel restrictions']:
if window['apply']:
travel_restrictions_windows.append(Window((get_datetime(window['start date']) - start_datetime).days,
(get_datetime(window['end date']) - start_datetime).days,
window['ramp up for'],
window['ramp down for'],
window['effectiveness']))
# Initialize values for indicator graphs
Itot_allvars=np_zeros(nregions)
comm_spread_frac_allvars = np_zeros((nregions, nvars))
deaths = np_zeros((nregions, nvars))
deaths_reinf = np_zeros((nregions, nvars))
cumulative_cases = np_zeros((nregions, nvars))
deaths_over_time = np_zeros((nregions, ntimesteps, nvars))
new_deaths_over_time = np_zeros((nregions, ntimesteps, nvars))
deaths_reinf_over_time = np_zeros((nregions, ntimesteps, nvars))
recovered_over_time = np_zeros((nregions, ntimesteps, nvars))
vaccinated_over_time = np_zeros((nregions, ntimesteps, nvars))
rerecovered_over_time = np_zeros((nregions, ntimesteps, nvars))
mortality_rate_over_time = np_zeros((nregions, ntimesteps, nvars))
hospitalization_index_region = np_ones(nregions)
hospitalization_index = np_ones(ntimesteps)
mortality_rate = np_ones(ntimesteps)
infective_over_time = np_zeros((nregions, ntimesteps, nvars))
reinfective_over_time = np_zeros((nregions, ntimesteps, nvars))
susceptible_over_time = np_zeros((nregions, ntimesteps, nvars))
for j in range(0,nregions):
susceptible_over_time[j,0,:] = [e.S for e in epi[j]]
# susceptible_over_time = np_zeros((nregions, ntimesteps, nvars))
# for j in range(0,nregions):
# e=epi[j]
# for v in range(0, len(e)):
# susceptible_over_time[j,0,v] = e[v].S
exposed_over_time = np_zeros((nregions, ntimesteps, nvars))
for j in range(0,nregions):
exposed_over_time[j,0,:] = [np_sum(e.E_nr) + np_sum(e.E_r) for e in epi[j]]
reexposed_over_time = np_zeros((nregions, ntimesteps, nvars))
for j in range(0,nregions):
reexposed_over_time[j,0,:] = [np_sum(e.RE_nr) + np_sum(e.RE_r) for e in epi[j]]
comm_spread_frac_over_time = np_zeros((nregions, ntimesteps, nvars))
for j in range(0,nregions):
comm_spread_frac_over_time[j,0,:] = [e.comm_spread_frac for e in epi[j]]
for i in range(0, ntimesteps):
# Public health measures
PHA_social_distancing = 0
for w in soc_dist_windows:
PHA_social_distancing += w.window(i)
PHA_travel_restrictions = 0
for w in travel_restrictions_windows:
PHA_travel_restrictions += w.window(i)
PHA_isolate_visible_cases = 0
for w in isolate_symptomatic_cases_windows:
PHA_isolate_visible_cases += w.window(i)
PHA_isolate_at_risk = 0
for w in isolate_at_risk_windows:
PHA_isolate_at_risk += w.window(i)
PHA_isolate_infectious_cases = 0
for w in test_and_trace_windows:
PHA_isolate_infectious_cases += w.window(i)
PHA_isolate_cases = max(PHA_isolate_visible_cases, PHA_isolate_infectious_cases)
public_health_adjustment = (1 - PHA_social_distancing) * (1 - PHA_isolate_cases)
# Beds and Mortality
if avoid_elective_operations:
bed_occupancy_factor = (1 - PHA_social_distancing * max_reduction_in_normal_bed_occupancy)
else:
bed_occupancy_factor = 1
bed_occupancy_fraction = bed_occupancy_factor * normal_bed_occupancy_fraction
#Community spread
for j in range(0, nregions):
comm_spread_frac_allvars[j,:] = [e.comm_spread_frac for e in epi[j]]
# Loop of variants
for v in range(0,nvars):
# Loop over regions
for j in range(0, nregions):
intl_infected_visitors = intl_visitors[j] * (epi[j][v].proportion_global_infected[i]*global_infection_rate[i]) * min(0, 1 - PHA_travel_restrictions)
dom_infected_visitors = 0
# Confirm current variant has been introduced already
if epi_datetime_array[i] >= epi[j][v].start_time:
if nregions > 1:
for k in range(0, nregions):
if k != j:
dom_infected_visitors += epi[k][v].Itot_prev * between_region_mobility_rate[k]/(nregions - 1)
# Run the model for one time step
epi[j][v].update(total_population,
dom_infected_visitors + intl_infected_visitors,
between_locality_mobility_rate[j],
public_health_adjustment,
PHA_isolate_at_risk,
bed_occupancy_fraction,
beds_per_1000[j],
vaccination_max_doses[i],
vaccinate_at_risk,
Itot_allvars[j],
comm_spread_frac_allvars[j],
nvars)
# Update values for indicator graphs
new_deaths_over_time[j,i,v] = epi[j][v].new_deaths + epi[j][v].new_deaths_reinf
deaths[j,v] += epi[j][v].new_deaths
deaths_reinf[j,v] += epi[j][v].new_deaths_reinf
#susceptible_over_time[j,i,v] = epi[j][v].S
exposed_over_time[j,i,v] = np_sum(epi[j][v].E_nr) + np_sum(epi[j][v].E_r)
reexposed_over_time[j,i,v] = np_sum(epi[j][v].RE_nr) + np_sum(epi[j][v].RE_r)
infective_over_time[j,i,v] = epi[j][v].Itot
reinfective_over_time[j,i,v] = epi[j][v].RItot
deaths_over_time[j,i,v] = deaths[j,v]
deaths_reinf_over_time[j,i,v] = deaths_reinf[j,v]
vaccinated_over_time[j,i,v] = epi[j][v].vaccinated
rerecovered_over_time[j,i,v] = epi[j][v].RR
cumulative_cases[j,v] += (1 - epi[j][v].invisible_fraction_1stinfection) * (epi[j][v].I_nr[1] + epi[j][v].I_r[1]) + \
(1 - epi[j][v].invisible_fraction_reinfection) * (epi[j][v].RI_nr[1] + epi[j][v].RI_r[1])
comm_spread_frac_over_time[j,i,v] = epi[j][v].comm_spread_frac
mortality_rate_over_time[j,i,v] = epi[j][v].curr_mortality_rate
# Calculate hospitalisation index across variants and track infected fraction across variants
Itot_allvars=np_zeros(nregions) ## Breaks if one variant infects everyone
hospitalized=np_zeros(nregions)
for j in range(0, nregions):
# Infected by regions
for e in epi[j]:
Itot_allvars[j]+= e.Itot_incl_reinf # add total infected for each variant in that region
hosp_per_infective_1stinfections = (1 - e.invisible_fraction_1stinfection) * e.ave_fraction_of_visible_1stinfections_requiring_hospitalization
hosp_per_infective_reinfections = (1 - e.invisible_fraction_reinfection) * e.ave_fraction_of_visible_reinfections_requiring_hospitalization
hospitalized[j] += ( hosp_per_infective_1stinfections * np_sum(e.I_r + e.I_nr) + hosp_per_infective_reinfections * np_sum(e.RI_r + e.RI_nr) )
hospitalization_index_region[j] = bed_occupancy_fraction + hospitalized[j] /baseline_hosp[j]
hospitalization_index[i] = np_amax(hospitalization_index_region)
mortality_rate[i] = np_sum(new_deaths_over_time[:,i,:] )/total_population* 100000 # per 100,000
#True up susceptible pools, total population and recovered pools between variants
for j in range(0, nregions):
for v in range(0,nvars):
if nvars>1:
if i==0:
epi[j][v].S-= (np_sum(epi[j][~v].E_nr[1]) + np_sum(epi[j][~v].E_r[1]) + np_sum(epi[j][~v].Itot))
if i > 0:
epi[j][v].S= max(0, epi[j][v].S - (np_sum(epi[j][~v].E_nr[1]) + np_sum(epi[j][~v].E_r[1])))
epi[j][v].N -= ( epi[j][~v].new_deaths +epi[j][~v].new_deaths_reinf)
if epi_datetime_array[i] < epi[j][v].start_time:
epi[j][v].S= max(0, epi[j][v].S - (epi[j][~v].vaccinated_nr + epi[j][~v].vaccinated_r))
epi[j][v].R_nr = epi[j][~v].R_nr
epi[j][v].R_r = epi[j][~v].R_r
else:
epi[j][v].R_nr -= epi[j][~v].new_reexposed_nr
epi[j][v].R_r -= epi[j][~v].new_reexposed_r
susceptible_over_time[j,i,v] = epi[j][v].S
recovered_over_time[j,i,v] = np_sum(epi[j][v].R_nr) + np_sum(epi[j][v].R_r)
return nvars, seir_params_multivar, nregions, regions, start_time, end_time, epi_datetime_array, susceptible_over_time, \
exposed_over_time, infective_over_time, recovered_over_time, vaccinated_over_time, deaths_over_time, deaths_reinf_over_time, reexposed_over_time, reinfective_over_time, \
rerecovered_over_time, hospitalization_index | [
"yaml.full_load",
"common.timesteps_over_timedelta_weeks",
"common.get_datetime",
"numpy.ones",
"common.get_datetime_array",
"common.timesteps_between_dates",
"numpy.sum",
"numpy.zeros",
"numpy.empty",
"numpy.interp",
"sys.exit",
"seir_model.SEIR_matrix",
"numpy.amax"
] | [((1888, 1920), 'numpy.sum', 'np_sum', (['proportion_total'], {'axis': '(0)'}), '(proportion_total, axis=0)\n', (1894, 1920), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((2128, 2178), 'common.get_datetime', 'get_datetime', (["common_params['time']['COVID start']"], {}), "(common_params['time']['COVID start'])\n", (2140, 2178), False, 'from common import Window, get_datetime, timesteps_between_dates, get_datetime_array, timesteps_over_timedelta_weeks\n'), ((2197, 2300), 'common.timesteps_between_dates', 'timesteps_between_dates', (["common_params['time']['start date']", "common_params['time']['COVID start']"], {}), "(common_params['time']['start date'], common_params[\n 'time']['COVID start'])\n", (2220, 2300), False, 'from common import Window, get_datetime, timesteps_between_dates, get_datetime_array, timesteps_over_timedelta_weeks\n'), ((2312, 2412), 'common.timesteps_between_dates', 'timesteps_between_dates', (["common_params['time']['start date']", "common_params['time']['end date']"], {}), "(common_params['time']['start date'], common_params[\n 'time']['end date'])\n", (2335, 2412), False, 'from common import Window, get_datetime, timesteps_between_dates, get_datetime_array, timesteps_over_timedelta_weeks\n'), ((2434, 2530), 'common.get_datetime_array', 'get_datetime_array', (["common_params['time']['COVID start']", "common_params['time']['end date']"], {}), "(common_params['time']['COVID start'], common_params[\n 'time']['end date'])\n", (2452, 2530), False, 'from common import Window, get_datetime, timesteps_between_dates, get_datetime_array, timesteps_over_timedelta_weeks\n'), ((3852, 3886), 'numpy.empty', 'np_empty', (['global_infection_npoints'], {}), '(global_infection_npoints)\n', (3860, 3886), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as 
np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((3915, 3949), 'numpy.empty', 'np_empty', (['global_infection_npoints'], {}), '(global_infection_npoints)\n', (3923, 3949), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((4246, 4341), 'numpy.interp', 'np_interp', (['global_infection_traj_timesteps_array', 'global_infection_ts', 'global_infection_val'], {}), '(global_infection_traj_timesteps_array, global_infection_ts,\n global_infection_val)\n', (4255, 4341), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((4391, 4486), 'common.timesteps_between_dates', 'timesteps_between_dates', (['global_infection_traj_start', "common_params['time']['COVID start']"], {}), "(global_infection_traj_start, common_params['time'][\n 'COVID start'])\n", (4414, 4486), False, 'from common import Window, get_datetime, timesteps_between_dates, get_datetime_array, timesteps_over_timedelta_weeks\n'), ((4681, 4766), 'common.timesteps_over_timedelta_weeks', 'timesteps_over_timedelta_weeks', (["common_params['vaccination']['time to efficacy']"], {}), "(common_params['vaccination']['time to efficacy']\n )\n", (4711, 4766), False, 'from common import Window, get_datetime, timesteps_between_dates, get_datetime_array, timesteps_over_timedelta_weeks\n'), ((4990, 5019), 'numpy.empty', 'np_empty', (['vaccination_npoints'], {}), '(vaccination_npoints)\n', (4998, 5019), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((5043, 5072), 'numpy.empty', 'np_empty', (['vaccination_npoints'], {}), '(vaccination_npoints)\n', (5051, 5072), True, 'from 
numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((5338, 5409), 'numpy.interp', 'np_interp', (['vaccination_timesteps_array', 'vaccination_ts', 'vaccination_val'], {}), '(vaccination_timesteps_array, vaccination_ts, vaccination_val)\n', (5347, 5409), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((8793, 8811), 'numpy.zeros', 'np_zeros', (['nregions'], {}), '(nregions)\n', (8801, 8811), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((8844, 8871), 'numpy.zeros', 'np_zeros', (['(nregions, nvars)'], {}), '((nregions, nvars))\n', (8852, 8871), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((8886, 8913), 'numpy.zeros', 'np_zeros', (['(nregions, nvars)'], {}), '((nregions, nvars))\n', (8894, 8913), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((8934, 8961), 'numpy.zeros', 'np_zeros', (['(nregions, nvars)'], {}), '((nregions, nvars))\n', (8942, 8961), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((8986, 9013), 'numpy.zeros', 'np_zeros', (['(nregions, nvars)'], {}), '((nregions, nvars))\n', (8994, 9013), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as 
np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((9040, 9079), 'numpy.zeros', 'np_zeros', (['(nregions, ntimesteps, nvars)'], {}), '((nregions, ntimesteps, nvars))\n', (9048, 9079), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((9108, 9147), 'numpy.zeros', 'np_zeros', (['(nregions, ntimesteps, nvars)'], {}), '((nregions, ntimesteps, nvars))\n', (9116, 9147), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((9178, 9217), 'numpy.zeros', 'np_zeros', (['(nregions, ntimesteps, nvars)'], {}), '((nregions, ntimesteps, nvars))\n', (9186, 9217), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((9245, 9284), 'numpy.zeros', 'np_zeros', (['(nregions, ntimesteps, nvars)'], {}), '((nregions, ntimesteps, nvars))\n', (9253, 9284), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((9313, 9352), 'numpy.zeros', 'np_zeros', (['(nregions, ntimesteps, nvars)'], {}), '((nregions, ntimesteps, nvars))\n', (9321, 9352), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((9382, 9421), 'numpy.zeros', 'np_zeros', (['(nregions, ntimesteps, nvars)'], {}), '((nregions, ntimesteps, nvars))\n', (9390, 9421), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as 
np_tile, isnan as np_isnan\n'), ((9454, 9493), 'numpy.zeros', 'np_zeros', (['(nregions, ntimesteps, nvars)'], {}), '((nregions, ntimesteps, nvars))\n', (9462, 9493), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((9532, 9549), 'numpy.ones', 'np_ones', (['nregions'], {}), '(nregions)\n', (9539, 9549), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((9579, 9598), 'numpy.ones', 'np_ones', (['ntimesteps'], {}), '(ntimesteps)\n', (9586, 9598), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((9621, 9640), 'numpy.ones', 'np_ones', (['ntimesteps'], {}), '(ntimesteps)\n', (9628, 9640), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((9670, 9709), 'numpy.zeros', 'np_zeros', (['(nregions, ntimesteps, nvars)'], {}), '((nregions, ntimesteps, nvars))\n', (9678, 9709), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((9739, 9778), 'numpy.zeros', 'np_zeros', (['(nregions, ntimesteps, nvars)'], {}), '((nregions, ntimesteps, nvars))\n', (9747, 9778), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((9810, 9849), 'numpy.zeros', 'np_zeros', (['(nregions, ntimesteps, nvars)'], {}), '((nregions, ntimesteps, nvars))\n', (9818, 9849), 
True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((10189, 10228), 'numpy.zeros', 'np_zeros', (['(nregions, ntimesteps, nvars)'], {}), '((nregions, ntimesteps, nvars))\n', (10197, 10228), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((10376, 10415), 'numpy.zeros', 'np_zeros', (['(nregions, ntimesteps, nvars)'], {}), '((nregions, ntimesteps, nvars))\n', (10384, 10415), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((10578, 10617), 'numpy.zeros', 'np_zeros', (['(nregions, ntimesteps, nvars)'], {}), '((nregions, ntimesteps, nvars))\n', (10586, 10617), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((479, 499), 'yaml.full_load', 'yaml.full_load', (['file'], {}), '(file)\n', (493, 499), False, 'import yaml\n'), ((562, 582), 'yaml.full_load', 'yaml.full_load', (['file'], {}), '(file)\n', (576, 582), False, 'import yaml\n'), ((662, 682), 'yaml.full_load', 'yaml.full_load', (['file'], {}), '(file)\n', (676, 682), False, 'import yaml\n'), ((2097, 2103), 'sys.exit', 'exit', ([], {}), '()\n', (2101, 2103), False, 'from sys import exit\n'), ((3533, 3574), 'common.get_datetime', 'get_datetime', (['global_infection_traj_start'], {}), '(global_infection_traj_start)\n', (3545, 3574), False, 'from common import Window, get_datetime, timesteps_between_dates, get_datetime_array, timesteps_over_timedelta_weeks\n'), ((4033, 4120), 'common.timesteps_between_dates', 'timesteps_between_dates', 
(['global_infection_traj_start', 'global_infection_points[i][0]'], {}), '(global_infection_traj_start,\n global_infection_points[i][0])\n', (4056, 4120), False, 'from common import Window, get_datetime, timesteps_between_dates, get_datetime_array, timesteps_over_timedelta_weeks\n'), ((15144, 15162), 'numpy.zeros', 'np_zeros', (['nregions'], {}), '(nregions)\n', (15152, 15162), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((15227, 15245), 'numpy.zeros', 'np_zeros', (['nregions'], {}), '(nregions)\n', (15235, 15245), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((16077, 16114), 'numpy.amax', 'np_amax', (['hospitalization_index_region'], {}), '(hospitalization_index_region)\n', (16084, 16114), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((5146, 5237), 'common.timesteps_between_dates', 'timesteps_between_dates', (["common_params['time']['COVID start']", 'vaccination_points[i][0]'], {}), "(common_params['time']['COVID start'],\n vaccination_points[i][0])\n", (5169, 5237), False, 'from common import Window, get_datetime, timesteps_between_dates, get_datetime_array, timesteps_over_timedelta_weeks\n'), ((1250, 1286), 'seir_model.SEIR_matrix', 'SEIR_matrix', (['rgn', 'var', 'common_params'], {}), '(rgn, var, common_params)\n', (1261, 1286), False, 'from seir_model import SEIR_matrix\n'), ((3731, 3823), 'common.timesteps_between_dates', 'timesteps_between_dates', (['global_infection_traj_start', "common_params['time']['end date']"], {}), "(global_infection_traj_start, common_params['time'][\n 'end date'])\n", (3754, 3823), False, 'from 
common import Window, get_datetime, timesteps_between_dates, get_datetime_array, timesteps_over_timedelta_weeks\n'), ((4865, 4966), 'common.timesteps_between_dates', 'timesteps_between_dates', (["common_params['time']['COVID start']", "common_params['time']['end date']"], {}), "(common_params['time']['COVID start'], common_params\n ['time']['end date'])\n", (4888, 4966), False, 'from common import Window, get_datetime, timesteps_between_dates, get_datetime_array, timesteps_over_timedelta_weeks\n'), ((10299, 10313), 'numpy.sum', 'np_sum', (['e.E_nr'], {}), '(e.E_nr)\n', (10305, 10313), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((10316, 10329), 'numpy.sum', 'np_sum', (['e.E_r'], {}), '(e.E_r)\n', (10322, 10329), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((10488, 10503), 'numpy.sum', 'np_sum', (['e.RE_nr'], {}), '(e.RE_nr)\n', (10494, 10503), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((10506, 10520), 'numpy.sum', 'np_sum', (['e.RE_r'], {}), '(e.RE_r)\n', (10512, 10520), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((16144, 16181), 'numpy.sum', 'np_sum', (['new_deaths_over_time[:, i, :]'], {}), '(new_deaths_over_time[:, i, :])\n', (16150, 16181), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((17400, 17422), 'numpy.sum', 'np_sum', 
(['epi[j][v].R_nr'], {}), '(epi[j][v].R_nr)\n', (17406, 17422), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((17425, 17446), 'numpy.sum', 'np_sum', (['epi[j][v].R_r'], {}), '(epi[j][v].R_r)\n', (17431, 17446), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((14048, 14070), 'numpy.sum', 'np_sum', (['epi[j][v].E_nr'], {}), '(epi[j][v].E_nr)\n', (14054, 14070), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((14073, 14094), 'numpy.sum', 'np_sum', (['epi[j][v].E_r'], {}), '(epi[j][v].E_r)\n', (14079, 14094), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((14145, 14168), 'numpy.sum', 'np_sum', (['epi[j][v].RE_nr'], {}), '(epi[j][v].RE_nr)\n', (14151, 14168), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((14171, 14193), 'numpy.sum', 'np_sum', (['epi[j][v].RE_r'], {}), '(epi[j][v].RE_r)\n', (14177, 14193), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((15846, 15868), 'numpy.sum', 'np_sum', (['(e.I_r + e.I_nr)'], {}), '(e.I_r + e.I_nr)\n', (15852, 15868), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as 
np_tile, isnan as np_isnan\n'), ((15905, 15929), 'numpy.sum', 'np_sum', (['(e.RI_r + e.RI_nr)'], {}), '(e.RI_r + e.RI_nr)\n', (15911, 15929), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((16541, 16564), 'numpy.sum', 'np_sum', (['epi[j][~v].Itot'], {}), '(epi[j][~v].Itot)\n', (16547, 16564), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((5676, 5710), 'common.get_datetime', 'get_datetime', (["window['start date']"], {}), "(window['start date'])\n", (5688, 5710), False, 'from common import Window, get_datetime, timesteps_between_dates, get_datetime_array, timesteps_over_timedelta_weeks\n'), ((5801, 5833), 'common.get_datetime', 'get_datetime', (["window['end date']"], {}), "(window['end date'])\n", (5813, 5833), False, 'from common import Window, get_datetime, timesteps_between_dates, get_datetime_array, timesteps_over_timedelta_weeks\n'), ((6397, 6431), 'common.get_datetime', 'get_datetime', (["window['start date']"], {}), "(window['start date'])\n", (6409, 6431), False, 'from common import Window, get_datetime, timesteps_between_dates, get_datetime_array, timesteps_over_timedelta_weeks\n'), ((6512, 6544), 'common.get_datetime', 'get_datetime', (["window['end date']"], {}), "(window['end date'])\n", (6524, 6544), False, 'from common import Window, get_datetime, timesteps_between_dates, get_datetime_array, timesteps_over_timedelta_weeks\n'), ((7048, 7082), 'common.get_datetime', 'get_datetime', (["window['start date']"], {}), "(window['start date'])\n", (7060, 7082), False, 'from common import Window, get_datetime, timesteps_between_dates, get_datetime_array, timesteps_over_timedelta_weeks\n'), ((7162, 7194), 'common.get_datetime', 'get_datetime', (["window['end date']"], {}), 
"(window['end date'])\n", (7174, 7194), False, 'from common import Window, get_datetime, timesteps_between_dates, get_datetime_array, timesteps_over_timedelta_weeks\n'), ((7691, 7725), 'common.get_datetime', 'get_datetime', (["window['start date']"], {}), "(window['start date'])\n", (7703, 7725), False, 'from common import Window, get_datetime, timesteps_between_dates, get_datetime_array, timesteps_over_timedelta_weeks\n'), ((7799, 7831), 'common.get_datetime', 'get_datetime', (["window['end date']"], {}), "(window['end date'])\n", (7811, 7831), False, 'from common import Window, get_datetime, timesteps_between_dates, get_datetime_array, timesteps_over_timedelta_weeks\n'), ((8344, 8378), 'common.get_datetime', 'get_datetime', (["window['start date']"], {}), "(window['start date'])\n", (8356, 8378), False, 'from common import Window, get_datetime, timesteps_between_dates, get_datetime_array, timesteps_over_timedelta_weeks\n'), ((8452, 8484), 'common.get_datetime', 'get_datetime', (["window['end date']"], {}), "(window['end date'])\n", (8464, 8484), False, 'from common import Window, get_datetime, timesteps_between_dates, get_datetime_array, timesteps_over_timedelta_weeks\n'), ((16484, 16510), 'numpy.sum', 'np_sum', (['epi[j][~v].E_nr[1]'], {}), '(epi[j][~v].E_nr[1])\n', (16490, 16510), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((16513, 16538), 'numpy.sum', 'np_sum', (['epi[j][~v].E_r[1]'], {}), '(epi[j][~v].E_r[1])\n', (16519, 16538), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((16655, 16681), 'numpy.sum', 'np_sum', (['epi[j][~v].E_nr[1]'], {}), '(epi[j][~v].E_nr[1])\n', (16661, 16681), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as 
np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n'), ((16684, 16709), 'numpy.sum', 'np_sum', (['epi[j][~v].E_r[1]'], {}), '(epi[j][~v].E_r[1])\n', (16690, 16709), True, 'from numpy import array as np_array, zeros as np_zeros, sum as np_sum, empty as np_empty, amax as np_amax, interp as np_interp, ones as np_ones, tile as np_tile, isnan as np_isnan\n')] |
from body.tests.login_test_case import LoginTestCase
from body.tests.model_helpers import create_ledger_entry, create_medicine
from freezegun import freeze_time
from django.utils.timezone import make_aware, datetime
@freeze_time(make_aware(datetime(2022, 3, 1)))
class MedicineTests(LoginTestCase):
    def test_ledger_recalculates(self):
        """
        A medicine's current balance is rebuilt correctly from its ledger entries.
        """
        medicine = create_medicine(self.user)
        # Two ledger movements that should net out to a balance of 3.
        for amount in (4, -1):
            create_ledger_entry(medicine, amount)
        medicine.recalculate_balance_from_ledger()
        self.assertEqual(medicine.current_balance, 3)
| [
"body.tests.model_helpers.create_medicine",
"body.tests.model_helpers.create_ledger_entry",
"django.utils.timezone.datetime"
] | [((480, 506), 'body.tests.model_helpers.create_medicine', 'create_medicine', (['self.user'], {}), '(self.user)\n', (495, 506), False, 'from body.tests.model_helpers import create_ledger_entry, create_medicine\n'), ((515, 547), 'body.tests.model_helpers.create_ledger_entry', 'create_ledger_entry', (['medicine', '(4)'], {}), '(medicine, 4)\n', (534, 547), False, 'from body.tests.model_helpers import create_ledger_entry, create_medicine\n'), ((556, 589), 'body.tests.model_helpers.create_ledger_entry', 'create_ledger_entry', (['medicine', '(-1)'], {}), '(medicine, -1)\n', (575, 589), False, 'from body.tests.model_helpers import create_ledger_entry, create_medicine\n'), ((242, 262), 'django.utils.timezone.datetime', 'datetime', (['(2022)', '(3)', '(1)'], {}), '(2022, 3, 1)\n', (250, 262), False, 'from django.utils.timezone import make_aware, datetime\n')] |
"""This module contains functions to generate strategies from annotations."""
from __future__ import annotations
import collections
import inspect
import sys
from itertools import chain
from itertools import combinations
from typing import Any
from typing import Callable
from typing import Iterable
from typing import Sequence
import numpy as np
import pytest
from hypothesis import given
from hypothesis.extra.numpy import complex_number_dtypes
from hypothesis.extra.numpy import floating_dtypes
from hypothesis.extra.numpy import from_dtype
from hypothesis.strategies import booleans
from hypothesis.strategies import complex_numbers
from hypothesis.strategies import data
from hypothesis.strategies import dictionaries
from hypothesis.strategies import floats
from hypothesis.strategies import integers
from hypothesis.strategies import iterables
from hypothesis.strategies import just
from hypothesis.strategies import lists
from hypothesis.strategies import one_of
from hypothesis.strategies import SearchStrategy
from hypothesis.strategies import sets
from hypothesis.strategies import text
from hypothesis.strategies import tuples
from bqskit.utils.test.strategies import circuit_location_likes
from bqskit.utils.test.strategies import circuit_locations
from bqskit.utils.test.strategies import circuit_points
from bqskit.utils.test.strategies import circuit_regions
from bqskit.utils.test.strategies import circuits
from bqskit.utils.test.strategies import cycle_intervals
from bqskit.utils.test.strategies import everything_except
from bqskit.utils.test.strategies import gates
from bqskit.utils.test.strategies import operations
from bqskit.utils.test.strategies import unitaries
from bqskit.utils.test.strategies import unitary_likes
def _powerset(iterable: Iterable[Any]) -> Iterable[Any]:
"""
Calculate the powerset of an iterable.
Examples:
>>> list(powerset([1,2,3]))
... [() (1,) (2,) (3,) (1,2) (1,3) (2,3) (1,2,3)]
References:
https://stackoverflow.com/questions/18035595/powersets-in-python-using-
itertools.
"""
s = list(iterable)
return chain.from_iterable(combinations(s, r) for r in range(len(s)))
def _split_generic_arguments(args: str) -> list[str]:
"""Split a generic's type arguments up."""
comma_indices = []
num_open_brackets = 0
for i, char in enumerate(args):
if char == '[':
num_open_brackets += 1
elif char == ']':
num_open_brackets -= 1
elif char == ',' and num_open_brackets == 0:
comma_indices.append(i)
if len(comma_indices) == 0:
return [args]
to_return: list[str] = []
last_index = 0
for comma_index in comma_indices:
to_return.append(args[last_index: comma_index])
last_index = comma_index + 1
to_return.append(args[last_index:])
return to_return
def type_annotation_to_valid_strategy(annotation: str) -> SearchStrategy[Any]:
    """
    Convert a type annotation into a hypothesis strategy of valid values.

    The annotation is split on ``|``; one sub-strategy is registered per
    alternative and the result draws from any of them via ``one_of``.

    Raises:
        ValueError: If an alternative is not a recognized type name.
    """
    strategies: list[SearchStrategy[Any]] = []
    # RealVector is an alias for a sequence of floats; normalize it up front.
    annotation = annotation.replace('RealVector', 'Sequence[float]')
    for type_str in annotation.split('|'):
        type_str = type_str.strip()
        if type_str == 'None':
            strategies.append(just(None))
        elif type_str == 'int':
            strategies.append(integers())
        elif type_str == 'float':
            strategies.append(floats())
            strategies.append(floating_dtypes().flatmap(from_dtype))
        elif type_str == 'complex':
            strategies.append(complex_numbers())
            strategies.append(complex_number_dtypes().flatmap(from_dtype))
        elif type_str == 'bool':
            strategies.append(booleans())
        elif type_str == 'str':
            strategies.append(text())
        elif type_str == 'Any':
            strategies.append(just(None))
        elif type_str.lower().startswith('tuple'):
            inner_strategies = []
            for arg in _split_generic_arguments(type_str[6:-1]):
                inner_strategies.append(type_annotation_to_valid_strategy(arg))
            strategies.append(tuples(*inner_strategies))
        elif type_str.lower().startswith('dict'):
            args = _split_generic_arguments(type_str[5:-1])
            key_strat = type_annotation_to_valid_strategy(args[0])
            val_strat = type_annotation_to_valid_strategy(args[1])
            strategies.append(dictionaries(key_strat, val_strat))
        elif type_str.lower().startswith('mapping'):
            args = _split_generic_arguments(type_str[8:-1])
            key_strat = type_annotation_to_valid_strategy(args[0])
            val_strat = type_annotation_to_valid_strategy(args[1])
            strategies.append(dictionaries(key_strat, val_strat))
        elif type_str.lower().startswith('list'):
            arg_strat = type_annotation_to_valid_strategy(type_str[5:-1])
            strategies.append(lists(arg_strat))
        elif type_str.lower().startswith('set'):
            arg_strat = type_annotation_to_valid_strategy(type_str[4:-1])
            strategies.append(sets(arg_strat))
        elif type_str.lower().startswith('sequence'):
            arg_strat = type_annotation_to_valid_strategy(type_str[9:-1])
            strategies.append(lists(arg_strat))
        elif type_str.lower().startswith('iterable'):
            arg_strat = type_annotation_to_valid_strategy(type_str[9:-1])
            strategies.append(iterables(arg_strat))
        elif type_str.lower().startswith('intervallike'):
            strat = type_annotation_to_valid_strategy('Tuple[int, int]')
            strategies.append(strat)
            strategies.append(cycle_intervals())
        elif type_str.lower().startswith('cycleinterval'):
            strategies.append(cycle_intervals())
        elif type_str.lower().startswith('circuitpointlike'):
            strat = type_annotation_to_valid_strategy('Tuple[int, int]')
            strategies.append(strat)
            strategies.append(circuit_points())
        elif type_str.lower().startswith('circuitpoint'):
            strategies.append(circuit_points())
        elif type_str.lower().startswith('circuitregionlike'):
            strat = type_annotation_to_valid_strategy('dict[int, IntervalLike]')
            strategies.append(strat)
            strategies.append(circuit_regions())
        elif type_str.lower().startswith('circuitregion'):
            strategies.append(circuit_regions())
        elif type_str.lower().startswith('unitarylike'):
            strategies.append(unitary_likes())
        elif type_str.lower().startswith('unitarymatrix'):
            strategies.append(unitaries())
        elif type_str.lower().startswith('gate'):
            strategies.append(gates())
        elif type_str.lower().startswith('operation'):
            strategies.append(operations())
        # BUG FIX: these two branches were swapped. A 'CircuitLocationLike'
        # annotation accepts the whole like-family, so it must draw from
        # circuit_location_likes(); a bare 'CircuitLocation' must draw
        # concrete locations from circuit_locations(). This matches the
        # unitarylike/unitarymatrix pattern above. (The bug was silent
        # because CircuitLocations are themselves valid like-values.)
        elif type_str.lower().startswith('circuitlocationlike'):
            strategies.append(circuit_location_likes())
        elif type_str.lower().startswith('circuitlocation'):
            strategies.append(circuit_locations())
        elif type_str.lower().startswith('circuit'):
            strategies.append(circuits(max_gates=1))
        else:
            raise ValueError(f'Cannot generate strategy for type: {type_str}')
    return one_of(strategies)
def type_annotation_to_invalid_strategy(annotation: str) -> SearchStrategy[Any]:
    """
    Convert a type annotation into an invalid hypothesis strategy.

    The returned strategy only produces values that do NOT satisfy the
    annotation: scalars of every type not mentioned in it, tuples with at
    least one invalid element, dicts with invalid keys and/or values, and
    containers of invalid elements.

    Raises:
        ValueError: If an alternative is not a recognized type name.
    """
    strategies: list[SearchStrategy[Any]] = []
    # Concrete Python/numpy types that would satisfy the annotation and so
    # must be excluded from the generic everything_except fallback.
    types_to_avoid: set[type] = set()
    # For tuple annotations: element strategies bucketed by arity.
    tuple_valids: dict[int, set[SearchStrategy[Any]]] = {}
    tuple_invalids: dict[int, set[SearchStrategy[Any]]] = {}
    dict_key_valids: set[SearchStrategy[Any]] = set()
    dict_key_invalids: set[SearchStrategy[Any]] = set()
    dict_val_valids: set[SearchStrategy[Any]] = set()
    dict_val_invalids: set[SearchStrategy[Any]] = set()
    list_invalids: set[SearchStrategy[Any]] = set()
    set_invalids: set[SearchStrategy[Any]] = set()
    iterable_invalids: set[SearchStrategy[Any]] = set()
    annotation = annotation.replace('RealVector', 'Sequence[float]')
    for type_str in annotation.split('|'):
        type_str = type_str.strip()
        if type_str == 'None':
            types_to_avoid.add(type(None))
        elif type_str == 'int':
            types_to_avoid.add(int)
            types_to_avoid.add(np.byte)
            types_to_avoid.add(np.short)
            types_to_avoid.add(np.intc)
            types_to_avoid.add(np.longlong)
            types_to_avoid.add(np.int8)
            types_to_avoid.add(np.int16)
            types_to_avoid.add(np.int32)
            types_to_avoid.add(np.int64)
        elif type_str == 'float':
            types_to_avoid.add(float)
            types_to_avoid.add(np.half)
            types_to_avoid.add(np.single)
            types_to_avoid.add(np.double)
            types_to_avoid.add(np.longdouble)
            types_to_avoid.add(np.float32)
            types_to_avoid.add(np.float64)
        elif type_str == 'complex':
            types_to_avoid.add(complex)
            types_to_avoid.add(np.csingle)
            types_to_avoid.add(np.cdouble)
            types_to_avoid.add(np.clongdouble)
            types_to_avoid.add(np.complex64)
            types_to_avoid.add(np.complex128)
        elif type_str == 'bool':
            types_to_avoid.add(bool)
            types_to_avoid.add(np.bool_)
        elif type_str == 'str':
            types_to_avoid.add(str)
        elif type_str == 'Any':
            # Everything is valid for Any; nothing can be marked invalid.
            continue
        elif type_str.lower().startswith('tuple'):
            args = _split_generic_arguments(type_str[6:-1])
            if len(args) not in tuple_valids:
                tuple_valids[len(args)] = set()
                tuple_invalids[len(args)] = set()
            for arg in args:
                valid_strat = type_annotation_to_valid_strategy(arg)
                invalid_strat = type_annotation_to_invalid_strategy(arg)
                tuple_valids[len(args)].add(valid_strat)
                tuple_invalids[len(args)].add(invalid_strat)
            types_to_avoid.add(tuple)
        elif type_str.lower().startswith('dict'):
            args = _split_generic_arguments(type_str[5:-1])
            # BUG FIX: the valid sets must hold *valid* strategies for the
            # matching position and the invalid sets *invalid* ones;
            # previously key/value and valid/invalid were crossed
            # (e.g. dict_key_invalids received the valid strategy of the
            # value type), which contradicts how the sets are combined
            # into dictionaries(...) strategies below.
            dict_key_valids.add(type_annotation_to_valid_strategy(args[0]))
            dict_key_invalids.add(type_annotation_to_invalid_strategy(args[0]))
            dict_val_valids.add(type_annotation_to_valid_strategy(args[1]))
            dict_val_invalids.add(type_annotation_to_invalid_strategy(args[1]))
            types_to_avoid.add(dict)
            types_to_avoid.add(map)
        elif type_str.lower().startswith('mapping'):
            args = _split_generic_arguments(type_str[8:-1])
            # BUG FIX: same key/value and valid/invalid crossing as in the
            # dict branch above.
            dict_key_valids.add(type_annotation_to_valid_strategy(args[0]))
            dict_key_invalids.add(type_annotation_to_invalid_strategy(args[0]))
            dict_val_valids.add(type_annotation_to_valid_strategy(args[1]))
            dict_val_invalids.add(type_annotation_to_invalid_strategy(args[1]))
            types_to_avoid.add(dict)
            types_to_avoid.add(map)
        elif type_str.lower().startswith('list'):
            arg_strat = type_annotation_to_invalid_strategy(type_str[5:-1])
            list_invalids.add(arg_strat)
            types_to_avoid.add(list)
        elif type_str.lower().startswith('set'):
            arg_strat = type_annotation_to_invalid_strategy(type_str[4:-1])
            set_invalids.add(arg_strat)
            types_to_avoid.add(set)
            types_to_avoid.add(collections.abc.MutableSet)
        elif type_str.lower().startswith('sequence'):
            arg_strat = type_annotation_to_invalid_strategy(type_str[9:-1])
            list_invalids.add(arg_strat)
            types_to_avoid.add(Sequence)
            types_to_avoid.add(list)
            types_to_avoid.add(tuple)
            types_to_avoid.add(bytearray)
            types_to_avoid.add(bytes)
        elif type_str.lower().startswith('iterable'):
            arg_strat = type_annotation_to_invalid_strategy(type_str[9:-1])
            iterable_invalids.add(arg_strat)
            types_to_avoid.add(Sequence)
            types_to_avoid.add(list)
            types_to_avoid.add(tuple)
            types_to_avoid.add(Iterable)
            types_to_avoid.add(set)
            types_to_avoid.add(frozenset)
            types_to_avoid.add(dict)
            types_to_avoid.add(str)
            types_to_avoid.add(bytearray)
            types_to_avoid.add(bytes)
            types_to_avoid.add(collections.abc.MutableSet)
            types_to_avoid.add(enumerate)
            types_to_avoid.add(map)
            types_to_avoid.add(range)
            types_to_avoid.add(reversed)
        elif type_str.lower().startswith('intervallike'):
            types_to_avoid.add(tuple)
        elif type_str.lower().startswith('cycleinterval'):
            continue
        elif type_str.lower().startswith('circuitpointlike'):
            types_to_avoid.add(tuple)
        elif type_str.lower().startswith('circuitpoint'):
            continue
        elif type_str.lower().startswith('circuitregionlike'):
            types_to_avoid.add(dict)
        elif type_str.lower().startswith('circuitregion'):
            continue
        elif type_str.lower().startswith('circuitlocationlike'):
            # CircuitLocationLike accepts ints and integer iterables, so all
            # of those shapes must be excluded from the fallback.
            types_to_avoid.add(int)
            types_to_avoid.add(Sequence)
            types_to_avoid.add(Iterable)
            types_to_avoid.add(list)
            types_to_avoid.add(tuple)
            types_to_avoid.add(collections.abc.MutableSet)
            types_to_avoid.add(enumerate)
            types_to_avoid.add(range)
            types_to_avoid.add(reversed)
        elif type_str.lower().startswith('circuitlocation'):
            continue
        elif type_str.lower().startswith('unitarylike'):
            types_to_avoid.add(np.ndarray)
        elif type_str.lower().startswith('unitarymatrix'):
            continue
        elif type_str.lower().startswith('gate'):
            continue
        elif type_str.lower().startswith('operation'):
            continue
        elif type_str.lower().startswith('circuit'):
            continue
        else:
            raise ValueError(f'Cannot generate strategy for type: {type_str}')
    # Any value whose type is not explicitly valid is invalid.
    strategies.append(everything_except(tuple(types_to_avoid)))
    # Tuples of the right arity with at least one invalid slot; _powerset
    # never yields the full index set, so one slot is always invalid.
    for tuple_len in tuple_valids:
        for valid_set in _powerset(list(range(tuple_len))):  # (), (0,), (1,)
            strategy_builder = []
            for i in range(tuple_len):
                if i in valid_set:
                    strat = one_of(list(tuple_valids[tuple_len]))
                    strategy_builder.append(strat)
                else:
                    strat = one_of(list(tuple_invalids[tuple_len]))
                    strategy_builder.append(strat)
            strategies.append(tuples(*strategy_builder))
    # Non-empty dicts that are invalid in the values, the keys, or both.
    if len(dict_val_invalids) > 0:
        strategies.append(
            dictionaries(
                one_of(list(dict_key_valids)),
                one_of(list(dict_val_invalids)),
                min_size=1,
            ),
        )
        strategies.append(
            dictionaries(
                one_of(list(dict_key_invalids)),
                one_of(list(dict_val_valids)),
                min_size=1,
            ),
        )
        strategies.append(
            dictionaries(
                one_of(list(dict_key_invalids)),
                one_of(list(dict_val_invalids)),
                min_size=1,
            ),
        )
    # Non-empty containers of invalid elements.
    if len(list_invalids) > 0:
        strategies.append(lists(one_of(list(list_invalids)), min_size=1))
    if len(set_invalids) > 0:
        strategies.append(sets(one_of(list(set_invalids)), min_size=1))
    if len(iterable_invalids) > 0:
        strategies.append(
            iterables(
                one_of(
                    list(iterable_invalids),
                ),
                min_size=1,
            ),
        )
    return one_of(strategies)
def invalid_type_test(
    func_to_test: Callable[..., Any],
    other_allowed_errors: Sequence[type] = (),
) -> Callable[..., Callable[..., None]]:
    """
    Decorator to generate invalid type tests.

    An invalid type test ensures that a function called with incorrect types
    does raise a TypeError (or one of `other_allowed_errors`).

    Examples:
        >>> class Foo:
        ...     def foo(self, x: int, y: int) -> None:
        ...         if not is_integer(x):
        ...             raise TypeError("")
        ...         if not is_integer(y):
        ...             raise TypeError("")
        >>> class TestFoo:
        ...     @invalid_type_test(Foo().foo)
        ...     def test_foo_invalid_type(self) -> None:
        ...         pass
        >>> @invalid_type_test(Foo().foo)
        ... def test_foo_invalid_type(self) -> None:
        ...     pass
    """
    # The strategy machinery needs Python >= 3.9 annotation syntax; on older
    # interpreters the decorator degrades to a no-op.
    if sys.version_info[0] == 3 and sys.version_info[1] < 9:
        return lambda x: x
    valids = []
    invalids = []
    for param in inspect.signature(func_to_test).parameters.values():
        if param.annotation == inspect._empty:  # type: ignore
            raise ValueError(
                'Need type annotation to generate invalid type tests.',
            )
        valids.append(type_annotation_to_valid_strategy(param.annotation))
        invalids.append(type_annotation_to_invalid_strategy(param.annotation))
    # One strategy per subset of parameters that receive valid values;
    # _powerset never yields the full set, so at least one argument is
    # always invalid and a TypeError is expected.
    strategies = []
    for valid_set in _powerset(list(range(len(valids)))):
        strategy_builder = []
        for i in range(len(valids)):
            if i in valid_set:
                strategy_builder.append(valids[i])
            else:
                strategy_builder.append(invalids[i])
        strategies.append(tuples(*strategy_builder))

    def inner(f: Callable[..., Any]) -> Callable[..., None]:
        # Preserve the `self` parameter when decorating a method.
        if 'self' in inspect.signature(f).parameters:
            @pytest.mark.parametrize('strategy', strategies)
            @given(data=data())
            def invalid_type_test(self: Any, strategy: Any, data: Any) -> None:
                args = data.draw(strategy)
                with pytest.raises((TypeError,) + tuple(other_allowed_errors)):
                    func_to_test(*args)
            return invalid_type_test
        else:
            @pytest.mark.parametrize('strategy', strategies)
            @given(data=data())
            def invalid_type_test(strategy: Any, data: Any) -> None:
                args = data.draw(strategy)
                with pytest.raises((TypeError,) + tuple(other_allowed_errors)):
                    func_to_test(*args)
            return invalid_type_test
    return inner
def valid_type_test(
    func_to_test: Callable[..., Any],
) -> Callable[..., Callable[..., None]]:
    """
    Decorator to generate valid type tests.

    A valid type test ensures that a function called with correct types
    does not raise a TypeError.

    Examples:
        >>> class Foo:
        ...     def foo(self, x: int, y: int) -> None:
        ...         if not is_integer(x):
        ...             raise TypeError("")
        ...         if not is_integer(y):
        ...             raise TypeError("")
        >>> class TestFoo:
        ...     @valid_type_test(Foo().foo)
        ...     def test_foo_valid_type(self) -> None:
        ...         pass
        >>> @valid_type_test(Foo().foo)
        ... def test_foo_valid_type(self) -> None:
        ...     pass
    """
    # The strategy machinery needs Python >= 3.9 annotation syntax; on older
    # interpreters the decorator degrades to a no-op.
    if sys.version_info[0] == 3 and sys.version_info[1] < 9:
        return lambda x: x
    strategies = []
    for param in inspect.signature(func_to_test).parameters.values():
        if param.annotation == inspect._empty:  # type: ignore
            # BUG FIX: the message previously said "invalid type tests",
            # copy-pasted from invalid_type_test above.
            raise ValueError(
                'Need type annotation to generate valid type tests.',
            )
        strategies.append(type_annotation_to_valid_strategy(param.annotation))
    strategy = tuples(*strategies)

    def inner(f: Callable[..., Any]) -> Callable[..., None]:
        # Preserve the `self` parameter when decorating a method.
        if 'self' in inspect.signature(f).parameters:
            @given(data=strategy)
            def valid_type_test(self: Any, data: Any) -> None:
                try:
                    func_to_test(*data)
                except TypeError:
                    assert False, 'Valid types caused TypeError.'
                except Exception:
                    # Any non-TypeError failure is acceptable here: only the
                    # type-checking contract is under test.
                    pass
            return valid_type_test
        else:
            @given(data=strategy)
            def valid_type_test(data: Any) -> None:
                try:
                    func_to_test(*data)
                except TypeError:
                    assert False, 'Valid types caused TypeError.'
                except Exception:
                    pass
            return valid_type_test
    return inner
| [
"bqskit.utils.test.strategies.unitaries",
"inspect.signature",
"hypothesis.strategies.sets",
"bqskit.utils.test.strategies.unitary_likes",
"hypothesis.strategies.lists",
"hypothesis.strategies.booleans",
"bqskit.utils.test.strategies.circuit_points",
"hypothesis.given",
"hypothesis.strategies.dictio... | [((7359, 7377), 'hypothesis.strategies.one_of', 'one_of', (['strategies'], {}), '(strategies)\n', (7365, 7377), False, 'from hypothesis.strategies import one_of\n'), ((16054, 16072), 'hypothesis.strategies.one_of', 'one_of', (['strategies'], {}), '(strategies)\n', (16060, 16072), False, 'from hypothesis.strategies import one_of\n'), ((19955, 19974), 'hypothesis.strategies.tuples', 'tuples', (['*strategies'], {}), '(*strategies)\n', (19961, 19974), False, 'from hypothesis.strategies import tuples\n'), ((2148, 2166), 'itertools.combinations', 'combinations', (['s', 'r'], {}), '(s, r)\n', (2160, 2166), False, 'from itertools import combinations\n'), ((17786, 17811), 'hypothesis.strategies.tuples', 'tuples', (['*strategy_builder'], {}), '(*strategy_builder)\n', (17792, 17811), False, 'from hypothesis.strategies import tuples\n'), ((17942, 17989), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""strategy"""', 'strategies'], {}), "('strategy', strategies)\n", (17965, 17989), False, 'import pytest\n'), ((18330, 18377), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""strategy"""', 'strategies'], {}), "('strategy', strategies)\n", (18353, 18377), False, 'import pytest\n'), ((20104, 20124), 'hypothesis.given', 'given', ([], {'data': 'strategy'}), '(data=strategy)\n', (20109, 20124), False, 'from hypothesis import given\n'), ((20471, 20491), 'hypothesis.given', 'given', ([], {'data': 'strategy'}), '(data=strategy)\n', (20476, 20491), False, 'from hypothesis import given\n'), ((3289, 3299), 'hypothesis.strategies.just', 'just', (['None'], {}), '(None)\n', (3293, 3299), False, 'from hypothesis.strategies import just\n'), ((14937, 14962), 'hypothesis.strategies.tuples', 'tuples', (['*strategy_builder'], {}), '(*strategy_builder)\n', (14943, 14962), False, 'from hypothesis.strategies import tuples\n'), ((17075, 17106), 'inspect.signature', 'inspect.signature', (['func_to_test'], {}), '(func_to_test)\n', (17092, 17106), 
False, 'import inspect\n'), ((17896, 17916), 'inspect.signature', 'inspect.signature', (['f'], {}), '(f)\n', (17913, 17916), False, 'import inspect\n'), ((18125, 18144), 'hypothesis.strategies.data.draw', 'data.draw', (['strategy'], {}), '(strategy)\n', (18134, 18144), False, 'from hypothesis.strategies import data\n'), ((18502, 18521), 'hypothesis.strategies.data.draw', 'data.draw', (['strategy'], {}), '(strategy)\n', (18511, 18521), False, 'from hypothesis.strategies import data\n'), ((19629, 19660), 'inspect.signature', 'inspect.signature', (['func_to_test'], {}), '(func_to_test)\n', (19646, 19660), False, 'import inspect\n'), ((20058, 20078), 'inspect.signature', 'inspect.signature', (['f'], {}), '(f)\n', (20075, 20078), False, 'import inspect\n'), ((3364, 3374), 'hypothesis.strategies.integers', 'integers', ([], {}), '()\n', (3372, 3374), False, 'from hypothesis.strategies import integers\n'), ((18014, 18020), 'hypothesis.strategies.data', 'data', ([], {}), '()\n', (18018, 18020), False, 'from hypothesis.strategies import data\n'), ((18402, 18408), 'hypothesis.strategies.data', 'data', ([], {}), '()\n', (18406, 18408), False, 'from hypothesis.strategies import data\n'), ((3441, 3449), 'hypothesis.strategies.floats', 'floats', ([], {}), '()\n', (3447, 3449), False, 'from hypothesis.strategies import floats\n'), ((3587, 3604), 'hypothesis.strategies.complex_numbers', 'complex_numbers', ([], {}), '()\n', (3602, 3604), False, 'from hypothesis.strategies import complex_numbers\n'), ((3481, 3498), 'hypothesis.extra.numpy.floating_dtypes', 'floating_dtypes', ([], {}), '()\n', (3496, 3498), False, 'from hypothesis.extra.numpy import floating_dtypes\n'), ((3745, 3755), 'hypothesis.strategies.booleans', 'booleans', ([], {}), '()\n', (3753, 3755), False, 'from hypothesis.strategies import booleans\n'), ((3636, 3659), 'hypothesis.extra.numpy.complex_number_dtypes', 'complex_number_dtypes', ([], {}), '()\n', (3657, 3659), False, 'from hypothesis.extra.numpy import 
complex_number_dtypes\n'), ((3820, 3826), 'hypothesis.strategies.text', 'text', ([], {}), '()\n', (3824, 3826), False, 'from hypothesis.strategies import text\n'), ((3891, 3901), 'hypothesis.strategies.just', 'just', (['None'], {}), '(None)\n', (3895, 3901), False, 'from hypothesis.strategies import just\n'), ((4164, 4189), 'hypothesis.strategies.tuples', 'tuples', (['*inner_strategies'], {}), '(*inner_strategies)\n', (4170, 4189), False, 'from hypothesis.strategies import tuples\n'), ((4466, 4500), 'hypothesis.strategies.dictionaries', 'dictionaries', (['key_strat', 'val_strat'], {}), '(key_strat, val_strat)\n', (4478, 4500), False, 'from hypothesis.strategies import dictionaries\n'), ((4780, 4814), 'hypothesis.strategies.dictionaries', 'dictionaries', (['key_strat', 'val_strat'], {}), '(key_strat, val_strat)\n', (4792, 4814), False, 'from hypothesis.strategies import dictionaries\n'), ((4971, 4987), 'hypothesis.strategies.lists', 'lists', (['arg_strat'], {}), '(arg_strat)\n', (4976, 4987), False, 'from hypothesis.strategies import lists\n'), ((5143, 5158), 'hypothesis.strategies.sets', 'sets', (['arg_strat'], {}), '(arg_strat)\n', (5147, 5158), False, 'from hypothesis.strategies import sets\n'), ((5319, 5335), 'hypothesis.strategies.lists', 'lists', (['arg_strat'], {}), '(arg_strat)\n', (5324, 5335), False, 'from hypothesis.strategies import lists\n'), ((5496, 5516), 'hypothesis.strategies.iterables', 'iterables', (['arg_strat'], {}), '(arg_strat)\n', (5505, 5516), False, 'from hypothesis.strategies import iterables\n'), ((5717, 5734), 'bqskit.utils.test.strategies.cycle_intervals', 'cycle_intervals', ([], {}), '()\n', (5732, 5734), False, 'from bqskit.utils.test.strategies import cycle_intervals\n'), ((5826, 5843), 'bqskit.utils.test.strategies.cycle_intervals', 'cycle_intervals', ([], {}), '()\n', (5841, 5843), False, 'from bqskit.utils.test.strategies import cycle_intervals\n'), ((6048, 6064), 'bqskit.utils.test.strategies.circuit_points', 'circuit_points', 
([], {}), '()\n', (6062, 6064), False, 'from bqskit.utils.test.strategies import circuit_points\n'), ((6155, 6171), 'bqskit.utils.test.strategies.circuit_points', 'circuit_points', ([], {}), '()\n', (6169, 6171), False, 'from bqskit.utils.test.strategies import circuit_points\n'), ((6385, 6402), 'bqskit.utils.test.strategies.circuit_regions', 'circuit_regions', ([], {}), '()\n', (6400, 6402), False, 'from bqskit.utils.test.strategies import circuit_regions\n'), ((6494, 6511), 'bqskit.utils.test.strategies.circuit_regions', 'circuit_regions', ([], {}), '()\n', (6509, 6511), False, 'from bqskit.utils.test.strategies import circuit_regions\n'), ((6601, 6616), 'bqskit.utils.test.strategies.unitary_likes', 'unitary_likes', ([], {}), '()\n', (6614, 6616), False, 'from bqskit.utils.test.strategies import unitary_likes\n'), ((6708, 6719), 'bqskit.utils.test.strategies.unitaries', 'unitaries', ([], {}), '()\n', (6717, 6719), False, 'from bqskit.utils.test.strategies import unitaries\n'), ((6802, 6809), 'bqskit.utils.test.strategies.gates', 'gates', ([], {}), '()\n', (6807, 6809), False, 'from bqskit.utils.test.strategies import gates\n'), ((6897, 6909), 'bqskit.utils.test.strategies.operations', 'operations', ([], {}), '()\n', (6907, 6909), False, 'from bqskit.utils.test.strategies import operations\n'), ((7007, 7026), 'bqskit.utils.test.strategies.circuit_locations', 'circuit_locations', ([], {}), '()\n', (7024, 7026), False, 'from bqskit.utils.test.strategies import circuit_locations\n'), ((7120, 7144), 'bqskit.utils.test.strategies.circuit_location_likes', 'circuit_location_likes', ([], {}), '()\n', (7142, 7144), False, 'from bqskit.utils.test.strategies import circuit_location_likes\n'), ((7230, 7251), 'bqskit.utils.test.strategies.circuits', 'circuits', ([], {'max_gates': '(1)'}), '(max_gates=1)\n', (7238, 7251), False, 'from bqskit.utils.test.strategies import circuits\n')] |
# Copyright 2019-2021 Swiss National Supercomputing Centre (CSCS/ETH Zurich)
# HPCTools Project Developers. See the top-level LICENSE file for details.
#
# SPDX-License-Identifier: BSD-3-Clause
import os
import sys
import reframe as rfm
import reframe.utility.sanity as sn
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
'../common'))) # noqa: E402
import sphexa.hooks as hooks
import sphexa.sanity as sphs
import sphexa.sanity_mpip as sphsmpip
# {{{ class SphExa_mpiP_Check
@rfm.simple_test
class SphExa_mpiP_Check(rfm.RegressionTest, hooks.setup_pe,
                        hooks.setup_code):
    # {{{
    '''
    This class runs the test code with mpiP (mpi+openmp):
    http://llnl.github.io/mpiP
    '''
    # }}}
    # ReFrame test parameters: each combination yields one generated test.
    steps = parameter([4])
    compute_node = parameter([2])
    # compute_node = parameter([100, 200, 300, 400, 500])
    # np_per_c: particles per compute unit used to size the problem.
    np_per_c = parameter([1e4])
    mpip_flags = variable(bool, value=True)
    def __init__(self):
        """Configure environments, build, run and sanity stages."""
        # {{{ pe
        self.descr = 'Tool validation'
        self.valid_prog_environs = ['*']
        self.valid_systems = ['*']
        self.modules = ['mpiP']
        self.maintainers = ['JG']
        self.tags = {'sph', 'hpctools', 'cpu'}
        # }}}
        # {{{ compile
        self.testname = 'sedov'
        self.tool = 'pprof'
        self.which_rpt = 'which.rpt'
        # NOTE(review): prebuild sanity commands are disabled; the list is
        # kept so they can be re-enabled easily.
        self.prebuild_cmds = [
            # f'which {self.tool} &> {self.which_rpt}',
            # f'which $EBROOTPPROF/bin/{self.tool} >> {self.which_rpt}',
        ]
        # }}}
        # {{{ run
        # self.variables = {'MPIP': '"-c"',}
        # -c: concise version of report
        # -d: suppress callsite details
        # -p: report histogram of point-to-point MPI calls
        # -y: report histogram of collective MPI calls
        #
        # libpath = '$EBROOTMPIP/lib'
        # self.variables['LD_LIBRARY_PATH'] = f'{libpath}:$LD_LIBRARY_PATH'
        self.rpt_file = 'mpip.rpt'
        # NOTE(review): the two identical lines below are visual separators
        # appended to the job script after the run -- presumably placeholders
        # for post-processing commands; confirm before removing.
        self.postrun_cmds += [
            f'# -------------------------------------------------------------',
            f'# -------------------------------------------------------------',
        ]
        # }}}
        # {{{ sanity
        self.sanity_patterns = sn.all([
            # check the job output:
            sn.assert_found(r'Total time for iteration\(0\)', self.stdout),
            # check the tool report:
            sn.assert_found('^mpiP: Storing mpiP output in ', self.stdout),
        ])
        # }}}
    # {{{ hooks
    @rfm.run_before('performance')
    def set_tool_perf_patterns(self):
        """Register mpiP-derived metrics just before the performance stage.

        The integer passed to sphsmpip.mpip_perf_patterns selects which
        value is extracted from the mpiP report (see sphexa.sanity_mpip).
        """
        self.perf_patterns.update({
            'mpip_avg_mpi_time': sphsmpip.mpip_perf_patterns(self, 1),
            'mpip_avg_app_time': sphsmpip.mpip_perf_patterns(self, 2),
            '%mpip_avg_mpi_time_max': sphsmpip.mpip_perf_patterns(self, 5),
            '%mpip_avg_mpi_time': sphsmpip.mpip_perf_patterns(self, 3),
            '%mpip_avg_mpi_time_min': sphsmpip.mpip_perf_patterns(self, 6),
            '%mpip_avg_non_mpi_time': sphsmpip.mpip_perf_patterns(self, 4),
        })
    @rfm.run_before('performance')
    def set_tool_perf_reference(self):
        """Set no-op (zero, unbounded) references so metrics are reported.

        Tuples are ReFrame references: (target, lower_thres, upper_thres,
        unit); None thresholds disable pass/fail checking on the metric.
        """
        myzero_s = (0, None, None, 's')
        myzero_p = (0, None, None, '%')
        self.reference['*:mpip_avg_mpi_time'] = myzero_s
        self.reference['*:mpip_avg_app_time'] = myzero_s
        self.reference['*:%mpip_avg_mpi_time_max'] = myzero_p
        self.reference['*:%mpip_avg_mpi_time'] = myzero_p
        self.reference['*:%mpip_avg_mpi_time_min'] = myzero_p
        self.reference['*:%mpip_avg_non_mpi_time'] = myzero_p
    # }}}
# }}}
| [
"os.path.dirname",
"reframe.utility.sanity.assert_found",
"sphexa.sanity_mpip.mpip_perf_patterns",
"reframe.run_before"
] | [((2513, 2542), 'reframe.run_before', 'rfm.run_before', (['"""performance"""'], {}), "('performance')\n", (2527, 2542), True, 'import reframe as rfm\n'), ((3076, 3105), 'reframe.run_before', 'rfm.run_before', (['"""performance"""'], {}), "('performance')\n", (3090, 3105), True, 'import reframe as rfm\n'), ((319, 344), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (334, 344), False, 'import os\n'), ((2289, 2352), 'reframe.utility.sanity.assert_found', 'sn.assert_found', (['"""Total time for iteration\\\\(0\\\\)"""', 'self.stdout'], {}), "('Total time for iteration\\\\(0\\\\)', self.stdout)\n", (2304, 2352), True, 'import reframe.utility.sanity as sn\n'), ((2402, 2464), 'reframe.utility.sanity.assert_found', 'sn.assert_found', (['"""^mpiP: Storing mpiP output in """', 'self.stdout'], {}), "('^mpiP: Storing mpiP output in ', self.stdout)\n", (2417, 2464), True, 'import reframe.utility.sanity as sn\n'), ((2650, 2686), 'sphexa.sanity_mpip.mpip_perf_patterns', 'sphsmpip.mpip_perf_patterns', (['self', '(1)'], {}), '(self, 1)\n', (2677, 2686), True, 'import sphexa.sanity_mpip as sphsmpip\n'), ((2721, 2757), 'sphexa.sanity_mpip.mpip_perf_patterns', 'sphsmpip.mpip_perf_patterns', (['self', '(2)'], {}), '(self, 2)\n', (2748, 2757), True, 'import sphexa.sanity_mpip as sphsmpip\n'), ((2797, 2833), 'sphexa.sanity_mpip.mpip_perf_patterns', 'sphsmpip.mpip_perf_patterns', (['self', '(5)'], {}), '(self, 5)\n', (2824, 2833), True, 'import sphexa.sanity_mpip as sphsmpip\n'), ((2869, 2905), 'sphexa.sanity_mpip.mpip_perf_patterns', 'sphsmpip.mpip_perf_patterns', (['self', '(3)'], {}), '(self, 3)\n', (2896, 2905), True, 'import sphexa.sanity_mpip as sphsmpip\n'), ((2945, 2981), 'sphexa.sanity_mpip.mpip_perf_patterns', 'sphsmpip.mpip_perf_patterns', (['self', '(6)'], {}), '(self, 6)\n', (2972, 2981), True, 'import sphexa.sanity_mpip as sphsmpip\n'), ((3021, 3057), 'sphexa.sanity_mpip.mpip_perf_patterns', 'sphsmpip.mpip_perf_patterns', (['self', '(4)'], {}), 
'(self, 4)\n', (3048, 3057), True, 'import sphexa.sanity_mpip as sphsmpip\n')] |
import torch
from torch.nn import Module, Parameter
from torch.autograd import Function
class Forward_Warp_Python:
    """Pure-Python reference implementation of forward warping.

    Images are NCHW tensors. ``flow`` has shape (B, H, W, 2) where channel 0
    is the x (width) displacement and channel 1 the y (height) displacement.
    ``interpolation_mode`` 0 selects bilinear splatting; any other value
    selects nearest-neighbor warping.
    """
    @staticmethod
    def forward(im0, flow, interpolation_mode):
        """Warp ``im0`` forward along ``flow`` and return the result.

        Pixels whose warped footprint falls (partly) outside the image are
        dropped; uncovered target pixels stay zero.
        """
        im1 = torch.zeros_like(im0)
        B = im0.shape[0]
        H = im0.shape[2]
        W = im0.shape[3]
        if interpolation_mode == 0:
            # Bilinear: splat each source pixel onto its four neighboring
            # target pixels, weighted by overlap area.
            for b in range(B):
                for h in range(H):
                    for w in range(W):
                        x = w + flow[b, h, w, 0]
                        y = h + flow[b, h, w, 1]
                        nw = (int(torch.floor(x)), int(torch.floor(y)))
                        ne = (nw[0]+1, nw[1])
                        sw = (nw[0], nw[1]+1)
                        se = (nw[0]+1, nw[1]+1)
                        p = im0[b, :, h, w]
                        # Only splat when all four corners are in bounds.
                        if nw[0] >= 0 and se[0] < W and nw[1] >= 0 and se[1] < H:
                            nw_k = (se[0]-x)*(se[1]-y)
                            ne_k = (x-sw[0])*(sw[1]-y)
                            sw_k = (ne[0]-x)*(y-ne[1])
                            se_k = (x-nw[0])*(y-nw[1])
                            im1[b, :, nw[1], nw[0]] += nw_k*p
                            im1[b, :, ne[1], ne[0]] += ne_k*p
                            im1[b, :, sw[1], sw[0]] += sw_k*p
                            im1[b, :, se[1], se[0]] += se_k*p
        else:
            # Nearest neighbor: round the flow and move the pixel.
            round_flow = torch.round(flow)
            for b in range(B):
                for h in range(H):
                    for w in range(W):
                        x = w + int(round_flow[b, h, w, 0])
                        y = h + int(round_flow[b, h, w, 1])
                        if 0 <= x < W and 0 <= y < H:
                            im1[b, :, y, x] = im0[b, :, h, w]
        return im1
    @staticmethod
    def backward(grad_output, im0, flow, interpolation_mode):
        """Return ``(im0_grad, flow_grad)`` for the forward warp.

        ``im0_grad`` has the shape of ``grad_output`` (NCHW); ``flow_grad``
        has the shape of ``flow`` (B, H, W, 2).
        """
        B = grad_output.shape[0]
        C = grad_output.shape[1]
        H = grad_output.shape[2]
        W = grad_output.shape[3]
        im0_grad = torch.zeros_like(grad_output)
        # BUG FIX: flow_grad was allocated with torch.empty and only written
        # inside the in-bounds bilinear branch (and never in the nearest
        # branch), so out-of-bounds pixels returned uninitialized memory.
        # Zero-initialize it, matching flow's dtype and device.
        flow_grad = torch.zeros_like(flow)
        if interpolation_mode == 0:
            for b in range(B):
                for h in range(H):
                    for w in range(W):
                        x = w + flow[b, h, w, 0]
                        y = h + flow[b, h, w, 1]
                        x_f = int(torch.floor(x))
                        y_f = int(torch.floor(y))
                        x_c = x_f+1
                        y_c = y_f+1
                        nw = (x_f, y_f)
                        ne = (x_c, y_f)
                        sw = (x_f, y_c)
                        se = (x_c, y_c)
                        p = im0[b, :, h, w]
                        if nw[0] >= 0 and se[0] < W and nw[1] >= 0 and se[1] < H:
                            nw_k = (se[0]-x)*(se[1]-y)
                            ne_k = (x-sw[0])*(sw[1]-y)
                            sw_k = (ne[0]-x)*(y-ne[1])
                            se_k = (x-nw[0])*(y-nw[1])
                            nw_grad = grad_output[b, :, nw[1], nw[0]]
                            ne_grad = grad_output[b, :, ne[1], ne[0]]
                            sw_grad = grad_output[b, :, sw[1], sw[0]]
                            se_grad = grad_output[b, :, se[1], se[0]]
                            # d(out)/d(im0): re-gather with the same weights.
                            im0_grad[b, :, h, w] += nw_k*nw_grad
                            im0_grad[b, :, h, w] += ne_k*ne_grad
                            im0_grad[b, :, h, w] += sw_k*sw_grad
                            im0_grad[b, :, h, w] += se_k*se_grad
                            # d(out)/d(flow): derivatives of the four bilinear
                            # weights w.r.t. x and y, summed over channels.
                            flow_grad_x = torch.zeros(C)
                            flow_grad_y = torch.zeros(C)
                            flow_grad_x -= (y_c-y)*p*nw_grad
                            flow_grad_y -= (x_c-x)*p*nw_grad
                            flow_grad_x += (y_c-y)*p*ne_grad
                            flow_grad_y -= (x-x_f)*p*ne_grad
                            flow_grad_x -= (y-y_f)*p*sw_grad
                            flow_grad_y += (x_c-x)*p*sw_grad
                            flow_grad_x += (y-y_f)*p*se_grad
                            flow_grad_y += (x-x_f)*p*se_grad
                            flow_grad[b, h, w, 0] = torch.sum(flow_grad_x)
                            flow_grad[b, h, w, 1] = torch.sum(flow_grad_y)
        else:
            # Nearest warp is piecewise constant in the flow, so flow_grad
            # stays zero; the image gradient is a plain gather.
            round_flow = torch.round(flow)
            for b in range(B):
                for h in range(H):
                    for w in range(W):
                        x = w + int(round_flow[b, h, w, 0])
                        y = h + int(round_flow[b, h, w, 1])
                        if 0 <= x < W and 0 <= y < H:
                            im0_grad[b, :, h, w] = grad_output[b, :, y, x]
        return im0_grad, flow_grad
| [
"torch.floor",
"torch.round",
"torch.sum",
"torch.zeros_like",
"torch.empty",
"torch.zeros"
] | [((197, 218), 'torch.zeros_like', 'torch.zeros_like', (['im0'], {}), '(im0)\n', (213, 218), False, 'import torch\n'), ((2000, 2029), 'torch.zeros_like', 'torch.zeros_like', (['grad_output'], {}), '(grad_output)\n', (2016, 2029), False, 'import torch\n'), ((2050, 2075), 'torch.empty', 'torch.empty', (['[B, H, W, 2]'], {}), '([B, H, W, 2])\n', (2061, 2075), False, 'import torch\n'), ((1378, 1395), 'torch.round', 'torch.round', (['flow'], {}), '(flow)\n', (1389, 1395), False, 'import torch\n'), ((4324, 4341), 'torch.round', 'torch.round', (['flow'], {}), '(flow)\n', (4335, 4341), False, 'import torch\n'), ((2349, 2363), 'torch.floor', 'torch.floor', (['x'], {}), '(x)\n', (2360, 2363), False, 'import torch\n'), ((2399, 2413), 'torch.floor', 'torch.floor', (['y'], {}), '(y)\n', (2410, 2413), False, 'import torch\n'), ((3575, 3589), 'torch.zeros', 'torch.zeros', (['C'], {}), '(C)\n', (3586, 3589), False, 'import torch\n'), ((3632, 3646), 'torch.zeros', 'torch.zeros', (['C'], {}), '(C)\n', (3643, 3646), False, 'import torch\n'), ((4187, 4209), 'torch.sum', 'torch.sum', (['flow_grad_x'], {}), '(flow_grad_x)\n', (4196, 4209), False, 'import torch\n'), ((4262, 4284), 'torch.sum', 'torch.sum', (['flow_grad_y'], {}), '(flow_grad_y)\n', (4271, 4284), False, 'import torch\n'), ((567, 581), 'torch.floor', 'torch.floor', (['x'], {}), '(x)\n', (578, 581), False, 'import torch\n'), ((588, 602), 'torch.floor', 'torch.floor', (['y'], {}), '(y)\n', (599, 602), False, 'import torch\n')] |
import itertools
import Partitioning
class Algorithm( object ):
    """One loop-based algorithmic variant derived from a loop invariant.

    Stores the pieces produced during derivation (initialization,
    repartitionings, updates, ...) and precomputes, via
    prepare_for_code_generation, the metadata consumed by the code
    generators (partitionings, loop guard, repartition sizes, ...).
    """
    def __init__( self, linv, variant, init, repart, contwith, before, after, updates ):
        self.linv = linv
        self.variant = variant
        if init:
            # Only the first initialization is used.
            self.init = init[0]
        else:
            self.init = None
        self.repart = repart
        self.contwith = contwith
        self.before = before
        self.after = after
        self.updates = updates
        # To be filled up for code generation
        self.name = None
        self.partition = None
        self.partition_size = None
        self.guard = None
        self.repartition = None
        self.repartition_size = None
        self.basic_repart = None
        self.cont_with = None

    def prepare_for_code_generation( self ):
        """Populate every code-generation attribute in one pass."""
        self.set_name()
        self.set_partition()
        self.set_partition_size()
        self.set_guard()
        self.set_repartition()
        self.set_repartition_size()
        self.set_basic_repart()
        self.set_cont_with()

    def set_name( self ):
        """Name the routine after the operation and the variant number."""
        self.name = "%s_blk_var%d" % (self.linv.operation.name, self.variant)

    def set_partition( self ):
        """Map operand name -> (partition shape, flat partitioning, quadrant).

        The quadrant label encodes which part the traversal starts from.
        Operands with a (1, 1) partitioning are not partitioned and are
        skipped.
        """
        self.partition = dict()
        traversals = self.linv.traversals[0][0]
        for op in self.linv.linv_operands:
            part_size = self.linv.linv_operands_part_shape[ op.get_name() ]
            part_flat = list(itertools.chain( *self.linv.linv_operands_basic_part[ op.get_name() ] ))
            trav = traversals[op.get_name()]
            if part_size == (1, 1):
                continue
            elif part_size == (1, 2):
                part_quad = "L" if trav == (0, 1) else "R"  # (0, -1) -> R
            elif part_size == (2, 1):
                part_quad = "T" if trav == (1, 0) else "B"  # (-1, 0) -> B
            elif part_size == (2, 2):
                if trav == (1, 1):
                    part_quad = "TL"
                elif trav == (1, -1):
                    part_quad = "TR"
                elif trav == (-1, 1):
                    part_quad = "BL"
                else:  # (-1, -1)
                    part_quad = "BR"
            else:
                raise Exception
            self.partition[ op.get_name() ] = (part_size, part_flat, part_quad)

    def set_partition_size( self ):
        """Map operand name -> initial sizes of the traversed quadrant.

        A 0 entry denotes the dimension that grows as the loop proceeds.
        """
        self.partition_size = dict()
        traversals = self.linv.traversals[0][0]
        for op in self.linv.linv_operands:
            name = op.get_name()
            traversal = traversals[name]
            if traversal == (0, 0):
                continue
            elif traversal in ( (0, 1), (0, -1) ): # L|R (the specific quadrant can be retrieved from self.partition)
                self.partition_size[ name ] = ( op.size[0], 0 )
            elif traversal in ( (1, 0), (-1, 0) ): # T/B
                self.partition_size[ name ] = ( 0, op.size[1] )
            elif traversal in ( (1, 1), (1, -1), (-1, 1), (-1, -1) ): # 2x2
                self.partition_size[ name ] = ( 0, 0 )
            else:
                print( name, traversal )
                raise Exception

    def set_guard( self ):
        """Build the loop guard.

        One (dimension size, (quadrant, operand)) pair per bound
        dimension, taken from the first operand that owns its storage and
        is traversed along that dimension.
        """
        self.guard = []
        traversals = self.linv.traversals[0][0]
        for bd in self.linv.operation.bound_dimensions[1:]:
            for d in bd:
                op_name, dim = d.split("_")
                op = [ o for o in self.linv.operation.operands if o.name == op_name ][0]
                if op.st_info[1] != op:
                    # Skip operands that are views on other storage.
                    continue
                idx = 0 if dim == "r" else 1
                if ( traversals[op_name][idx] == 0 ):
                    continue
                self.guard.append( (op.get_size()[idx], guard(op, traversals[op_name])) )
                break

    def set_repartition( self ):
        """Map operand name -> (repartition shape, flattened repartition,
        quadrant that is split while traversing)."""
        self.repartition = dict()
        traversals = self.linv.traversals[0][0]
        for op in self.linv.linv_operands:
            part_size = self.linv.linv_operands_part_shape[ op.get_name() ]
            repart = self.repart[ op.get_name() ]
            traversal = traversals[op.get_name()]
            if part_size == (1, 1):
                continue
            elif part_size == (1, 2):
                repart_size = (1, 3)
                # ( 0 || 1 | 2 ) when moving right, ( 0 | 1 || 2 ) otherwise.
                repart_quadrant = "R" if traversal == (0, 1) else "L"
            elif part_size == (2, 1):
                repart_size = (3, 1)
                # ( 0 // 1 / 2 ) when moving down, ( 0 / 1 // 2 ) otherwise.
                repart_quadrant = "B" if traversal == (1, 0) else "T"
            elif part_size == (2, 2):
                repart_size = (3, 3)
                if traversal == (1, 1):      # BR becomes 2x2
                    repart_quadrant = "BR"
                elif traversal == (1, -1):   # BL becomes 2x2
                    repart_quadrant = "BL"
                elif traversal == (-1, 1):   # TR becomes 2x2
                    repart_quadrant = "TR"
                else:                        # TL becomes 2x2
                    repart_quadrant = "TL"
            else:
                raise Exception
            repart_flat = list(flatten_repart(repart))
            self.repartition[ op.get_name() ] = (repart_size, repart_flat, repart_quadrant)

    def set_repartition_size( self ):
        """Map operand name -> (exposed block label, rows, columns) of the
        repartitioned block; "bs" is the symbolic block size.

        NOTE: this method used to be shadowed by a second, incomplete
        definition further down the class; the duplicate has been removed
        so the sizes are actually computed.
        """
        self.repartition_size = dict()
        traversals = self.linv.traversals[0][0]
        for op in self.linv.linv_operands:
            name = op.get_name()
            traversal = traversals[name]
            if traversal == (0, 0):
                continue
            elif traversal in ( (0, 1), (0, -1) ): # Quadrant is 1
                self.repartition_size[ name ] = ( "1", op.size[0], "bs" )
            elif traversal in ( (1, 0), (-1, 0) ): # Quadrant is 1
                self.repartition_size[ name ] = ( "1", "bs", op.size[1] )
            elif traversal in ( (1, 1), (1, -1), (-1, 1), (-1, -1) ): # Quadrant is 11
                self.repartition_size[ name ] = ( "11", "bs", "bs" )
            else:
                print( name, traversal )
                raise Exception

    def set_basic_repart( self ):
        """Map operand name -> symbolic repartitioning of the operand,
        delegated to Partitioning.repartition_shape."""
        self.basic_repart = dict()
        for op in self.linv.linv_operands:
            part_size = self.linv.linv_operands_part_shape[ op.get_name() ]
            if part_size == (1, 1):
                repart_size = (1, 1)
            elif part_size == (1, 2):
                repart_size = (1, 3)
            elif part_size == (2, 1):
                repart_size = (3, 1)
            elif part_size == (2, 2):
                repart_size = (3, 3)
            else:
                raise Exception
            self.basic_repart[ op.get_name() ] = Partitioning.repartition_shape( op, repart_size )

    def set_cont_with( self ):
        """Map operand name -> quadrant that the processed block is merged
        back into when the loop continues."""
        self.cont_with = dict()
        traversals = self.linv.traversals[0][0]
        for op in self.linv.linv_operands:
            part_size = self.linv.linv_operands_part_shape[ op.get_name() ]
            traversal = traversals[op.get_name()]
            if part_size == (1, 1):
                continue
            elif part_size == (1, 2):
                # ( 0 | 1 || 2 ) 1 appended to L, otherwise to R.
                cont_with_quadrant = "L" if traversal == (0, 1) else "R"
            elif part_size == (2, 1):
                # ( 0 / 1 // 2 ) 1 appended to T, otherwise to B.
                cont_with_quadrant = "T" if traversal == (1, 0) else "B"
            elif part_size == (2, 2):
                if traversal == (1, 1):      # TL grows
                    cont_with_quadrant = "TL"
                elif traversal == (1, -1):   # TR grows
                    cont_with_quadrant = "TR"
                elif traversal == (-1, 1):   # BL grows
                    cont_with_quadrant = "BL"
                else:                        # BR grows
                    cont_with_quadrant = "BR"
            else:
                raise Exception
            self.cont_with[ op.get_name() ] = cont_with_quadrant
def guard( op, traversal ):
    """Return the (quadrant, operand) pair driving the loop guard.

    The quadrant label is the one the traversal starts from, e.g. a
    left-to-right traversal is guarded by the size of the L part.
    Raises Exception for unknown traversal directions.
    """
    name = op.get_name()
    if traversal == (0, 1):      # L -> R
        return ("L", op)
    elif traversal == (0, -1):   # R -> L
        return ("R", op)
    elif traversal == (1, 0):    # T -> B
        return ("T", op)
    elif traversal == (-1, 0):   # B -> T
        return ("B", op)
    elif traversal == (1, 1):    # TL -> BR
        return ("TL", op)
    elif traversal == (1, -1):   # TR -> BL
        return ("TR", op)
    elif traversal == (-1, 1):   # BL -> TR
        return ("BL", op)
    elif traversal == (-1, -1):  # BR -> TL
        return ("BR", op)
    else:
        # Fixed: the original printed the undefined name `op_name` here,
        # which raised NameError instead of the intended diagnostic.
        print( name, traversal )
        raise Exception
# Flattens a matrix of matrices resulting from a repartitioning
def flatten_repart( repart ):
    """Yield the quadrants of a repartitioning (a matrix whose cells are
    themselves matrices) in row-major order of their absolute position."""
    positioned = []
    row_base, col_base = 0, 0
    for block_row in repart:
        for block in block_row:
            abs_row = row_base
            for inner_row in block:
                abs_col = col_base
                for quadrant in inner_row:
                    positioned.append( (abs_row, abs_col, quadrant) )
                    abs_col += 1
                abs_row += 1
            col_base += len( block.children[0] )
        # NOTE(review): this overwrites (rather than accumulates) the row
        # offset using the last block of the row; preserved as-is from the
        # original implementation — confirm intent for reparts with more
        # than two block rows.
        row_base = len( block.children )
        col_base = 0
    for _, _, quadrant in sorted( positioned ):
        yield quadrant
| [
"Partitioning.repartition_shape"
] | [((7650, 7697), 'Partitioning.repartition_shape', 'Partitioning.repartition_shape', (['op', 'repart_size'], {}), '(op, repart_size)\n', (7680, 7697), False, 'import Partitioning\n')] |
import numpy as np
import matplotlib.mlab as mlab
import matplotlib.pyplot as plt
import matplotlib.gridspec as gs
import sys
# Phonon dispersion along the q-path: column 0 is the path coordinate,
# the remaining columns are the branch frequencies (presumably in cm^-1,
# given the 0.12398 -> meV conversion applied below — confirm).
data = np.loadtxt('NbSe2.freq.gp')
symmetryfile = 'plotband.out'
# Mode-resolved electron-phonon coupling strengths; values are clipped
# at 1 so outliers do not dominate the colour scale.
lbd = np.loadtxt("lambda.dat")
lbd_val = np.where(lbd<1 , lbd, 1)
def Symmetries(fstring):
    """Return the x positions of the "high-symmetry" points reported in a
    plotband output file, as a 1-D numpy array of floats."""
    points = []
    with open(fstring, 'r') as handle:
        for line in handle:
            if "high-symmetry" in line:
                # The coordinate is the last whitespace-separated token.
                points.append(float(line.split()[-1]))
    return np.asarray(points, dtype=float)
# Repeat the path coordinate once per branch (the tile factor of 9
# presumably matches the number of phonon branches in the data — confirm
# against NbSe2.freq.gp).
x=np.tile(data.T[0],9)
# Flatten the per-branch coupling strengths and frequencies so every
# (q, branch) point becomes one scatter marker.
val = lbd_val.T[1:].reshape(-1)
y=data.T[1:].reshape(-1,)
fig=plt.figure(figsize=(8,6))
labels=["G","M","K","G"]
# 0.12398 converts the frequencies to meV (see the y-axis label below);
# colour encodes the clipped electron-phonon coupling strength.
plt.scatter(x,y*0.12398,c=val,cmap="copper",s=10)
sym_tick = Symmetries(symmetryfile)
# Vertical guides at every high-symmetry point except the last one,
# which coincides with the right plot edge.
for i in range(len(sym_tick)-1):
    plt.axvline(sym_tick[i],linestyle='dashed', color='black', alpha=0.75)
plt.xticks(sym_tick,labels)
plt.xlim(min(sym_tick),max(sym_tick))
plt.ylim(0)
plt.ylabel("Energy (meV)")
plt.colorbar()
plt.savefig("epc.pdf")
plt.show()
"numpy.tile",
"matplotlib.pyplot.savefig",
"matplotlib.pyplot.xticks",
"matplotlib.pyplot.ylabel",
"numpy.where",
"matplotlib.pyplot.colorbar",
"matplotlib.pyplot.figure",
"numpy.zeros",
"matplotlib.pyplot.scatter",
"matplotlib.pyplot.ylim",
"numpy.loadtxt",
"matplotlib.pyplot.axvline",
"mat... | [((133, 160), 'numpy.loadtxt', 'np.loadtxt', (['"""NbSe2.freq.gp"""'], {}), "('NbSe2.freq.gp')\n", (143, 160), True, 'import numpy as np\n'), ((197, 221), 'numpy.loadtxt', 'np.loadtxt', (['"""lambda.dat"""'], {}), "('lambda.dat')\n", (207, 221), True, 'import numpy as np\n'), ((232, 257), 'numpy.where', 'np.where', (['(lbd < 1)', 'lbd', '(1)'], {}), '(lbd < 1, lbd, 1)\n', (240, 257), True, 'import numpy as np\n'), ((458, 479), 'numpy.tile', 'np.tile', (['data.T[0]', '(9)'], {}), '(data.T[0], 9)\n', (465, 479), True, 'import numpy as np\n'), ((541, 567), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(8, 6)'}), '(figsize=(8, 6))\n', (551, 567), True, 'import matplotlib.pyplot as plt\n'), ((592, 647), 'matplotlib.pyplot.scatter', 'plt.scatter', (['x', '(y * 0.12398)'], {'c': 'val', 'cmap': '"""copper"""', 's': '(10)'}), "(x, y * 0.12398, c=val, cmap='copper', s=10)\n", (603, 647), True, 'import matplotlib.pyplot as plt\n'), ((786, 814), 'matplotlib.pyplot.xticks', 'plt.xticks', (['sym_tick', 'labels'], {}), '(sym_tick, labels)\n', (796, 814), True, 'import matplotlib.pyplot as plt\n'), ((852, 863), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(0)'], {}), '(0)\n', (860, 863), True, 'import matplotlib.pyplot as plt\n'), ((864, 890), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Energy (meV)"""'], {}), "('Energy (meV)')\n", (874, 890), True, 'import matplotlib.pyplot as plt\n'), ((891, 905), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {}), '()\n', (903, 905), True, 'import matplotlib.pyplot as plt\n'), ((906, 928), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""epc.pdf"""'], {}), "('epc.pdf')\n", (917, 928), True, 'import matplotlib.pyplot as plt\n'), ((929, 939), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (937, 939), True, 'import matplotlib.pyplot as plt\n'), ((317, 328), 'numpy.zeros', 'np.zeros', (['(0)'], {}), '(0)\n', (325, 328), True, 'import numpy as np\n'), ((715, 786), 'matplotlib.pyplot.axvline', 'plt.axvline', 
(['sym_tick[i]'], {'linestyle': '"""dashed"""', 'color': '"""black"""', 'alpha': '(0.75)'}), "(sym_tick[i], linestyle='dashed', color='black', alpha=0.75)\n", (726, 786), True, 'import matplotlib.pyplot as plt\n')] |
"""Python library to connect deCONZ and Home Assistant to work together."""
import logging
from .light import DeconzLightBase
_LOGGER = logging.getLogger(__name__)
class DeconzGroup(DeconzLightBase):
    """deCONZ light group representation.
    Dresden Elektroniks documentation of light groups in deCONZ
    http://dresden-elektronik.github.io/deconz-rest-doc/groups/
    """

    def __init__(self, device_id, device, async_set_state_callback):
        """Set initial information about light group.
        Set callback to set state of device.
        """
        deconz_id = '/groups/' + device_id
        self._all_on = device['state'].get('all_on')
        self._any_on = device['state'].get('any_on')
        self._bri = device['action'].get('bri')
        self._class = device.get('class')
        self._colormode = device['action'].get('colormode')
        self._ct = device['action'].get('ct')
        self._devicemembership = device.get('devicemembership')
        self._effect = device['action'].get('effect')
        self._hidden = device.get('hidden')
        self._hue = device['action'].get('hue')
        self._id = device.get('id')
        self._lights = device.get('lights')
        self._lightsequence = device.get('lightsequence')
        self._multideviceids = device.get('multideviceids')
        self._on = device['action'].get('on')
        # Groups are always treated as reachable (presumably because the
        # REST API reports no reachability for groups — confirm).
        self._reachable = True
        self._sat = device['action'].get('sat')
        self._scenes = {}
        self._x, self._y = device['action'].get('xy', (None, None))
        super().__init__(deconz_id, device, async_set_state_callback)
        self.async_add_scenes(device.get('scenes'), async_set_state_callback)

    async def async_set_state(self, data):
        """Set state of light group.
        {
            "on": true,
            "bri": 180,
            "hue": 43680,
            "sat": 255,
            "transitiontime": 10
        }
        Also update local values of group since websockets doesn't.
        """
        field = self.deconz_id + '/action'
        await self._async_set_state_callback(field, data)
        self.async_update({'state': data})

    def as_dict(self):
        """Callback for __dict__.

        Scenes are dropped from the serialized form.
        """
        cdict = super().as_dict()
        if '_scenes' in cdict:
            del cdict['_scenes']
        return cdict

    @property
    def state(self):
        """True if any light in light group is on."""
        return self._any_on

    @property
    def groupclass(self):
        """The 'class' attribute of the group (renamed to avoid the
        Python keyword)."""
        return self._class

    @property
    def all_on(self):
        """True if all lights in light group are on."""
        return self._all_on

    @property
    def devicemembership(self):
        """List of device ids (sensors) when group was created by a device."""
        return self._devicemembership

    @property
    def hidden(self):
        """Indicate the hidden status of the group.
        Has no effect at the gateway but apps can uses this to hide groups.
        """
        return self._hidden

    @property
    def id(self):
        """The id of the group."""
        return self._id

    @property
    def lights(self):
        """A list of all light ids of this group.
        Sequence is defined by the gateway.
        """
        return self._lights

    @property
    def lightsequence(self):
        """A list of light ids of this group that can be sorted by the user.
        Need not to contain all light ids of this group.
        """
        return self._lightsequence

    @property
    def multideviceids(self):
        """A list of light ids of this group.
        Subsequent ids from multidevices with multiple endpoints.
        """
        return self._multideviceids

    @property
    def scenes(self):
        """A list of scenes of the group."""
        return self._scenes

    def async_add_scenes(self, scenes, async_set_state_callback):
        """Add scenes belonging to group.

        Existing scene ids are kept; only new ids are wrapped in
        DeconzScene objects.
        """
        self._scenes = {
            scene['id']: DeconzScene(self, scene, async_set_state_callback)
            for scene in scenes
            if scene['id'] not in self._scenes
        }
class DeconzScene:
    """deCONZ scene representation.
    Dresden Elektroniks documentation of scenes in deCONZ
    http://dresden-elektronik.github.io/deconz-rest-doc/scenes/
    """

    def __init__(self, group, scene, async_set_state_callback):
        """Record the scene's metadata and the recall callback.

        A reference to the parent group's id, name and REST path is kept
        so the scene can build its own path and display name.
        """
        self._group_id = group.id
        self._group_name = group.name
        self._id = scene.get('id')
        self._name = scene.get('name')
        self._deconz_id = group.deconz_id + '/scenes/' + self._id
        self._async_set_state_callback = async_set_state_callback

    async def async_set_state(self, data):
        """Ask the gateway to recall this scene on its group."""
        recall_path = self._deconz_id + '/recall'
        await self._async_set_state_callback(recall_path, data)

    @property
    def deconz_id(self):
        """REST path of the scene, e.g. /groups/1/scenes/1."""
        return self._deconz_id

    @property
    def full_name(self):
        """Group name and scene name combined."""
        return self._group_name + ' ' + self._name

    @property
    def id(self):
        """Scene ID from deCONZ."""
        return self._id

    @property
    def name(self):
        """Scene name."""
        return self._name

    def as_dict(self):
        """Serializable view of the scene, without the state callback."""
        return {
            key: value
            for key, value in self.__dict__.items()
            if key != '_async_set_state_callback'
        }
| [
"logging.getLogger"
] | [((139, 166), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (156, 166), False, 'import logging\n')] |
"""
Regression tests for the REINFORCE agent on OpenAI gym environments
"""
import pytest
import numpy as np
import shutil
from yarlp.utils.env_utils import NormalizedGymEnv
from yarlp.agent.ddqn_agent import DDQNAgent
# Shared environment for every test in this module, created once at
# import time (presumably NormalizedGymEnv applies the usual Atari
# preprocessing when is_atari=True — confirm in yarlp.utils.env_utils).
env = NormalizedGymEnv(
    'PongNoFrameskip-v4',
    is_atari=True
)
def test_ddqn():
    """Smoke-test: a very short DDQN training run completes."""
    params = dict(
        max_timesteps=10,
        learning_start_timestep=1,
        train_freq=5,
        batch_size=1,
    )
    agent = DDQNAgent(env, **params)
    agent.train()
def test_seed():
    """Two agents trained with the same seed yield identical samples."""
    first = DDQNAgent(env, seed=143, max_timesteps=2)
    first.train()
    sample_a, *_ = first.replay_buffer.sample(1)
    second = DDQNAgent(env, seed=143, max_timesteps=2)
    second.train()
    sample_b, *_ = second.replay_buffer.sample(1)
    assert np.all(
        np.array(sample_a) == np.array(sample_b))
def test_save_models():
    """An agent can be saved, reloaded, and resume training."""
    agent = DDQNAgent(env, max_timesteps=2)
    agent.train()
    agent.save('testy_ddqn')
    restored = DDQNAgent.load('testy_ddqn')
    # Reset the timestep counter so training actually runs again.
    restored.t = 0
    restored.train()
    shutil.rmtree('testy_ddqn')
| [
"yarlp.agent.ddqn_agent.DDQNAgent.load",
"yarlp.utils.env_utils.NormalizedGymEnv",
"numpy.array",
"shutil.rmtree",
"yarlp.agent.ddqn_agent.DDQNAgent"
] | [((232, 285), 'yarlp.utils.env_utils.NormalizedGymEnv', 'NormalizedGymEnv', (['"""PongNoFrameskip-v4"""'], {'is_atari': '(True)'}), "('PongNoFrameskip-v4', is_atari=True)\n", (248, 285), False, 'from yarlp.utils.env_utils import NormalizedGymEnv\n'), ((327, 418), 'yarlp.agent.ddqn_agent.DDQNAgent', 'DDQNAgent', (['env'], {'max_timesteps': '(10)', 'learning_start_timestep': '(1)', 'train_freq': '(5)', 'batch_size': '(1)'}), '(env, max_timesteps=10, learning_start_timestep=1, train_freq=5,\n batch_size=1)\n', (336, 418), False, 'from yarlp.agent.ddqn_agent import DDQNAgent\n'), ((530, 571), 'yarlp.agent.ddqn_agent.DDQNAgent', 'DDQNAgent', (['env'], {'seed': '(143)', 'max_timesteps': '(2)'}), '(env, seed=143, max_timesteps=2)\n', (539, 571), False, 'from yarlp.agent.ddqn_agent import DDQNAgent\n'), ((646, 687), 'yarlp.agent.ddqn_agent.DDQNAgent', 'DDQNAgent', (['env'], {'seed': '(143)', 'max_timesteps': '(2)'}), '(env, seed=143, max_timesteps=2)\n', (655, 687), False, 'from yarlp.agent.ddqn_agent import DDQNAgent\n'), ((847, 878), 'yarlp.agent.ddqn_agent.DDQNAgent', 'DDQNAgent', (['env'], {'max_timesteps': '(2)'}), '(env, max_timesteps=2)\n', (856, 878), False, 'from yarlp.agent.ddqn_agent import DDQNAgent\n'), ((938, 966), 'yarlp.agent.ddqn_agent.DDQNAgent.load', 'DDQNAgent.load', (['"""testy_ddqn"""'], {}), "('testy_ddqn')\n", (952, 966), False, 'from yarlp.agent.ddqn_agent import DDQNAgent\n'), ((1005, 1032), 'shutil.rmtree', 'shutil.rmtree', (['"""testy_ddqn"""'], {}), "('testy_ddqn')\n", (1018, 1032), False, 'import shutil\n'), ((778, 790), 'numpy.array', 'np.array', (['ob'], {}), '(ob)\n', (786, 790), True, 'import numpy as np\n'), ((794, 807), 'numpy.array', 'np.array', (['ob2'], {}), '(ob2)\n', (802, 807), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Adds the yearly work quota ('arbeitslast') to the Mitglied model.

    dependencies = [
        ('arbeitsplan', '0008_auto_20141208_1906'),
    ]

    operations = [
        migrations.AddField(
            model_name='mitglied',
            name='arbeitslast',
            # Default quota: 10 hours per year.  The b'...' literals are
            # an artefact of running makemigrations under Python 2; they
            # must not be changed in an already-applied migration.
            field=models.IntegerField(default=10, help_text=b'Wieviele Stunden pro Jahr muss dieses Mitglied arbeiten?', verbose_name=b'Arbeitslast (h/Jahr)'),
            preserve_default=True,
        ),
    ]
| [
"django.db.models.IntegerField"
] | [((361, 510), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(10)', 'help_text': "b'Wieviele Stunden pro Jahr muss dieses Mitglied arbeiten?'", 'verbose_name': "b'Arbeitslast (h/Jahr)'"}), "(default=10, help_text=\n b'Wieviele Stunden pro Jahr muss dieses Mitglied arbeiten?',\n verbose_name=b'Arbeitslast (h/Jahr)')\n", (380, 510), False, 'from django.db import models, migrations\n')] |
import collections
import logging
import json
import os
import luigi
import gokart
import tqdm
import torch
import sentencepiece as spm
import sacrebleu
import MeCab
from fairseq.models.transformer import TransformerModel
from fairseq.data import LanguagePairDataset
from context_nmt.pipelines.conversation_dataset_merger import (
MergeMultipleDataset,
CONCAT_TOKEN,
)
logger = logging.getLogger("luigi-interface")
class GenerateContextIndicator(gokart.TaskOnKart):
    """Score candidate context sentences for context-aware NMT.

    For every parallel sentence pair, translate the source with several
    models trained with different context offsets ("biases") and record,
    per offset, the negated BLEU score and the criterion losses.  The
    resulting per-sentence indicator dict is dumped for later use.
    """
    task_namespace = "context_nmt"
    split_name = luigi.Parameter()
    dataset_names = luigi.ListParameter()
    source_paths = luigi.ListParameter()
    source_lang = luigi.Parameter()
    target_lang = luigi.Parameter()
    # Maps context bias (as string) -> path to a fairseq checkpoint.
    context_aware_translation_models = luigi.DictParameter()
    context_aware_sentencepiece_model = luigi.Parameter()
    max_source_positions = luigi.IntParameter(default=128)
    max_target_positions = luigi.IntParameter(default=128)
    max_sentences = luigi.IntParameter(default=128)
    sentence_translation_model_name = luigi.Parameter(default=None)
    sentence_translation_models = luigi.DictParameter(default={})
    sentence_sentencepiece_models = luigi.DictParameter(default={})
    # NOTE: parameter name keeps the original "threhold" typo so that
    # existing configs remain valid.
    score_threhold = luigi.FloatParameter(default=0.3)

    def requires(self):
        """Depend on the merged multi-dataset for this split."""
        return MergeMultipleDataset(
            split_name=self.split_name,
            dataset_names=self.dataset_names,
            source_paths=self.source_paths,
            translation_model_name=self.sentence_translation_model_name,
            translation_models=self.sentence_translation_models,
            sentencepiece_models=self.sentence_sentencepiece_models,
        )

    def output(self):
        """Pickle target holding the per-document context indicators."""
        name_components = [
            self.split_name,
            self.source_lang,
            self.target_lang,
            self.sentence_translation_model_name,
        ]
        return self.make_target("_".join(name_components) + "_context_indicators.pkl")

    def run(self):
        def tokenize_for_bleu(target):
            # Detokenize sentencepiece output; for Japanese, re-tokenize
            # with MeCab (surface forms only) so BLEU is word-level.
            target = tokenizer.decode_pieces(target.split())
            if self.target_lang == "ja":
                target = " ".join(
                    map(
                        lambda x: x.split("\t")[0],
                        tagger.parse(target).split("\n")[:-2],
                    )
                )
            return target

        docs = self.load()
        tagger = MeCab.Tagger()
        tokenizer = spm.SentencePieceProcessor()
        tokenizer.load(self.context_aware_sentencepiece_model)
        # Load one half-precision GPU model per context bias.
        translation_models = {}
        for bias, path in self.context_aware_translation_models.items():
            base_path, checkpoint_path = os.path.split(path)
            model = (
                TransformerModel.from_pretrained(
                    base_path, checkpoint_file=checkpoint_path
                )
                .half()
                .cuda()
                .eval()
            )
            model.args.max_source_positions = self.max_source_positions
            model.args.max_target_positions = self.max_target_positions
            translation_models[int(bias)] = model
        # Bias -1 denotes the context-free model; its args/task serve as
        # the reference for building the loss criterion.
        args = translation_models[-1].args
        task = translation_models[-1].task
        criterion = task.build_criterion(args)
        results = collections.defaultdict(dict)
        for doc_id, doc in tqdm.tqdm(docs.items(), total=len(docs)):
            # Keep only sentence pairs whose alignment score passes the
            # threshold.
            parallel_doc = set(
                [
                    sent_id
                    for sent_id, score in doc["pairs"]
                    if score >= self.score_threhold
                ]
            )
            batches = collections.defaultdict(dict)
            targets = {}
            for sent_id in parallel_doc:
                source, target = [
                    tokenizer.encode_as_pieces(doc[lang][sent_id])
                    for lang in (self.source_lang, self.target_lang)
                ]
                # Earlier non-empty source sentences usable as context.
                available_index = [
                    index for index in range(0, sent_id) if doc[self.source_lang][index]
                ]
                # context_bias is the parameter which the model is trained with.
                # context_sent_index is the index of the actual used contextual
                # sentence.
                targets[sent_id] = " ".join(target)
                for context_bias, _ in translation_models.items():
                    context_sent_index = None
                    if context_bias != -1:
                        if len(available_index) < context_bias:
                            context_sent_index = -1
                        else:
                            context_sent_index = available_index[-context_bias]
                        source_context = tokenizer.encode_as_pieces(
                            docs[doc_id][self.source_lang][context_sent_index]
                        )
                        real_source = source_context + [CONCAT_TOKEN] + source
                    else:
                        real_source = source
                    # Drop sentences that are empty or would overflow the
                    # model's source length limit.
                    if real_source and len(real_source) < self.max_source_positions:
                        source_sentence = " ".join(real_source)
                    else:
                        source_sentence = None
                    batches[context_bias][sent_id] = source_sentence
            # batch_results[sent_id][metric][context_bias] = value
            batch_results = collections.defaultdict(
                lambda: collections.defaultdict(dict)
            )
            for context_bias, batch in batches.items():
                data = [sentence for sentence in batch.values() if sentence]
                if not data:
                    continue
                real_targets = {
                    sent_id: targets[sent_id] for sent_id in batch if batch[sent_id]
                }
                model = translation_models[context_bias]
                args.max_source_positions = self.max_source_positions
                args.max_target_positions = self.max_target_positions
                translated = model.translate(data)
                # Compute BLEU score
                # Make the BLEU negative to ease the downstream results
                # computation (smaller is better, like the losses).
                for trans, (sent_id, target) in zip(translated, real_targets.items()):
                    batch_results[sent_id]["bleu"][
                        context_bias
                    ] = -sacrebleu.corpus_bleu(
                        tokenize_for_bleu(trans), tokenize_for_bleu(target)
                    ).score
                # Compute loss
                src_tokens = [
                    model.src_dict.encode_line(
                        real_source,
                        line_tokenizer=lambda x: x.split(),
                        add_if_not_exist=False,
                    ).long()
                    for real_source in data
                ]
                src_lengths = [tokens.numel() for tokens in src_tokens]
                tgt_tokens = [
                    model.tgt_dict.encode_line(
                        target,
                        line_tokenizer=lambda x: x.split(),
                        add_if_not_exist=False,
                    ).long()
                    for target in real_targets.values()
                ]
                tgt_lengths = [tokens.numel() for tokens in tgt_tokens]
                temp_dataset = LanguagePairDataset(
                    src_tokens,
                    src_lengths,
                    model.src_dict,
                    tgt_tokens,
                    tgt_lengths,
                    left_pad_source=args.left_pad_source,
                    left_pad_target=args.left_pad_target,
                    max_source_positions=self.max_source_positions,
                    max_target_positions=self.max_target_positions,
                )
                reports = collections.defaultdict(list)
                iterator = task.get_batch_iterator(
                    dataset=temp_dataset, max_sentences=self.max_sentences,
                )
                for sample in iterator.next_epoch_itr(shuffle=False):
                    # Move the whole sample to the GPU before scoring.
                    sample["net_input"]["src_tokens"] = sample["net_input"][
                        "src_tokens"
                    ].cuda()
                    sample["net_input"]["src_lengths"] = sample["net_input"][
                        "src_lengths"
                    ].cuda()
                    sample["net_input"]["prev_output_tokens"] = sample["net_input"][
                        "prev_output_tokens"
                    ].cuda()
                    sample["target"] = sample["target"].cuda()
                    with torch.no_grad():
                        _, _, report = criterion(model.models[0], sample, False)
                    for key, value in report.items():
                        reports[key].append(value)
                # NOTE(review): this assumes the batch iterator preserves
                # the order of real_targets when concatenating per-sample
                # losses — confirm with fairseq's batching behaviour.
                for key in ("loss", "nll_loss"):
                    for value, (sent_id, _) in zip(
                        torch.cat(reports[key]), real_targets.items()
                    ):
                        batch_results[sent_id][key][context_bias] = float(value)
            for sent_id, value in batch_results.items():
                results[doc_id][sent_id] = value
        self.dump(dict(results))
| [
"logging.getLogger",
"luigi.FloatParameter",
"context_nmt.pipelines.conversation_dataset_merger.MergeMultipleDataset",
"fairseq.models.transformer.TransformerModel.from_pretrained",
"luigi.IntParameter",
"luigi.DictParameter",
"sentencepiece.SentencePieceProcessor",
"fairseq.data.LanguagePairDataset",... | [((389, 425), 'logging.getLogger', 'logging.getLogger', (['"""luigi-interface"""'], {}), "('luigi-interface')\n", (406, 425), False, 'import logging\n'), ((531, 548), 'luigi.Parameter', 'luigi.Parameter', ([], {}), '()\n', (546, 548), False, 'import luigi\n'), ((569, 590), 'luigi.ListParameter', 'luigi.ListParameter', ([], {}), '()\n', (588, 590), False, 'import luigi\n'), ((610, 631), 'luigi.ListParameter', 'luigi.ListParameter', ([], {}), '()\n', (629, 631), False, 'import luigi\n'), ((650, 667), 'luigi.Parameter', 'luigi.Parameter', ([], {}), '()\n', (665, 667), False, 'import luigi\n'), ((686, 703), 'luigi.Parameter', 'luigi.Parameter', ([], {}), '()\n', (701, 703), False, 'import luigi\n'), ((743, 764), 'luigi.DictParameter', 'luigi.DictParameter', ([], {}), '()\n', (762, 764), False, 'import luigi\n'), ((805, 822), 'luigi.Parameter', 'luigi.Parameter', ([], {}), '()\n', (820, 822), False, 'import luigi\n'), ((850, 881), 'luigi.IntParameter', 'luigi.IntParameter', ([], {'default': '(128)'}), '(default=128)\n', (868, 881), False, 'import luigi\n'), ((909, 940), 'luigi.IntParameter', 'luigi.IntParameter', ([], {'default': '(128)'}), '(default=128)\n', (927, 940), False, 'import luigi\n'), ((961, 992), 'luigi.IntParameter', 'luigi.IntParameter', ([], {'default': '(128)'}), '(default=128)\n', (979, 992), False, 'import luigi\n'), ((1031, 1060), 'luigi.Parameter', 'luigi.Parameter', ([], {'default': 'None'}), '(default=None)\n', (1046, 1060), False, 'import luigi\n'), ((1095, 1126), 'luigi.DictParameter', 'luigi.DictParameter', ([], {'default': '{}'}), '(default={})\n', (1114, 1126), False, 'import luigi\n'), ((1163, 1194), 'luigi.DictParameter', 'luigi.DictParameter', ([], {'default': '{}'}), '(default={})\n', (1182, 1194), False, 'import luigi\n'), ((1216, 1249), 'luigi.FloatParameter', 'luigi.FloatParameter', ([], {'default': '(0.3)'}), '(default=0.3)\n', (1236, 1249), False, 'import luigi\n'), ((1290, 1595), 
'context_nmt.pipelines.conversation_dataset_merger.MergeMultipleDataset', 'MergeMultipleDataset', ([], {'split_name': 'self.split_name', 'dataset_names': 'self.dataset_names', 'source_paths': 'self.source_paths', 'translation_model_name': 'self.sentence_translation_model_name', 'translation_models': 'self.sentence_translation_models', 'sentencepiece_models': 'self.sentence_sentencepiece_models'}), '(split_name=self.split_name, dataset_names=self.\n dataset_names, source_paths=self.source_paths, translation_model_name=\n self.sentence_translation_model_name, translation_models=self.\n sentence_translation_models, sentencepiece_models=self.\n sentence_sentencepiece_models)\n', (1310, 1595), False, 'from context_nmt.pipelines.conversation_dataset_merger import MergeMultipleDataset, CONCAT_TOKEN\n'), ((2393, 2407), 'MeCab.Tagger', 'MeCab.Tagger', ([], {}), '()\n', (2405, 2407), False, 'import MeCab\n'), ((2428, 2456), 'sentencepiece.SentencePieceProcessor', 'spm.SentencePieceProcessor', ([], {}), '()\n', (2454, 2456), True, 'import sentencepiece as spm\n'), ((3270, 3299), 'collections.defaultdict', 'collections.defaultdict', (['dict'], {}), '(dict)\n', (3293, 3299), False, 'import collections\n'), ((2666, 2685), 'os.path.split', 'os.path.split', (['path'], {}), '(path)\n', (2679, 2685), False, 'import os\n'), ((3608, 3637), 'collections.defaultdict', 'collections.defaultdict', (['dict'], {}), '(dict)\n', (3631, 3637), False, 'import collections\n'), ((7250, 7520), 'fairseq.data.LanguagePairDataset', 'LanguagePairDataset', (['src_tokens', 'src_lengths', 'model.src_dict', 'tgt_tokens', 'tgt_lengths'], {'left_pad_source': 'args.left_pad_source', 'left_pad_target': 'args.left_pad_target', 'max_source_positions': 'self.max_source_positions', 'max_target_positions': 'self.max_target_positions'}), '(src_tokens, src_lengths, model.src_dict, tgt_tokens,\n tgt_lengths, left_pad_source=args.left_pad_source, left_pad_target=args\n .left_pad_target, 
max_source_positions=self.max_source_positions,\n max_target_positions=self.max_target_positions)\n', (7269, 7520), False, 'from fairseq.data import LanguagePairDataset\n'), ((7733, 7762), 'collections.defaultdict', 'collections.defaultdict', (['list'], {}), '(list)\n', (7756, 7762), False, 'import collections\n'), ((5351, 5380), 'collections.defaultdict', 'collections.defaultdict', (['dict'], {}), '(dict)\n', (5374, 5380), False, 'import collections\n'), ((8514, 8529), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (8527, 8529), False, 'import torch\n'), ((8842, 8865), 'torch.cat', 'torch.cat', (['reports[key]'], {}), '(reports[key])\n', (8851, 8865), False, 'import torch\n'), ((2724, 2800), 'fairseq.models.transformer.TransformerModel.from_pretrained', 'TransformerModel.from_pretrained', (['base_path'], {'checkpoint_file': 'checkpoint_path'}), '(base_path, checkpoint_file=checkpoint_path)\n', (2756, 2800), False, 'from fairseq.models.transformer import TransformerModel\n')] |
import os
from copy import deepcopy
with open(os.path.join(os.path.dirname(__file__), "input.txt"), "r") as file:
    lines = [l.strip() for l in file.readlines()]

# Input layout: line 0 is "Player 1:", lines 1-25 are player 1's cards
# (top card first), line 26 is blank, line 27 is "Player 2:", the rest
# are player 2's cards.  Assumes exactly 25 cards per player — TODO
# confirm against input.txt.  reversed() stores each deck bottom-first
# so that list.pop() draws the top card.
p1 = list(reversed([int(i) for i in lines[1:26]]))
p2 = list(reversed([int(i) for i in lines[28:]]))
def part1(player1, player2):
    """Play a game of regular Combat (AoC 2020 day 22, part 1).

    Decks are bottom-first lists (top card last) and are consumed in
    place.  Returns the winner's score: each card's value times its
    1-based position counted from the bottom of the deck.
    """
    while player1 and player2:
        top1 = player1.pop()
        top2 = player2.pop()
        # Winner puts both cards at the bottom, own card above the other.
        winner, ordered = (
            (player1, (top1, top2)) if top1 > top2 else (player2, (top2, top1))
        )
        for card in ordered:
            winner.insert(0, card)
    survivor = player1 or player2
    return sum((pos + 1) * card for pos, card in enumerate(survivor))
def part2(player1, player2):
    """Play Recursive Combat (AoC 2020 day 22, part 2).

    Decks are bottom-first lists (top card last) and are consumed in
    place.  Returns the winner's score: each card's value times its
    1-based position counted from the bottom of the deck.

    Bug fix: the original history key serialized player 1's deck twice
    and ignored player 2 entirely, and the unseparated digits made keys
    ambiguous for multi-digit cards.  The key now encodes both decks
    with separators, matching the puzzle's repetition rule.
    """
    def game(pl1, pl2):
        # Returns "p1"/"p2", the winner of this (sub)game.
        history = set()
        while pl1 and pl2:
            # Repetition rule: if this exact pair of decks occurred in a
            # previous round of this game, player 1 wins immediately.
            state = ",".join(map(str, pl1)) + "|" + ",".join(map(str, pl2))
            if state in history:
                return "p1"
            history.add(state)
            p1_card = pl1.pop()
            p2_card = pl2.pop()
            if len(pl1) >= p1_card and len(pl2) >= p2_card:
                # Both players can recurse: the sub-game is played with a
                # copy of the top N cards of each deck (N = drawn card).
                winner = game(deepcopy(pl1[-p1_card:]), deepcopy(pl2[-p2_card:]))
            else:
                winner = "p1" if p1_card > p2_card else "p2"
            if winner == "p1":
                pl1.insert(0, p1_card)
                pl1.insert(0, p2_card)
            else:
                pl2.insert(0, p2_card)
                pl2.insert(0, p1_card)
        return "p1" if pl1 else "p2"

    winning_player = player1 if game(player1, player2) == "p1" else player2
    return sum((idx + 1) * card for idx, card in enumerate(winning_player))
# deepcopy so each part receives pristine decks (both parts mutate them
# in place).  Trailing comments record the author's accepted answers.
print(f"Part 1: {part1(deepcopy(p1), deepcopy(p2))}")  # 35202
print(f"Part 2: {part2(deepcopy(p1), deepcopy(p2))}")  # 32317
| [
"os.path.dirname",
"copy.deepcopy"
] | [((60, 85), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (75, 85), False, 'import os\n'), ((1968, 1980), 'copy.deepcopy', 'deepcopy', (['p1'], {}), '(p1)\n', (1976, 1980), False, 'from copy import deepcopy\n'), ((1982, 1994), 'copy.deepcopy', 'deepcopy', (['p2'], {}), '(p2)\n', (1990, 1994), False, 'from copy import deepcopy\n'), ((2031, 2043), 'copy.deepcopy', 'deepcopy', (['p1'], {}), '(p1)\n', (2039, 2043), False, 'from copy import deepcopy\n'), ((2045, 2057), 'copy.deepcopy', 'deepcopy', (['p2'], {}), '(p2)\n', (2053, 2057), False, 'from copy import deepcopy\n'), ((1240, 1264), 'copy.deepcopy', 'deepcopy', (['pl1[-p1_card:]'], {}), '(pl1[-p1_card:])\n', (1248, 1264), False, 'from copy import deepcopy\n'), ((1266, 1290), 'copy.deepcopy', 'deepcopy', (['pl2[-p2_card:]'], {}), '(pl2[-p2_card:])\n', (1274, 1290), False, 'from copy import deepcopy\n')] |
import shutil
from contextlib import contextmanager
from tempfile import NamedTemporaryFile
from s3fs import S3FileSystem
class S3Downloader:
    """Fetch S3 objects into local temporary files.

    Usage:
        with S3Downloader().get("bucket/key") as local_path:
            ...  # read local_path; the temp file is removed on exit
    """

    def __init__(self, tmp_dir=None, chunk_size=16 * 1024, **kwargs):
        # tmp_dir: directory for the temporary copies (None = system default).
        # chunk_size: copy buffer size in bytes.
        # Remaining kwargs are forwarded to S3FileSystem (credentials, etc.).
        self.tmp_dir = tmp_dir
        self.chunk_size = chunk_size
        self.fs = S3FileSystem(**kwargs)

    @contextmanager
    def get(self, src):
        """Yield the path of a temporary local copy of the S3 object *src*."""
        with self.fs.open(src) as remote, \
                NamedTemporaryFile(dir=self.tmp_dir) as local:
            shutil.copyfileobj(remote, local, length=self.chunk_size)
            local.flush()
            yield local.name
| [
"tempfile.NamedTemporaryFile",
"shutil.copyfileobj",
"s3fs.S3FileSystem"
] | [((302, 324), 's3fs.S3FileSystem', 'S3FileSystem', ([], {}), '(**kwargs)\n', (314, 324), False, 'from s3fs import S3FileSystem\n'), ((424, 460), 'tempfile.NamedTemporaryFile', 'NamedTemporaryFile', ([], {'dir': 'self.tmp_dir'}), '(dir=self.tmp_dir)\n', (442, 460), False, 'from tempfile import NamedTemporaryFile\n'), ((483, 531), 'shutil.copyfileobj', 'shutil.copyfileobj', (['s', 'd'], {'length': 'self.chunk_size'}), '(s, d, length=self.chunk_size)\n', (501, 531), False, 'import shutil\n')] |
# Copyright (c) 2021, MITRE Engenuity. Approved for public release.
# See LICENSE for complete terms.
import argparse
import json
import pathlib
import numpy
import requests
from src.create_mappings import get_sheets, get_sheet_by_name
def get_argparse():
    """Build the CLI argument parser for the ATT&CK to VERIS mappings validator."""
    argparser = argparse.ArgumentParser(description="ATT&CK to VERIS Mappings Validator")
    # Path-valued options: (flag, dest, default path, help text).
    path_options = [
        ("-config-location", "config_location",
         pathlib.Path("..", "frameworks", "veris", "input", "config.json"),
         "The path to the config metadata location."),
        ("-spreadsheet-location", "spreadsheet_location",
         pathlib.Path("..", "frameworks", "veris", "veris-mappings.xlsx"),
         "The path to the spreadsheet mappings location."),
        ("-json-location", "json_location",
         pathlib.Path("..", "frameworks", "veris", "veris-mappings.json"),
         "The path to the JSON mappings location."),
    ]
    for flag, dest, default, help_text in path_options:
        argparser.add_argument(flag, dest=dest, type=pathlib.Path,
                               default=default, help=help_text)
    # String-valued version options: (flag, dest, default, help text).
    version_options = [
        ("-attack-version", "attack_version", "9.0",
         "The ATT&CK release version to use."),
        ("-veris-version", "veris_version", "1.3.5",
         "The VERIS release version to use."),
        ("-metadata-version", "metadata_version", "1.9",
         "The Metadata version to check against."),
    ]
    for flag, dest, default, help_text in version_options:
        argparser.add_argument(flag, dest=dest, type=str,
                               default=default, help=help_text)
    return argparser
def get_mappings_file(mappings_location):
    """Returns the ATT&CK VERIS mappings JSON file parsed into a dict."""
    mappings_path = pathlib.Path(mappings_location).resolve()
    return json.loads(mappings_path.read_text(encoding="utf-8"))
def get_veris_enum():
    """Downloads the latest VERIS enum (v1.3.5 labels) and returns it as a dict."""
    labels_url = "https://raw.githubusercontent.com/vz-risk/VCDB/1.3.5/vcdb-labels.json"
    response = requests.get(labels_url, verify=True)
    return response.json()
def get_stix2_source(attack_version):
    """Downloads the enterprise ATT&CK STIX bundle for *attack_version* and
    returns a dict mapping ATT&CK technique IDs to STIX object IDs,
    skipping revoked and deprecated objects."""
    bundle_url = (
        f"https://raw.githubusercontent.com/mitre/cti/ATT%26CK-v{attack_version}/"
        f"enterprise-attack/enterprise-attack.json"
    )
    stix_bundle = requests.get(bundle_url, verify=True).json()
    attackid_to_stixid = {}
    for attack_object in stix_bundle["objects"]:
        if attack_object["type"] != "attack-pattern":
            continue
        if "external_references" not in attack_object:
            continue  # skip objects without IDs
        if attack_object.get("revoked", False) or attack_object.get("x_mitre_deprecated", False):
            continue  # skip revoked/deprecated objects
        # The first external reference carries the ATT&CK ID.
        attackid_to_stixid[attack_object["external_references"][0]["external_id"]] = attack_object["id"]
    return attackid_to_stixid
def validate_json_mappings_metadata(mappings_location, attack_version, veris_version, metadata_version):
    """Checks for presence and correct metadata information in the mappings JSON file."""
    mappings_dict = get_mappings_file(mappings_location)
    # Checks presence of metadata key
    assert mappings_dict, "[-] No Metadata Found..."
    metadata = mappings_dict["metadata"] if "metadata" in mappings_dict else mappings_dict
    # Checks metadata info matches the validator options
    version_checks = (
        (attack_version, "attack_version", "[-] ATT&CK Version does not match JSON contents"),
        (veris_version, "veris_version", "[-] VERIS Version does not match JSON contents"),
        (metadata_version, "mappings_version", "[-] Metadata Version does not match JSON contents"),
    )
    for expected, key, message in version_checks:
        assert expected == metadata[key], message
def validate_spreadsheet_mappings_metadata(spreadsheet_location, attack_version, veris_version, metadata_version):
    """Checks for presence and correct metadata information in the mappings spreadsheet."""
    sheet_data = get_sheet_by_name(spreadsheet_location, "Metadata")
    # Checks presence of metadata key
    assert sheet_data.empty is False, "[-] No Metadata Found..."
    # Fixed spreadsheet layout: row index -> (expected label in column 3,
    # expected value for column 5, label-mismatch message, value-mismatch message).
    expected_rows = {
        6: ("ATT&CK version", attack_version,
            "[-] Spreadsheet contents does not match ATT&CK version cell",
            "[-] ATT&CK Version does not match Spreadsheet contents"),
        7: ("VERIS version", veris_version,
            "[-] Spreadsheet contents does not match VERIS version cell",
            "[-] VERIS Version does not match Spreadsheet contents"),
        8: ("Mapping version", metadata_version,
            "[-] Spreadsheet contents does not match Mappings version cell",
            "[-] Mappings version does not match Spreadsheet contents"),
        9: ("Spreadsheet version", metadata_version,
            "[-] Spreadsheet contents does not match Spreadsheet version cell",
            "[-] Spreadsheet version does not match Spreadsheet contents "),
    }
    for idx, row in sheet_data.iterrows():
        if idx not in expected_rows:
            continue
        label, expected_value, cell_msg, value_msg = expected_rows[idx]
        # Checks metadata info matches the validator options.
        assert label == row[3], cell_msg
        assert expected_value == str(row[5]), value_msg
def validate_mapping_entries(spreadsheet_location, attack_version):
    """Walks over forward and reverse mappings checking the ATT&CK entry is valid.

    For every row of every sheet it verifies that:
    1) the ATT&CK technique ID exists in the (non-revoked, non-deprecated)
       ATT&CK knowledge base, 2) neither the VERIS path nor the technique ID
       is duplicated within a VERIS entry, and 3) the 4-part VERIS path
    resolves inside the downloaded VERIS enum.  Collects all failures and
    asserts at the end so every problem is printed in one run."""
    attack_source = get_stix2_source(attack_version)
    veris_enum = get_veris_enum()
    sheets = get_sheets(spreadsheet_location)
    print("\t\t[+] VERIS to ATT&CK mappings check...")
    fail_test = False
    for sheet, name in sheets:
        name = name.lower()
        print(f"\t\t\t[+] checking sheet: {name}")
        # veris_path carries over from the previous row when column 0 is
        # blank (merged-cell style spreadsheet layout).
        veris_path = None
        unique_per_veris_entry = {}
        for idx, row in sheet.iterrows():
            # NOTE(review): identity check against numpy.nan assumes pandas
            # fills blank cells with that exact singleton — TODO confirm.
            if row[0] is not numpy.nan:
                veris_path = f'{name}.{row[0]}'
                check_unique = True
            else:
                check_unique = False
            attack_technique = row[1]
            if attack_technique is numpy.nan:
                # Don't validate the attack_technique if the cell is blank (aka is numpy.nan)
                pass
            elif attack_technique not in attack_source:
                print(f"[-] In Sheet '{name}', under '{veris_path}', "
                      f"the technique ID '{attack_technique}' is invalid (revoked or deprecated)")
                fail_test = True
            # Only rows that introduce a new VERIS path are checked for
            # duplicated paths.
            if check_unique and veris_path in unique_per_veris_entry:
                print(f"[-] In Sheet '{name}', under '{veris_path}', "
                      f"the veris path is duplicated")
                fail_test = True
            if veris_path not in unique_per_veris_entry:
                unique_per_veris_entry[veris_path] = set()
            if attack_technique is numpy.nan:
                # Don't validate the attack_technique if the cell is blank (aka is numpy.nan)
                pass
            elif attack_technique not in unique_per_veris_entry[veris_path]:
                unique_per_veris_entry[veris_path].add(attack_technique)
            else:
                # Same technique mapped twice under one VERIS entry.
                print(f"[-] In Sheet '{name}', under '{veris_path}', "
                      f"the technique ID '{attack_technique}' is duplicated")
                fail_test = True
            try:
                # A valid VERIS path has exactly 4 dot-separated components
                # that index into the enum; ValueError covers a wrong count,
                # KeyError a missing component.
                axes, category, sub_category, veris_name = veris_path.split(".")
                extracted_value = veris_enum[axes][category][sub_category][veris_name]
                assert extracted_value
            except (KeyError, ValueError):
                print(f"[-] In Sheet '{name}', the VERIS path '{veris_path}' is invalid")
                fail_test = True
    assert fail_test is False
# Script entry point: validate metadata consistency of the config JSON, the
# mappings JSON and the spreadsheet, then validate every individual mapping
# entry.  Any problem fails via AssertionError from the called validators.
if __name__ == "__main__":
    parser = get_argparse()
    args = parser.parse_args()
    print("[+] Starting Execution")
    print(f"[+] Mappings Location: {args.spreadsheet_location}\t"
          f"ATT&CK Version: {args.attack_version}\t"
          f"VERIS Version: {args.veris_version}")
    # The same metadata checks run against both the config file and the
    # generated mappings JSON.
    validate_json_mappings_metadata(
        args.config_location, args.attack_version, args.veris_version, args.metadata_version
    )
    validate_json_mappings_metadata(
        args.json_location, args.attack_version, args.veris_version, args.metadata_version
    )
    validate_spreadsheet_mappings_metadata(
        args.spreadsheet_location, args.attack_version, args.veris_version, args.metadata_version
    )
    print("\t[+] Metadata Validation passed")
    validate_mapping_entries(args.spreadsheet_location, args.attack_version)
    print("\t[+] Mappings Validation passed")
    print("[+] Finished Execution")
| [
"argparse.ArgumentParser",
"pathlib.Path",
"requests.get",
"src.create_mappings.get_sheets",
"json.load",
"src.create_mappings.get_sheet_by_name"
] | [((325, 366), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': 'desc'}), '(description=desc)\n', (348, 366), False, 'import argparse\n'), ((4285, 4336), 'src.create_mappings.get_sheet_by_name', 'get_sheet_by_name', (['spreadsheet_location', '"""Metadata"""'], {}), "(spreadsheet_location, 'Metadata')\n", (4302, 4336), False, 'from src.create_mappings import get_sheets, get_sheet_by_name\n'), ((6515, 6547), 'src.create_mappings.get_sheets', 'get_sheets', (['spreadsheet_location'], {}), '(spreadsheet_location)\n', (6525, 6547), False, 'from src.create_mappings import get_sheets, get_sheet_by_name\n'), ((2021, 2033), 'json.load', 'json.load', (['f'], {}), '(f)\n', (2030, 2033), False, 'import json\n'), ((518, 583), 'pathlib.Path', 'pathlib.Path', (['""".."""', '"""frameworks"""', '"""veris"""', '"""input"""', '"""config.json"""'], {}), "('..', 'frameworks', 'veris', 'input', 'config.json')\n", (530, 583), False, 'import pathlib\n'), ((810, 874), 'pathlib.Path', 'pathlib.Path', (['""".."""', '"""frameworks"""', '"""veris"""', '"""veris-mappings.xlsx"""'], {}), "('..', 'frameworks', 'veris', 'veris-mappings.xlsx')\n", (822, 874), False, 'import pathlib\n'), ((1092, 1156), 'pathlib.Path', 'pathlib.Path', (['""".."""', '"""frameworks"""', '"""veris"""', '"""veris-mappings.json"""'], {}), "('..', 'frameworks', 'veris', 'veris-mappings.json')\n", (1104, 1156), False, 'import pathlib\n'), ((1917, 1948), 'pathlib.Path', 'pathlib.Path', (['mappings_location'], {}), '(mappings_location)\n', (1929, 1948), False, 'import pathlib\n'), ((2122, 2229), 'requests.get', 'requests.get', (['"""https://raw.githubusercontent.com/vz-risk/VCDB/1.3.5/vcdb-labels.json"""'], {'verify': '(True)'}), "(\n 'https://raw.githubusercontent.com/vz-risk/VCDB/1.3.5/vcdb-labels.json',\n verify=True)\n", (2134, 2229), False, 'import requests\n'), ((2433, 2584), 'requests.get', 'requests.get', 
(['f"""https://raw.githubusercontent.com/mitre/cti/ATT%26CK-v{attack_version}/enterprise-attack/enterprise-attack.json"""'], {'verify': '(True)'}), "(\n f'https://raw.githubusercontent.com/mitre/cti/ATT%26CK-v{attack_version}/enterprise-attack/enterprise-attack.json'\n , verify=True)\n", (2445, 2584), False, 'import requests\n'), ((482, 500), 'pathlib.Path', 'pathlib.Path', (['path'], {}), '(path)\n', (494, 500), False, 'import pathlib\n'), ((774, 792), 'pathlib.Path', 'pathlib.Path', (['path'], {}), '(path)\n', (786, 792), False, 'import pathlib\n'), ((1056, 1074), 'pathlib.Path', 'pathlib.Path', (['path'], {}), '(path)\n', (1068, 1074), False, 'import pathlib\n')] |
# This file contains the list of API's for operations on ZTP
# @author : <NAME> (<EMAIL>)
from spytest import st
import apis.system.basic as basic_obj
import utilities.utils as utils_obj
import apis.system.switch_configuration as switch_conf_obj
import apis.system.interface as intf_obj
import apis.routing.ip as ip_obj
import apis.system.reboot as reboot_obj
import apis.system.boot_up as boot_up_obj
import datetime
wait_5 = 5
wait_10 = 10
wait_60 = 60
def show_ztp_status(dut, expect_reboot=False, cli_type=""):
    """
    Author: <NAME> (<EMAIL>)
    API to show ztp status.

    Runs 'sudo ztp status' (click) or 'show ztp-status' (klish) and folds the
    per-row CLI output into a single dict with keys: service, source, status,
    adminmode, timestamp, filenames, timestamps.

    :param dut: device under test handle
    :param expect_reboot: forwarded to st.show so the CLI call tolerates a
                          device reboot while the command runs
    :param cli_type: CLI flavour; resolved via st.get_ui_type, rest-* falls
                     back to klish
    :return: dict as described above (empty on unsupported cli_type)
    """
    cli_type = st.get_ui_type(dut, cli_type=cli_type)
    result = dict()
    cli_type = "klish" if cli_type in ["rest-put", "rest-patch"] else cli_type
    if cli_type not in ["click", "klish"]:
        st.error("UNSUPPORTED CLI TYPE")
        return result
    command = "sudo ztp status" if cli_type == "click" else "show ztp-status"
    # BUG FIX: expect_reboot was hard-coded to False here, silently ignoring
    # the parameter that callers (e.g. verify_ztp_status) pass explicitly.
    output = st.show(dut, command, expect_reboot=expect_reboot, type=cli_type)
    # NOTE(review): file_name/timestamps are single dicts shared by every
    # appended entry, and result["filenames"]/["timestamps"] are reset on
    # every row — downstream code relies on filenames[0] holding ALL files,
    # so this accumulation scheme is kept as-is.
    file_name = dict()
    timestamps = dict()
    #excluded_file_name = ["--sonic-mgmt--#"]
    if output:
        for row in output:
            result["filenames"] = list()
            result["timestamps"] = list()
            # The scalar fields keep the first non-empty value seen.
            if result.get("service"):
                pass
            else:
                result["service"] = row.get("service", "")
            if result.get("source"):
                pass
            else:
                result["source"] = row.get("source", "")
            if result.get("status"):
                pass
            else:
                result["status"] = row.get("status", "")
            if result.get("adminmode"):
                pass
            else:
                result["adminmode"] = row.get("adminmode", "")
            # timestamp deliberately keeps the LAST row's value.
            result["timestamp"] = row.get("timestamp", "")
            if row.get("filename"):
                if cli_type == "click":
                    # click prints "name: status" in a single column.
                    values = row["filename"].split(":")
                    file_name[values[0].strip()] = values[1].strip()
                    result["filenames"].append(file_name)
                elif cli_type == "klish":
                    file_name[row.get("filename")] = row.get("filestatus")
                    result["filenames"].append(file_name)
                if row.get("filetimestamp"):
                    timestamps.update({row.get("filename"):row.get("filetimestamp")})
                    result["timestamps"].append(timestamps)
    st.debug(result)
    return result
def verify_ztp_config_section_from_status(dut, file_names=list(), status="SUCCESS", cli_type=""):
    """
    Author: <NAME> (<EMAIL>)
    API to verify that every config-section file reached the expected status
    in 'show ztp status' output.

    :param dut: device under test handle
    :param file_names: list of config-section file names to check
    :param status: expected per-file status (default "SUCCESS")
    :param cli_type: CLI flavour; resolved via st.get_ui_type
    :return: True when all files match *status* (or file_names is empty),
             False on any mismatching or missing file entry.
    """
    cli_type = st.get_ui_type(dut, cli_type=cli_type)
    if not file_names:
        return True
    response = show_ztp_status(dut, cli_type=cli_type)
    for file_name in file_names:
        for names in response["filenames"]:
            # BUG FIX: the original overwrote its verdict flag on every
            # iteration, so only the LAST filenames dict decided the result
            # and earlier mismatches were lost; it also raised KeyError when
            # a file was absent.  Fail fast on any mismatch instead.
            if names.get(file_name) != status:
                return False
    return True
def _verify_ztp_status_with_retry(dut, retry_cnt, cli_type=""):
    cli_type = st.get_ui_type(dut, cli_type=cli_type)
    """
    Author: <NAME> (<EMAIL>)
    API to verify ZTP status with retry value
    :param dut:
    :param retry_cnt:
    :return:
    """
    # Counts consecutive "Not Started" observations; bail out once it
    # reaches retry_cnt.
    not_started_retry_cnt = 0
    st.log("Verifying the ZTP status with retry method ...")
    # Poll 'show ztp status' up to retry_cnt times, waiting 3s between
    # inconclusive observations.
    for _ in range(1, retry_cnt + 1):
        response = show_ztp_status(dut, cli_type=cli_type)
        if response["adminmode"] == "True":
            st.log("Found that admin mode as {}".format(response["adminmode"]))
            if response["service"] == "Inactive":
                # Service finished: FAILED/SUCCESS are terminal verdicts.
                st.log("Found that service as {}".format(response["service"]))
                if response["status"] == "FAILED":
                    st.log("Found that status as {}".format(response["status"]))
                    return False
                elif response["status"] == "SUCCESS":
                    st.log("Found that status as {}".format(response["status"]))
                    return True
            elif response["service"] == "Processing" or response["service"] == "Active Discovery":
                # Service still working: keep polling unless it failed or
                # never starts within the retry budget.
                st.log("Found that service as {}".format(response["service"]))
                if response["status"] == "IN-PROGRESS":
                    st.log("Found that status as {}".format(response["status"]))
                    st.wait(3)
                elif response["status"] == "FAILED":
                    st.log("Found that status as {}".format(response["status"]))
                    return False
                elif response["status"] == "Not Started":
                    st.log("Found that status as {}".format(response["status"]))
                    not_started_retry_cnt += 1
                    if not_started_retry_cnt >= retry_cnt:
                        return False
                    st.wait(3)
                else:
                    # Any other status while processing is treated as done.
                    return True
            elif response["service"] == "SUCCESS":
                st.log("Found that service as {}".format(response["service"]))
                return True
        else:
            # ZTP admin mode is disabled; caller is expected to enable it.
            st.log("Found that ZTP is disabled hence enabling it ..")
            return False
    return False
def poll_ztp_status(dut, status=["IN-PROGRESS", "Not Started"], iteration=40, retry=3, cli_type=""):
    """
    API to poll 'show ztp status' until its status is one of *status*.
    Author: <NAME> (<EMAIL>)
    :param dut: device under test handle
    :param status: acceptable status value(s); scalar or list
    :param iteration: maximum number of extra polling rounds
    :param retry: seconds to wait between polls
    :return: True once an expected status is observed, False on timeout.
    """
    cli_type = st.get_ui_type(dut, cli_type=cli_type)
    # Normalise to a list of strings so membership tests work uniformly.
    status = [str(entry) for entry in status] if isinstance(status, list) else [status]
    attempt = 0
    while True:
        response = show_ztp_status(dut, cli_type=cli_type)
        if response["status"] in status:
            st.log("Observed {} during polling ...".format(status))
            return True
        attempt += 1
        if attempt > iteration:
            st.log("Max polling interval {} exceeded ...".format(attempt))
            return False
        st.wait(retry)
# This function should be called after issuing the 'ztp run' command.
def verify_ztp_status(dut, retry_cnt=0, iteration=300, retry=3, expect_reboot=False, reboot_on_success=list(), cli_type=""):
    cli_type = st.get_ui_type(dut, cli_type=cli_type)
    """
    Author: <NAME> (<EMAIL>)
    API to verify ZTP status
    :param dut:
    :param retry_cnt:
    :return:
    """
    # Tolerate a few empty CLI responses (e.g. around a reboot) before
    # declaring failure.
    retry_count_if_no_response = 0
    if retry_cnt:
        # Fixed-retry strategy delegates to the helper above.
        return _verify_ztp_status_with_retry(dut, retry_cnt, cli_type=cli_type)
    else:
        st.log("Verifying the ZTP status with iteration method ...")
        # Poll up to `iteration` times, waiting `retry` seconds between
        # inconclusive observations.
        for _ in range(1, iteration + 1):
            response = show_ztp_status(dut, expect_reboot=expect_reboot, cli_type=cli_type)
            if not response:
                st.log("Observed no response in ZTP status ... retrying {} .. ".format(retry_count_if_no_response))
                if retry_count_if_no_response > 5:
                    st.error("show ztp status returned empty data...")
                    return False
                st.wait(retry)
                retry_count_if_no_response += 1
                continue
            if "service" not in response or "status" not in response or "adminmode" not in response:
                st.log("Values of service or status or adminmode is not populated yet, retrying ...")
                st.wait(10)
                continue
            if response["adminmode"] == "True":
                if "service" not in response or "status" not in response or "adminmode" not in response:
                    st.log("Values of service or status or adminmode is not populated yet, retrying ...")
                    st.wait(retry)
                else:
                    # return verify_ztp_status(dut)
                    st.log("Found that admin mode as {}".format(response["adminmode"]))
                    if response["service"] == "Inactive":
                        # Service finished: FAILED/SUCCESS are terminal.
                        st.log("Found that service as {}".format(response["service"]))
                        if response["status"] == "FAILED":
                            st.log("Found that status as {}".format(response["status"]))
                            return False
                        elif response["status"] == "SUCCESS":
                            st.log("Found that status as {}".format(response["status"]))
                            return True
                        else:
                            st.log("ZTP status is not in expected values , retrying...")
                            st.wait(retry)
                        # return verify_ztp_status(dut)
                    elif response["service"] == "Processing" or response["service"] == "Active Discovery":
                        st.log("Found that service as {}".format(response["service"]))
                        if response["status"] == "IN-PROGRESS":
                            st.log("Found that status as {}".format(response["status"]))
                            st.log("Files - {}".format(response["filenames"]))
                            # Early success: a file listed in reboot_on_success
                            # already finished, so the device is about to reboot.
                            if reboot_on_success and "filenames" in response and response["filenames"]:
                                reboot_flag = list(reboot_on_success) if isinstance(reboot_on_success, list) else [reboot_on_success]
                                if len(response["filenames"]) > 0:
                                    filenames = response["filenames"][0]
                                    for filename in reboot_flag:
                                        if filename in filenames and filenames[filename] == "SUCCESS":
                                            return True
                            # klish: config-db / graph-service sections take long;
                            # give them an extra 5 minutes before the next poll.
                            if cli_type == "klish":
                                if len(response["filenames"]) > 0:
                                    for key,value in response["filenames"][0].items():
                                        if ("configdb-json" in key or "graphservice" in key) and value == "IN-PROGRESS":
                                            st.wait(300)
                            st.wait(retry)
                            # return verify_ztp_status(dut)
                        elif response["status"] == "FAILED":
                            st.log("Found that status as {}".format(response["status"]))
                            return False
                        elif response["status"] == "Not Started":
                            st.log("Found that status as {}".format(response["status"]))
                            st.wait(retry)
                            # return verify_ztp_status(dut)
                        elif response["status"] == "SUCCESS":
                            st.log("Found that status as {}".format(response["status"]))
                            st.wait(retry)
                            # return verify_ztp_status(dut)
                        else:
                            st.log("ZTP status is not in expected values, retrying...")
                            st.wait(retry)
                    elif response["service"] == "SUCCESS":
                        st.log("Found that service as {}".format(response["service"]))
                        return True
            else:
                # Admin mode disabled: enable ZTP and keep polling.
                st.log("Found that ZTP is disabled hence enabling it ..")
                ztp_operations(dut, "enable")
                # ztp_operations(dut, "run")
                # return verify_ztp_status(dut)
    return False
def get_ztp_timestamp_obj(ztp_timestamp):
    """
    Author: <NAME> (<EMAIL>)
    Parse a ZTP timestamp string ('YYYY-MM-DD HH:MM:SS') into a datetime.
    Logs the error and returns None when the string does not match.
    :param ztp_timestamp: timestamp string from 'show ztp status'
    :return: datetime.datetime or None
    """
    timestamp_format = '%Y-%m-%d %H:%M:%S'
    try:
        parsed = datetime.datetime.strptime(ztp_timestamp, timestamp_format)
    except ValueError as err:
        st.error(err)
        return None
    return parsed
def enable_ztp_if_disabled(dut, iteration=5, delay=1, cli_type=""):
    cli_type = st.get_ui_type(dut, cli_type=cli_type)
    """
    API to enable ztp if it is disabled, added check for enable in polling mechanism
    Author: <NAME> (<EMAIL>)
    :param dut:
    :param iteration:
    :param delay:
    :return:
    """
    # Phase 1: poll until admin mode is observed as disabled, then issue
    # the enable operation (or give up after `iteration` polls).
    i = 1
    while True:
        response = show_ztp_status(dut, cli_type=cli_type)
        if "adminmode" in response and response["adminmode"] != "True":
            st.log("Enabling ZTP ...")
            ztp_operations(dut, "enable")
            break
        if i > iteration:
            st.log("ZTP admin mode not found after max iterations ...")
            break
        i += 1
        st.wait(delay)
    # Phase 2: poll until admin mode reads back as enabled.
    i = 1
    while True:
        response = show_ztp_status(dut, cli_type=cli_type)
        if "adminmode" in response and response["adminmode"] == "True":
            st.log("Admin mode enabled at {} iteration".format(i))
            return True
        if i > iteration:
            st.log("Max iteration {} count reached ".format(i))
            return False
        i += 1
        st.wait(delay)
def ztp_operations(dut, operation, cli_type="", max_time=0):
    cli_type = st.get_ui_type(dut, cli_type=cli_type)
    """
    Author: <NAME> (<EMAIL>)
    API to do ZTP operations
    :param dut:
    :param operation: one of "run", "enable", "disable"
    :return:
    """
    # NOTE(review): for cli_type values other than click/klish, `command`
    # is never assigned and st.config below raises NameError — confirm
    # whether rest-* types can reach this function.
    if cli_type == "click":
        supported_opers = ["run", "enable", "disable"]
        if operation not in supported_opers:
            return False
        # run/disable prompt for confirmation, hence the -y flag.
        if operation in ["run", "disable"]:
            command = "ztp {} -y".format(operation)
        else:
            command = "ztp {}".format(operation)
    elif cli_type == "klish":
        # klish only toggles enable; disable is the "no" form.
        no_form = "no" if operation == "disable" else ""
        command = "{} ztp enable".format(no_form)
    st.config(dut, command, type=cli_type, max_time=max_time)
def ztp_push_full_config(dut, cli_type=""):
    """
    NOT USED ANYWHERE
    Author: <NAME> (<EMAIL>)
    API to push full config: uploads a ZTP plugin JSON pointing at a local
    copy of the current running config, runs ZTP and shows its status twice.
    :param dut:
    :return:
    """
    cli_type = st.get_ui_type(dut, cli_type=cli_type)
    config_dbjson = "config_db.json"
    config_file = "ztp_data_local.json"
    plugin_file_path = "/etc/sonic/ztp/{}".format(config_file)
    source = "/tmp/{}".format(config_dbjson)
    # ZTP plugin descriptor: fetch config_db.json from the local file URL
    # and save the configuration after applying it.
    plugin_json = {config_dbjson: {"url": {"source": "file://{}".format(source),
                                           "timeout": 300}, "save-config": "true"}}
    file_path = basic_obj.write_to_json_file(plugin_json)
    st.upload_file_to_dut(dut, file_path, plugin_file_path)
    # Snapshot the running config and stage it where the plugin expects it.
    running_config = switch_conf_obj.get_running_config(dut)
    file_path = basic_obj.write_to_json_file(running_config)
    st.upload_file_to_dut(dut, file_path, source)
    st.wait(wait_5)
    ztp_operations(dut, "run")
    # Check status once after a minute and again 10 seconds later.
    st.wait(wait_60)
    show_ztp_status(dut, cli_type=cli_type)
    st.wait(wait_10)
    show_ztp_status(dut, cli_type=cli_type)
def prepare_and_write_option_67_config_string(ssh_conn_obj, static_ip, config_path, config_file, dhcp_config_file, type="http"):
    """
    NOT USED ANYWHERE
    Author: <NAME> (<EMAIL>)
    Common function to write option 67 (bootfile-name URL) to the DHCP
    server configuration file.
    :param ssh_conn_obj: SSH connection to the DHCP server
    :param static_ip: server IP used in the generated URL
    :param config_path: path component of the URL
    :param config_file: file name component of the URL
    :param dhcp_config_file: DHCP server config file to update
    :param type: URL scheme ("http", "tftp" or "ftp")
    :return:
    """
    option_67_config = "option bootfile-name"
    # Note: http deliberately joins ip+path without a slash, matching the
    # format used elsewhere in this module.
    url_templates = {
        "http": "http://{}{}/{}",
        "tftp": "tftp://{}/{}/{}",
        "ftp": "ftp://{}/{}/{}",
    }
    template = url_templates.get(type)
    if template is not None:
        config_json_url = template.format(static_ip, config_path, config_file)
    option_67_config_string = '{} "{}";'.format(option_67_config, config_json_url)
    written = basic_obj.write_update_file(ssh_conn_obj, option_67_config,
                                          option_67_config_string, dhcp_config_file)
    if not written:
        st.log("Written content in file {} not found".format(dhcp_config_file))
        st.report_fail("content_not_found")
def write_option_67_to_dhcp_server(ssh_conn_obj, data):
    """
    NOT USER ANY WHERE
    Write DHCP option 67 (bootfile-name URL) built from *data* into the DHCP
    server config file, restart the DHCP service and verify it is running.
    :param ssh_conn_obj: SSH connection to the DHCP server
    :param data: attribute container with type/static_ip/config_path/
                 config_file/dhcp_config_file/dhcp_service_name/action/
                 device/dhcpd_pid
    :return:
    """
    option_67_config = "option bootfile-name"
    # http deliberately joins ip+path without a slash (module convention).
    url_templates = {
        "http": "http://{}{}/{}",
        "tftp": "tftp://{}/{}/{}",
        "ftp": "ftp://{}/{}/{}",
    }
    template = url_templates.get(data.type)
    if template is not None:
        config_json_url = template.format(data.static_ip, data.config_path, data.config_file)
    option_67_config_string = '{} "{}";'.format(option_67_config, config_json_url)
    written = basic_obj.write_update_file(ssh_conn_obj, option_67_config,
                                          option_67_config_string, data.dhcp_config_file)
    if not written:
        st.log("Written content in file {} not found".format(data.dhcp_config_file))
        st.report_fail("content_not_found")
    # Restart the DHCP service so the new option takes effect, then verify.
    basic_obj.service_operations(ssh_conn_obj, data.dhcp_service_name, data.action, data.device)
    if not verify_dhcpd_service_status(ssh_conn_obj, data.dhcpd_pid):
        st.log("{} service not running".format(data.dhcp_service_name))
        st.report_fail("service_not_running", data.dhcp_service_name)
def config_and_verify_dhcp_option(ssh_conn_obj, dut, ztp_params, data, expect_reboot=False, reboot_on_success=list(), cli_type=""):
    """
    Common function to configure DHCP option along with status / logs verification.

    Stages the config file on the DHCP server, programs option 67 when
    requested, triggers ZTP on the DUT (reboot or 'ztp run'), then verifies
    ZTP status, filename logs and optional error-log patterns.
    Author: <NAME> (<EMAIL>)
    :param ssh_conn_obj: SSH connection to the DHCP server
    :param dut: device under test handle
    :param ztp_params: paths/ports used for staging and inband checks
    :param data: attribute container driving every optional step below
    :return:
    """
    cli_type = st.get_ui_type(dut,cli_type=cli_type)
    cli_type = "klish" if cli_type in ["rest-put", "rest-patch"] else cli_type
    retry_count = data.retry_count if "retry_count" in data and data.retry_count else 0
    iteration = data.iteration if "iteration" in data and data.iteration else 300
    delay = data.delay if "delay" in data and data.delay else 3
    if "func_name" in data:
        syslog_file_names = ["syslog_1_{}".format(data.func_name), "syslog_{}".format(data.func_name)]
    # basic_obj.copy_config_db_to_temp(dut, data.config_db_path, data.config_db_temp)
    # Stage the ZTP config payload: write it as raw text, skip it entirely
    # (EoL scenario), or serialize the dict to a JSON file.
    if "config_file_type" in data and data.config_file_type == "text":
        file_path = "/tmp/file_temp.json"
        basic_obj.write_to_file(ssh_conn_obj, data.json_content, file_path, device="server")
    elif "config_file_type" in data and data.config_file_type == "EoL":
        file_path = ""
    else:
        file_path = basic_obj.write_to_json_file(data.json_content)
    if file_path:
        destination_path = "{}{}/{}".format(ztp_params.home_path, ztp_params.config_path, data.config_file)
        basic_obj.copy_file_from_client_to_server(ssh_conn_obj, src_path=file_path, dst_path=destination_path)
    # Optionally stage the DUT's config_db.json alongside the plugin file.
    if "config_db_location" in data and data.config_db_location == "json":
        st.download_file_from_dut(dut, data.config_db_temp, file_path)
        destination_path = "{}{}/{}".format(ztp_params.home_path, ztp_params.config_path, data.config_db_file_name)
        basic_obj.copy_file_from_client_to_server(ssh_conn_obj, src_path=file_path, dst_path=destination_path)
    # Negative scenario: corrupt the staged JSON on purpose.
    if "scenario" in data and data.scenario == "invalid-json":
        st.log("Writing invalid content to make invalid json ...")
        basic_obj.write_to_file_to_line(ssh_conn_obj, ",", 5, destination_path, "server")
    if data.option_type == "67":
        # Program DHCP option 67 (bootfile-name) with the staged file's URL
        # and restart the DHCP service.
        st.log("Creating {} file on DHCP server ...".format(data.config_file))
        data.search_pattern = r'\s*option\s+bootfile-name\s*\S*\s*"\S+";'
        data.option_string = "option bootfile-name"
        if data.type == "http":
            data.option_url = "http://{}{}/{}".format(data.static_ip, data.config_path, data.config_file)
        elif data.type == "tftp":
            data.option_url = "tftp://{}/{}/{}".format(data.static_ip, data.config_path, data.config_file)
        elif data.type == "ftp":
            data.option_url = "ftp://{}/{}/{}".format(data.static_ip, data.config_path, data.config_file)
        write_option_to_dhcp_server(ssh_conn_obj, data)
        basic_obj.service_operations(ssh_conn_obj, data.dhcp_service_name, data.action, data.device)
        if not verify_dhcpd_service_status(ssh_conn_obj, data.dhcpd_pid):
            st.log("{} service not running".format(data.dhcp_service_name))
            st.report_fail("service_not_running", data.dhcp_service_name)
        # write_option_67_to_dhcp_server(ssh_conn_obj, data)
    # klish always goes through a reboot to (re)trigger ZTP.
    data.device_action = "reboot" if cli_type == "klish" else data.device_action
    if data.device_action == "reboot":
        reboot_type = data.reboot_type if "reboot_type" in data and data.reboot_type else "normal"
        # Remove config_db so ZTP kicks in on boot.
        basic_obj.remove_file(dut, data.config_db_path)
        st.reboot(dut, reboot_type, skip_port_wait=True)
        st.wait_system_status(dut, 500)
    elif data.device_action == "run":
        ztp_operations(dut, data.device_action)
    # Inband scenario: force traffic over the inband port by downing OOB.
    if "band_type" in data and data.band_type=="inband":
        if not basic_obj.poll_for_system_status(dut):
            st.log("Sytem is not ready ..")
            st.report_env_fail("system_not_ready")
        if not basic_obj.check_interface_status(dut, ztp_params.oob_port,"up"):
            basic_obj.ifconfig_operation(dut, ztp_params.oob_port, "down")
        interface_status = basic_obj.check_interface_status(dut, ztp_params.inband_port, "up")
        if interface_status is not None:
            if not interface_status:
                intf_obj.interface_noshutdown(dut, ztp_params.inband_port, cli_type=cli_type)
    # Optional stop/start cycle of the ztp service itself.
    if "service" in data and data.service == "disable":
        basic_obj.service_operations_by_systemctl(dut, "ztp", "stop")
        if basic_obj.verify_service_status(dut, "ztp"):
            st.log("ZTP status is not stopped")
            st.report_fail("service_not_stopped", "ztp")
        basic_obj.service_operations_by_systemctl(dut, "ztp", "start")
    if not poll_ztp_status(dut, ["IN-PROGRESS", "Not Started", "SUCCESS"], cli_type=cli_type):
        st.report_fail("ztp_max_polling_interval")
    if "check" in data and data.check == "not":
        # Negative check: ZTP is expected to FAIL; success is the error.
        if verify_ztp_status(dut, retry_count, iteration, delay, cli_type=cli_type):
            if "logs_path" in data and "func_name" in data:
                capture_syslogs(dut, data.logs_path, syslog_file_names)
            st.log("ZTP status verification failed")
            st.report_fail("ztp_status_verification_failed")
    else:
        st.log("Iteration count {}".format(iteration))
        st.log("REBOOT ON SUCCESS - {}".format(reboot_on_success))
        if reboot_on_success:
            # configdb-json sections reboot the device on success; wait for
            # that reboot before re-checking status.
            if "configdb-json" in reboot_on_success:
                st.wait_system_reboot(dut)
                st.wait_system_status(dut, 300)
            result = verify_ztp_status(dut, retry_count, iteration, delay, expect_reboot=expect_reboot, reboot_on_success=reboot_on_success, cli_type=cli_type)
        else:
            result = verify_ztp_status(dut, retry_count, iteration, delay, expect_reboot=expect_reboot, cli_type=cli_type)
        if not result:
            if "logs_path" in data and "func_name" in data:
                capture_syslogs(dut, data.logs_path, syslog_file_names)
            st.log("ZTP status verification failed")
            st.report_fail("ztp_status_verification_failed")
        if reboot_on_success:
            output = show_ztp_status(dut, cli_type=cli_type)
            if output["status"] != "SUCCESS":
                st.wait(300, "Waiting for device to reboot after success...")
                st.wait_system_status(dut, 300)
            # st.wait_system_reboot(dut)
            if not verify_ztp_status(dut, retry_count, iteration, delay, cli_type=cli_type):
                if "logs_path" in data and "func_name" in data:
                    capture_syslogs(dut, data.logs_path, syslog_file_names)
                st.log("ZTP status verification failed")
                st.report_fail("ztp_status_verification_failed")
        st.banner(boot_up_obj.sonic_installer_list(dut))
    verify_ztp_filename_logs(dut, data)
    # Optionally assert expected error-log patterns in both ZTP log files.
    if "ztp_log_string" in data and data.ztp_log_string:
        if not basic_obj.poll_for_error_logs(dut, data.ztp_log_path, data.ztp_log_string):
            st.log("ZTP log {} verification failed for message {}".format(data.ztp_log_path, data.ztp_log_string))
            if not basic_obj.poll_for_error_logs(dut, data.ztp_log_path_1, data.ztp_log_string):
                st.log("ZTP log {} verification failed for message {}".format(data.ztp_log_path_1, data.ztp_log_string))
                st.report_fail("ztp_log_verification_failed", data.ztp_log_path_1, data.ztp_log_string)
    if "result" in data and data.result == "pass":
        st.report_pass("test_case_passed")
def write_option_239_to_dhcp_server(ssh_conn_obj, data):
    """Write DHCP option 239 (provision-url) into the DHCP server config file.

    Comments out any existing option 67 (bootfile-name) line so it cannot
    conflict, then replaces the provision-url line with a URL built from
    the server IP, config path and provisioning script name in *data*.

    :param ssh_conn_obj: SSH connection handle to the DHCP server
    :param data: dict-like with server_ip, config_path, provision_script
                 and dhcp_config_file keys
    """
    st.log("##################### Writing option 239 to dhcp config file ... ##################")
    config_file = data["dhcp_config_file"]
    option_key = 'option provision-url ='
    script_url = "http://{}{}/{}".format(data["server_ip"], data["config_path"], data["provision_script"])
    replacement = '{} "{}";'.format(option_key, script_url)
    # Neutralize any bootfile-name (option 67) entry first.
    basic_obj.write_update_file(ssh_conn_obj, "option bootfile-name",
                                "##", config_file)
    written = basic_obj.write_update_file(ssh_conn_obj, option_key,
                                          replacement, config_file)
    if not written:
        st.log("Written content in file {} not found".format(config_file))
        st.report_fail("content_not_found")
def write_option_225_to_dhcp_server(ssh_conn_obj, data):
    """Write DHCP option 225 (minigraph URL) into the DHCP server config file.

    Comments out any option 67 (bootfile-name) and option 239
    (provision-url) lines first, then replaces the option-225 line with
    the minigraph path taken from *data*.

    :param ssh_conn_obj: SSH connection handle to the DHCP server
    :param data: dict-like with minigraph_path and dhcp_config_file keys
    """
    config_file = data["dhcp_config_file"]
    option_key = "option option-225 ="
    replacement = '{} "{}";'.format(option_key, data["minigraph_path"])
    # Disable the competing options before writing option 225.
    for conflicting in ("option bootfile-name", 'option provision-url ='):
        basic_obj.write_update_file(ssh_conn_obj, conflicting,
                                    "##", config_file)
    if not basic_obj.write_update_file(ssh_conn_obj, option_key,
                                       replacement, config_file):
        st.log("Written content in file {} not found".format(config_file))
        st.report_fail("content_not_found")
def config_and_verify_option_225(ssh_conn_obj, dut, ztp_params, data, cli_type=""):
    """
    Configure DHCP option 225 (minigraph URL) on the DHCP server, trigger ZTP
    on the DUT (via reboot or 'ztp run'), and verify ZTP status and logs.

    :param ssh_conn_obj: SSH connection handle to the DHCP server
    :param dut: DUT object
    :param ztp_params: ZTP parameters (unused directly here; kept for API parity)
    :param data: dict-like test parameters (option_type, minigraph_path,
                 dhcp_service_name, device_action, log paths, ...)
    :param cli_type: UI type; resolved via st.get_ui_type when empty
    """
    cli_type = st.get_ui_type(dut, cli_type=cli_type)
    if data.option_type == "225":
        if "func_name" in data:
            # Names used when capturing /var/log/syslog.1 and /var/log/syslog on failure.
            syslog_file_names = ["syslog_1_{}".format(data.func_name), "syslog_{}".format(data.func_name)]
        # Option 225 is written through the generic write_option_to_dhcp_server
        # helper, driven by this regex/option-string pair.
        data.search_pattern = r'\s*option option-225\s*\S*\s*"\S+";'
        data.option_string = "option option-225 " # "option dhcp6.boot-file-url "
        data.option_url = data.minigraph_path
        # Clear options 67 and 239 so only option 225 remains active.
        data.option_type = "option_67"
        clear_options_from_dhcp_server(ssh_conn_obj, data)
        data.option_type = "option_239"
        clear_options_from_dhcp_server(ssh_conn_obj, data)
        write_option_to_dhcp_server(ssh_conn_obj, data)
        # write_option_225_to_dhcp_server(ssh_conn_obj, data)
        basic_obj.service_operations(ssh_conn_obj, data.dhcp_service_name, data.action, data.device)
        if not verify_dhcpd_service_status(ssh_conn_obj, data.dhcpd_pid):
            st.log("{} service not running".format(data.dhcp_service_name))
            st.report_fail("service_not_running", data.dhcp_service_name)
        # klish always goes through a reboot to kick off ZTP.
        data.device_action = "reboot" if cli_type == "klish" else data.device_action
        if data.device_action == "reboot":
            reboot_type = data.reboot_type if "reboot_type" in data and data.reboot_type else "normal"
            # Removing config_db forces ZTP to run on the next boot.
            basic_obj.remove_file(dut, data.config_db_path)
            st.reboot(dut, reboot_type, skip_port_wait=True)
            st.wait_system_status(dut, 400)
        elif data.device_action == "run":
            ztp_operations(dut, data.device_action)
        if not verify_ztp_status(dut, cli_type=cli_type):
            if "logs_path" in data and "func_name" in data:
                capture_syslogs(dut, data.logs_path, syslog_file_names)
            st.log("ZTP status verification failed")
            st.report_fail("ztp_status_verification_failed")
        verify_ztp_filename_logs(dut, data)
        if "ztp_log_string" in data and data.ztp_log_string:
            # Check the rotated log first; fall back to the current log before failing.
            if not basic_obj.poll_for_error_logs(dut, data.ztp_log_path, data.ztp_log_string):
                st.log("ZTP log {} verification failed for message {}".format(data.ztp_log_path, data.ztp_log_string))
                if not basic_obj.poll_for_error_logs(dut, data.ztp_log_path_1, data.ztp_log_string):
                    st.log("ZTP log {} verification failed for message {}".format(data.ztp_log_path_1,
                                                                                  data.ztp_log_string))
                    st.report_fail("ztp_log_verification_failed", data.ztp_log_path_1, data.ztp_log_string)
def verify_ztp_attributes(dut, property, value, cli_type=""):
    """
    This is to verify the ztp attributes with the provided value
    Author: <NAME> (<EMAIL>)
    :param dut: dut object
    :param property: status, service, adminmode, filenames, timestamp, source
    :param value: This is string except filenames, for file names {'03-test-plugin': 'Not Started', '02-test-plugin':
    'Not Started', 'configdb-json': 'Not Started'}
    :return: boolean
    """
    # Docstring moved above the first statement so it is a real docstring.
    cli_type = st.get_ui_type(dut, cli_type=cli_type)
    response = show_ztp_status(dut, cli_type=cli_type)
    if not response:
        return False
    if property not in response:
        return False
    if property == "filenames":
        filenames = response["filenames"][0]
        # Fix: iterate key/value pairs explicitly; iterating the dict
        # directly yields only keys and breaks the tuple unpacking.
        for filename, status in filenames.items():
            if value[filename] != status:
                return False
    else:
        if response[property] != value:
            return False
    return True
def verify_ztp_filename_logs(dut, data, status="SUCCESS", condition="positive"):
    """
    Author: <NAME> (<EMAIL>)
    API to verify logs
    Polls the ZTP log (and its rotated copy) for a per-file result message.

    :param dut: DUT object
    :param data: dict-like with file_names, ztp_log_path, ztp_log_path_1 and
                 optionally log_msg (format template) and match
    :param status: expected result string substituted into the log template
    :param condition: "positive" fails the test on a missing log; any other
                      value treats a miss as acceptable (negative check)
    :return: True when the expected log line is found
    """
    filenames = list([str(e) for e in data.file_names]) if isinstance(data.file_names, list) else [data.file_names]
    log_msg = data.log_msg if "log_msg" in data and data.log_msg else "Checking configuration section {} result: {}"
    match = data.match if "match" in data else ""
    # NOTE(review): this returns True as soon as the first filename's log line
    # is found (or missed, in the secondary branch) — remaining filenames are
    # never checked. Confirm whether all entries were meant to be verified.
    for file_name in filenames:
        log_string_1 = log_msg.format(file_name, status)
        st.log(log_string_1)
        if not basic_obj.poll_for_error_logs(dut, data.ztp_log_path, log_string_1, match=match):
            if condition == "positive":
                st.log("ZTP log {} verification failed for message {}".format(data.ztp_log_path, log_string_1))
                # Fall back to the rotated log before declaring failure.
                if not basic_obj.poll_for_error_logs(dut, data.ztp_log_path_1, log_string_1, match=match):
                    st.log("ZTP log {} verification failed for message {}".format(data.ztp_log_path_1,
                                                                                  log_string_1))
                    st.report_fail("ztp_log_verification_failed", data.ztp_log_path_1, log_string_1)
                else:
                    return True
        else:
            return True
def config_ztp_backdoor_options(dut, ztp_cfg=None, dut_ztp_cfg_file="/host/ztp/ztp_cfg.json"):
    """
    Author: <NAME> (<EMAIL>)
    Function to enable backward options for ZTP
    :param dut: DUT object
    :param ztp_cfg: ZTP config dict written as JSON; defaults to
                    {"admin-mode": True, "restart-ztp-interval": 30}
    :param dut_ztp_cfg_file: destination path of the config file on the DUT
    :return:
    """
    # Fix: avoid a shared mutable default argument; build the default per call.
    if ztp_cfg is None:
        ztp_cfg = {"admin-mode": True, "restart-ztp-interval": 30}
    ztp_cfg_file = basic_obj.write_to_json_file(ztp_cfg)
    st.upload_file_to_dut(dut, ztp_cfg_file, dut_ztp_cfg_file)
def ztp_status_verbose(dut, cli_type=""):
    """
    API to get the ztp status verbose output with filename and its details as we are getting the status in ztp status API
    Author: <NAME> (<EMAIL>)
    :param dut:
    :return:
    """
    # Docstring moved above the first statement so it is a real docstring.
    cli_type = st.get_ui_type(dut, cli_type=cli_type)
    command = "sudo ztp status -v" if cli_type == "click" else "show ztp-status"
    if cli_type == "click":
        return st.show(dut, command, type=cli_type)
    # Non-click UIs reuse the structured 'show ztp status' parser.
    return show_ztp_status(dut, cli_type=cli_type)
def verify_plugin_chronological_order(dut, cli_type=""):
    """
    API to verify the plugin chronological order of ztp status
    Author: <NAME> (<EMAIL>)
    :param dut:
    :return: True when adjacent timestamps are non-decreasing, else False
    """
    # Docstring moved above the first statement so it is a real docstring.
    cli_type = st.get_ui_type(dut, cli_type=cli_type)
    st.log("Verifying timestamp for chronological order ... ")
    output = ztp_status_verbose(dut, cli_type=cli_type)
    data = list()
    if cli_type == "click":
        for val in output:
            data.append(val["filetimestamp"])
    else:
        # Non-click output nests timestamps inside per-file dicts.
        for val in output["timestamps"]:
            for _, timestamp in val.items():
                data.append(timestamp)
    data.sort()
    # Compare each adjacent pair; date_time_delta presumably returns signed
    # (days, seconds)-style components — negative means out of order. TODO confirm.
    for i, _ in enumerate(data):
        if i + 1 < len(data):
            result = utils_obj.date_time_delta(data[i], data[i + 1], True)
            st.log(result)
            if result[0] < 0 or result[1] < 0:
                st.log("Observed timestamp difference is not as expected ...")
                return False
    return True
def verify_dhclient_on_interface(dut, search_string, interface, expected_count=2):
    """
    API to verify DHCLIENT on provided interface using ps aux command
    Author: <NAME> (<EMAIL>)
    :param dut: DUT object
    :param search_string: pattern passed to ps aux filtering
    :param interface: interface whose dhclient pid file is expected
    :param expected_count: retained for API compatibility (count check disabled)
    :return: True when a dhclient bound to *interface* is found
    """
    st.log("Verifying dhclient for {} interface".format(interface))
    ps_aux = basic_obj.get_ps_aux(dut, search_string)
    st.log("Observed {} DHCLIENT entries on {} interface".format(len(ps_aux), interface))
    if not ps_aux:
        st.error("DHCLIENT process not found on DUT ...")
        return False
    pid_marker = "/run/dhclient.{}.pid".format(interface)
    for process in ps_aux:
        if pid_marker in process["command"]:
            st.log("Required dhclient is found ...")
            return True
    return False
def create_required_folders(conn_obj, path_list):
    """
    API to create folders as per the provided path in bulk
    :param conn_obj: connection object to the server
    :param path_list: a single path string or an iterable of paths
    :return:
    """
    # Fix: isinstance instead of `type(...) is str`; drop redundant list() wrapper.
    if isinstance(path_list, str):
        path_list = [path_list]
    else:
        path_list = [str(e) for e in path_list]
    for path in path_list:
        basic_obj.make_dir(conn_obj, path, "server")
        # NOTE(review): 777 presumably so ZTP downloads can be served/written
        # without ownership issues — confirm against server setup.
        basic_obj.change_permissions(conn_obj, path, 777, "server")
def config_dhcpv6_options(ssh_conn_obj, ztp_params, config_params, options=dict(), cli_type=""):
    """
    Common function to configure dhcpv6 options and verify the result on both inband and out of band interfaces
    :param ssh_conn_obj: SSH connection handle to the DHCP server
    :param ztp_params: ZTP parameters (config_path used for the served URL)
    :param config_params: dict-like test parameters (dut, static_ip, ztp_file,
                          dhcp6 service details, log paths, ...)
    :param options: optional flags: expect_reboot, reboot_on_success
    :param cli_type: UI type; resolved via st.get_ui_type when empty
    :return:
    """
    cli_type = st.get_ui_type(config_params.dut, cli_type=cli_type)
    # Polling knobs for verify_ztp_status, with defaults when unset/falsy.
    retry_count = config_params.retry_count if "retry_count" in config_params and config_params.retry_count else 0
    iteration = config_params.iteration if "iteration" in config_params and config_params.iteration else 300
    delay = config_params.delay if "delay" in config_params and config_params.delay else 3
    expect_reboot = True if "expect_reboot" in options and options ["expect_reboot"] else False
    st.log(config_params)
    if "func_name" in config_params:
        # Names used when capturing syslogs on failure.
        # NOTE(review): syslog_file_names is only bound when func_name is
        # present; the failure paths below also gate on func_name, keep in sync.
        syslog_file_names = ["syslog_1_{}".format(config_params.func_name), "syslog_{}".format(config_params.func_name)]
    if "json_content" in config_params:
        # Stage the ZTP JSON on the server under the HTTP-served config path.
        file_path = basic_obj.write_to_json_file(config_params.json_content)
        st.log(file_path)
        if file_path:
            destination_path = "{}{}/{}".format(config_params.home_path, ztp_params.config_path, config_params.ztp_file)
            st.log(destination_path)
            basic_obj.copy_file_from_client_to_server(ssh_conn_obj, src_path=file_path, dst_path=destination_path)
    # Option 59 carries the DHCPv6 boot-file URL (IPv6 literal in brackets).
    config_params.option_59_url = "http://[{}]{}/{}".format(config_params.static_ip, ztp_params.config_path, config_params.ztp_file)
    config_params.search_pattern = r'\s*option\s+dhcp6.boot-file-url\s+"\S+";'
    write_option_59_to_dhcp_server(ssh_conn_obj, config_params)
    basic_obj.service_operations(ssh_conn_obj, config_params.dhcp6_service_name, "restart", "server")
    if not verify_dhcpd_service_status(ssh_conn_obj, config_params.dhcpd6_pid):
        st.log("{} service is running which is not expected".format(config_params.dhcp6_service_name))
        st.report_fail("service_running_not_expected", config_params.dhcp6_service_name)
    reboot_type = config_params.reboot_type if "reboot_type" in config_params and config_params.reboot_type else "normal"
    if "ztp_operation" in config_params:
        # klish always triggers ZTP via reboot.
        config_params.ztp_operation = "reboot" if cli_type == "klish" else config_params.ztp_operation
        if config_params.ztp_operation == "reboot":
            # Removing config_db forces ZTP to run on the next boot.
            basic_obj.remove_file(config_params.dut, config_params.config_db_path)
            st.reboot(config_params.dut, reboot_type, skip_port_wait=True)
        elif config_params.ztp_operation == "run":
            ztp_operations(config_params.dut, config_params.ztp_operation)
    else:
        st.log("ZTP operation is not mentioned hence rebooting the device ...")
        basic_obj.remove_file(config_params.dut, config_params.config_db_path)
        st.reboot(config_params.dut, reboot_type, skip_port_wait=True)
    if "reboot_on_success" in options and options["reboot_on_success"]:
        result = verify_ztp_status(config_params.dut, retry_count, iteration, delay, expect_reboot=expect_reboot, reboot_on_success=options["reboot_on_success"], cli_type=cli_type)
    else:
        result = verify_ztp_status(config_params.dut, retry_count, iteration, delay, expect_reboot=expect_reboot, cli_type=cli_type)
    if not result:
        if "logs_path" in config_params and "func_name" in config_params:
            capture_syslogs(config_params.dut, config_params.logs_path, syslog_file_names)
        st.log("ZTP status verification failed")
        st.report_fail("ztp_status_verification_failed")
    if "reboot_on_success" in options and options["reboot_on_success"]:
        reboot_obj.config_reload(config_params.dut)
        st.wait(5)
    if not ip_obj.ping(config_params.dut, config_params.static_ip, family="ipv6"):
        st.log("Pinging to DHCP server failed from DUT, issue either with DUT or server")
        # intf_obj.enable_dhcp_on_interface(config_params.dut, config_params.network_port, "v6")
    # Re-verify ZTP after the optional config reload / ping check.
    if not verify_ztp_status(config_params.dut, retry_count, iteration, delay, cli_type=cli_type):
        if "logs_path" in config_params and "func_name" in config_params:
            capture_syslogs(config_params.dut, config_params.logs_path, syslog_file_names)
        st.log("ZTP status verification failed")
        st.report_fail("ztp_status_verification_failed")
    verify_ztp_filename_logs(config_params.dut, config_params)
    if "ztp_log_string" in config_params and config_params.ztp_log_string:
        # Check the primary log; fall back to the rotated log before failing.
        if not basic_obj.poll_for_error_logs(config_params.dut, config_params.ztp_log_path, config_params.ztp_log_string):
            st.log("ZTP log {} verification failed for message {}".format(config_params.ztp_log_path, config_params.ztp_log_string))
            if not basic_obj.poll_for_error_logs(config_params.dut, config_params.ztp_log_path_1, config_params.ztp_log_string):
                st.log("ZTP log {} verification failed for message {}".format(config_params.ztp_log_path_1, config_params.ztp_log_string))
                st.report_fail("ztp_log_verification_failed", config_params.ztp_log_path_1, config_params.ztp_log_string)
    if "result" in config_params and config_params.result == "pass":
        st.report_pass("test_case_passed")
def write_option_59_to_dhcp_server(connection_obj, data):
    """
    API to add option 59 in DHCP config file.
    Deletes any line matching data.search_pattern, appends a fresh
    'option dhcp6.boot-file-url "<url>";' entry, then re-checks that the
    pattern is present and fails the test if it is not.

    :param connection_obj: SSH connection handle to the DHCP server
    :param data: dict-like with search_pattern, dhcp_config_file and option_59_url
    :return:
    """
    cfg_file = data.dhcp_config_file
    found_at = basic_obj.get_file_number_with_regex(connection_obj, data.search_pattern, cfg_file)
    new_entry = "'{} \"{}\";'".format("option dhcp6.boot-file-url ", data["option_59_url"])
    # NOTE(review): >= 0 here vs > 0 in write_option_to_dhcp_server — confirm
    # whether line number 0 is a valid match for delete_line_using_line_number.
    if found_at >= 0:
        basic_obj.delete_line_using_line_number(connection_obj, found_at, cfg_file)
    basic_obj.write_to_file(connection_obj, new_entry, cfg_file, device="server")
    found_at = basic_obj.get_file_number_with_regex(connection_obj, data.search_pattern, cfg_file)
    if found_at <= 0:
        st.log("Written content in file {} not found".format(data["dhcp_config_file"]))
        st.report_fail("content_not_found")
def write_option_to_dhcp_server(connection_obj, data):
    """
    Common API to write matched line with new one
    Deletes the line matching data.search_pattern (when found), appends
    'data.option_string "data.option_url";' and verifies the pattern now
    exists, failing the test otherwise.

    :param connection_obj: SSH connection handle to the DHCP server
    :param data: dict-like with search_pattern, option_string, option_url
                 and dhcp_config_file
    :return:
    """
    cfg_file = data.dhcp_config_file
    match_line = basic_obj.get_file_number_with_regex(connection_obj, data.search_pattern, cfg_file)
    st.log("#####LINE NUMBER{}".format(match_line))
    replacement = "'{} \"{}\";'".format(data.option_string, data.option_url)
    if int(match_line) > 0:
        basic_obj.delete_line_using_line_number(connection_obj, match_line, cfg_file)
    basic_obj.write_to_file(connection_obj, replacement, cfg_file, device="server")
    # Re-scan to confirm the new entry landed in the config file.
    match_line = basic_obj.get_file_number_with_regex(connection_obj, data.search_pattern, cfg_file)
    st.log("#####LINE NUMBER{}".format(match_line))
    if match_line <= 0:
        st.log("Written content in file {} not found".format(data["dhcp_config_file"]))
        st.report_fail("content_not_found")
def clear_options_from_dhcp_server(connection_obj, data):
    """Remove a previously written DHCP option line from the server config.

    data.option_type selects which option's regex to search for; when a
    matching line exists it is deleted from data.dhcp_config_file.

    :param connection_obj: SSH connection handle to the DHCP server
    :param data: dict-like with option_type and dhcp_config_file
    """
    st.log("Clearing OPTIONS from DHCP server")
    patterns = {
        "option_67": r'\s*option\s+bootfile-name\s*\S*\s*"\S+";',
        "option_239": r'\s*option\s+provision-url\s*\S*\s*"\S+";',
        "option_59": r'\s*option\s+dhcp6.boot-file-url\s+"\S+";',
        "option_225": r'\s*option option-225\s*\S*\s*"\S+";',
    }
    option = ""
    if "option_type" in data:
        # Unknown option types fall through to "" and are skipped below.
        option = patterns.get(data.option_type, "")
    st.log("OPTION is {}".format(option))
    st.log("CONFIG FILE is {}".format(data.dhcp_config_file))
    if option:
        line_number = basic_obj.get_file_number_with_regex(connection_obj,
                                                          option, data.dhcp_config_file)
        if line_number > 0:
            basic_obj.delete_line_using_line_number(connection_obj, line_number,
                                                    data.dhcp_config_file)
def verify_dhcpd_service_status(dut, process_id):
    """
    API to verify that the dhcpd/dhcpd6 service is running on the server
    by inspecting ps aux output for the expected -cf config argument.
    Author: <NAME> (<EMAIL>)
    :param dut: DUT/server object passed to get_ps_aux
    :param process_id: "dhcpd.pid" or "dhcpd6.pid" (pid file under /run/dhcp-server)
    :return: True when the matching dhcpd invocation is found, else False
    """
    st.log("Verifying DHCPD for {} ".format(process_id))
    dhcpd_pid = "/run/dhcp-server/{}".format(process_id)
    ps_aux = basic_obj.get_ps_aux(dut, dhcpd_pid, device="server")
    st.log(ps_aux)
    config_string = ""
    if process_id == "dhcpd6.pid":
        config_string = "-cf /etc/dhcp/dhcpd6.conf"
    if process_id == "dhcpd.pid":
        config_string = "-cf /etc/dhcp/dhcpd.conf"
    st.log("Verifying the output with {}".format(config_string))
    # NOTE(review): this membership test only works as a substring check if
    # get_ps_aux(device="server") returns a string; verify_dhclient_on_interface
    # treats get_ps_aux output as a list of dicts, in which case this `in`
    # check would never match — confirm the server-side return type.
    if config_string not in ps_aux:
        st.log("Required DHCPD service not found ...")
        return False
    return True
def capture_syslogs(dut, destination_path, file_name):
    """Download /var/log/syslog.1 and /var/log/syslog from the DUT.

    :param dut: DUT object
    :param destination_path: directory on the client to store the copies
    :param file_name: one name or a list of two names; each syslog file is
                      saved under destination_path with the matching name
    :return: True
    """
    names = list(file_name) if isinstance(file_name, list) else [file_name]
    for idx, source in enumerate(("/var/log/syslog.1", "/var/log/syslog")):
        target = "{}/{}".format(destination_path, names[idx])
        st.download_file_from_dut(dut, source, target)
    return True
| [
"spytest.st.debug",
"apis.system.basic.service_operations_by_systemctl",
"spytest.st.wait_system_reboot",
"spytest.st.config",
"spytest.st.reboot",
"apis.system.reboot.config_reload",
"spytest.st.wait_system_status",
"spytest.st.report_fail",
"spytest.st.wait",
"apis.system.basic.make_dir",
"api... | [((634, 672), 'spytest.st.get_ui_type', 'st.get_ui_type', (['dut'], {'cli_type': 'cli_type'}), '(dut, cli_type=cli_type)\n', (648, 672), False, 'from spytest import st\n'), ((969, 1026), 'spytest.st.show', 'st.show', (['dut', 'command'], {'expect_reboot': '(False)', 'type': 'cli_type'}), '(dut, command, expect_reboot=False, type=cli_type)\n', (976, 1026), False, 'from spytest import st\n'), ((2822, 2838), 'spytest.st.debug', 'st.debug', (['result'], {}), '(result)\n', (2830, 2838), False, 'from spytest import st\n'), ((2972, 3010), 'spytest.st.get_ui_type', 'st.get_ui_type', (['dut'], {'cli_type': 'cli_type'}), '(dut, cli_type=cli_type)\n', (2986, 3010), False, 'from spytest import st\n'), ((3630, 3668), 'spytest.st.get_ui_type', 'st.get_ui_type', (['dut'], {'cli_type': 'cli_type'}), '(dut, cli_type=cli_type)\n', (3644, 3668), False, 'from spytest import st\n'), ((3845, 3901), 'spytest.st.log', 'st.log', (['"""Verifying the ZTP status with retry method ..."""'], {}), "('Verifying the ZTP status with retry method ...')\n", (3851, 3901), False, 'from spytest import st\n'), ((5866, 5904), 'spytest.st.get_ui_type', 'st.get_ui_type', (['dut'], {'cli_type': 'cli_type'}), '(dut, cli_type=cli_type)\n', (5880, 5904), False, 'from spytest import st\n'), ((6735, 6773), 'spytest.st.get_ui_type', 'st.get_ui_type', (['dut'], {'cli_type': 'cli_type'}), '(dut, cli_type=cli_type)\n', (6749, 6773), False, 'from spytest import st\n'), ((12298, 12336), 'spytest.st.get_ui_type', 'st.get_ui_type', (['dut'], {'cli_type': 'cli_type'}), '(dut, cli_type=cli_type)\n', (12312, 12336), False, 'from spytest import st\n'), ((13425, 13463), 'spytest.st.get_ui_type', 'st.get_ui_type', (['dut'], {'cli_type': 'cli_type'}), '(dut, cli_type=cli_type)\n', (13439, 13463), False, 'from spytest import st\n'), ((14042, 14099), 'spytest.st.config', 'st.config', (['dut', 'command'], {'type': 'cli_type', 'max_time': 'max_time'}), '(dut, command, type=cli_type, max_time=max_time)\n', (14051, 14099), 
False, 'from spytest import st\n'), ((14285, 14323), 'spytest.st.get_ui_type', 'st.get_ui_type', (['dut'], {'cli_type': 'cli_type'}), '(dut, cli_type=cli_type)\n', (14299, 14323), False, 'from spytest import st\n'), ((14690, 14731), 'apis.system.basic.write_to_json_file', 'basic_obj.write_to_json_file', (['plugin_json'], {}), '(plugin_json)\n', (14718, 14731), True, 'import apis.system.basic as basic_obj\n'), ((14736, 14791), 'spytest.st.upload_file_to_dut', 'st.upload_file_to_dut', (['dut', 'file_path', 'plugin_file_path'], {}), '(dut, file_path, plugin_file_path)\n', (14757, 14791), False, 'from spytest import st\n'), ((14813, 14852), 'apis.system.switch_configuration.get_running_config', 'switch_conf_obj.get_running_config', (['dut'], {}), '(dut)\n', (14847, 14852), True, 'import apis.system.switch_configuration as switch_conf_obj\n'), ((14869, 14913), 'apis.system.basic.write_to_json_file', 'basic_obj.write_to_json_file', (['running_config'], {}), '(running_config)\n', (14897, 14913), True, 'import apis.system.basic as basic_obj\n'), ((14918, 14963), 'spytest.st.upload_file_to_dut', 'st.upload_file_to_dut', (['dut', 'file_path', 'source'], {}), '(dut, file_path, source)\n', (14939, 14963), False, 'from spytest import st\n'), ((14968, 14983), 'spytest.st.wait', 'st.wait', (['wait_5'], {}), '(wait_5)\n', (14975, 14983), False, 'from spytest import st\n'), ((15019, 15035), 'spytest.st.wait', 'st.wait', (['wait_60'], {}), '(wait_60)\n', (15026, 15035), False, 'from spytest import st\n'), ((15084, 15100), 'spytest.st.wait', 'st.wait', (['wait_10'], {}), '(wait_10)\n', (15091, 15100), False, 'from spytest import st\n'), ((17259, 17356), 'apis.system.basic.service_operations', 'basic_obj.service_operations', (['ssh_conn_obj', 'data.dhcp_service_name', 'data.action', 'data.device'], {}), '(ssh_conn_obj, data.dhcp_service_name, data.\n action, data.device)\n', (17287, 17356), True, 'import apis.system.basic as basic_obj\n'), ((17935, 17973), 'spytest.st.get_ui_type', 
'st.get_ui_type', (['dut'], {'cli_type': 'cli_type'}), '(dut, cli_type=cli_type)\n', (17949, 17973), False, 'from spytest import st\n'), ((25153, 25256), 'spytest.st.log', 'st.log', (['"""##################### Writing option 239 to dhcp config file ... ##################"""'], {}), "(\n '##################### Writing option 239 to dhcp config file ... ##################'\n )\n", (25159, 25256), False, 'from spytest import st\n'), ((25540, 25636), 'apis.system.basic.write_update_file', 'basic_obj.write_update_file', (['ssh_conn_obj', 'option_67_config', '"""##"""', "data['dhcp_config_file']"], {}), "(ssh_conn_obj, option_67_config, '##', data[\n 'dhcp_config_file'])\n", (25567, 25636), True, 'import apis.system.basic as basic_obj\n'), ((26251, 26347), 'apis.system.basic.write_update_file', 'basic_obj.write_update_file', (['ssh_conn_obj', 'option_67_config', '"""##"""', "data['dhcp_config_file']"], {}), "(ssh_conn_obj, option_67_config, '##', data[\n 'dhcp_config_file'])\n", (26278, 26347), True, 'import apis.system.basic as basic_obj\n'), ((26379, 26469), 'apis.system.basic.write_update_file', 'basic_obj.write_update_file', (['ssh_conn_obj', 'option_239', '"""##"""', "data['dhcp_config_file']"], {}), "(ssh_conn_obj, option_239, '##', data[\n 'dhcp_config_file'])\n", (26406, 26469), True, 'import apis.system.basic as basic_obj\n'), ((26880, 26918), 'spytest.st.get_ui_type', 'st.get_ui_type', (['dut'], {'cli_type': 'cli_type'}), '(dut, cli_type=cli_type)\n', (26894, 26918), False, 'from spytest import st\n'), ((29538, 29576), 'spytest.st.get_ui_type', 'st.get_ui_type', (['dut'], {'cli_type': 'cli_type'}), '(dut, cli_type=cli_type)\n', (29552, 29576), False, 'from spytest import st\n'), ((32151, 32188), 'apis.system.basic.write_to_json_file', 'basic_obj.write_to_json_file', (['ztp_cfg'], {}), '(ztp_cfg)\n', (32179, 32188), True, 'import apis.system.basic as basic_obj\n'), ((32193, 32251), 'spytest.st.upload_file_to_dut', 'st.upload_file_to_dut', (['dut', 
'ztp_cfg_file', 'dut_ztp_cfg_file'], {}), '(dut, ztp_cfg_file, dut_ztp_cfg_file)\n', (32214, 32251), False, 'from spytest import st\n'), ((32311, 32349), 'spytest.st.get_ui_type', 'st.get_ui_type', (['dut'], {'cli_type': 'cli_type'}), '(dut, cli_type=cli_type)\n', (32325, 32349), False, 'from spytest import st\n'), ((32847, 32885), 'spytest.st.get_ui_type', 'st.get_ui_type', (['dut'], {'cli_type': 'cli_type'}), '(dut, cli_type=cli_type)\n', (32861, 32885), False, 'from spytest import st\n'), ((33027, 33085), 'spytest.st.log', 'st.log', (['"""Verifying timestamp for chronological order ... """'], {}), "('Verifying timestamp for chronological order ... ')\n", (33033, 33085), False, 'from spytest import st\n'), ((34137, 34177), 'apis.system.basic.get_ps_aux', 'basic_obj.get_ps_aux', (['dut', 'search_string'], {}), '(dut, search_string)\n', (34157, 34177), True, 'import apis.system.basic as basic_obj\n'), ((35411, 35463), 'spytest.st.get_ui_type', 'st.get_ui_type', (['config_params.dut'], {'cli_type': 'cli_type'}), '(config_params.dut, cli_type=cli_type)\n', (35425, 35463), False, 'from spytest import st\n'), ((35879, 35900), 'spytest.st.log', 'st.log', (['config_params'], {}), '(config_params)\n', (35885, 35900), False, 'from spytest import st\n'), ((36777, 36878), 'apis.system.basic.service_operations', 'basic_obj.service_operations', (['ssh_conn_obj', 'config_params.dhcp6_service_name', '"""restart"""', '"""server"""'], {}), "(ssh_conn_obj, config_params.dhcp6_service_name,\n 'restart', 'server')\n", (36805, 36878), True, 'import apis.system.basic as basic_obj\n'), ((40583, 40683), 'apis.system.basic.get_file_number_with_regex', 'basic_obj.get_file_number_with_regex', (['connection_obj', 'data.search_pattern', 'data.dhcp_config_file'], {}), '(connection_obj, data.search_pattern,\n data.dhcp_config_file)\n', (40619, 40683), True, 'import apis.system.basic as basic_obj\n'), ((40970, 41072), 'apis.system.basic.write_to_file', 'basic_obj.write_to_file', 
(['connection_obj', 'option_59_config', 'data.dhcp_config_file'], {'device': '"""server"""'}), "(connection_obj, option_59_config, data.\n dhcp_config_file, device='server')\n", (40993, 41072), True, 'import apis.system.basic as basic_obj\n'), ((41329, 41429), 'apis.system.basic.get_file_number_with_regex', 'basic_obj.get_file_number_with_regex', (['connection_obj', 'data.search_pattern', 'data.dhcp_config_file'], {}), '(connection_obj, data.search_pattern,\n data.dhcp_config_file)\n', (41365, 41429), True, 'import apis.system.basic as basic_obj\n'), ((41779, 41879), 'apis.system.basic.get_file_number_with_regex', 'basic_obj.get_file_number_with_regex', (['connection_obj', 'data.search_pattern', 'data.dhcp_config_file'], {}), '(connection_obj, data.search_pattern,\n data.dhcp_config_file)\n', (41815, 41879), True, 'import apis.system.basic as basic_obj\n'), ((42296, 42395), 'apis.system.basic.write_to_file', 'basic_obj.write_to_file', (['connection_obj', 'option_config', 'data.dhcp_config_file'], {'device': '"""server"""'}), "(connection_obj, option_config, data.\n dhcp_config_file, device='server')\n", (42319, 42395), True, 'import apis.system.basic as basic_obj\n'), ((42531, 42631), 'apis.system.basic.get_file_number_with_regex', 'basic_obj.get_file_number_with_regex', (['connection_obj', 'data.search_pattern', 'data.dhcp_config_file'], {}), '(connection_obj, data.search_pattern,\n data.dhcp_config_file)\n', (42567, 42631), True, 'import apis.system.basic as basic_obj\n'), ((42901, 42944), 'spytest.st.log', 'st.log', (['"""Clearing OPTIONS from DHCP server"""'], {}), "('Clearing OPTIONS from DHCP server')\n", (42907, 42944), False, 'from spytest import st\n'), ((44334, 44387), 'apis.system.basic.get_ps_aux', 'basic_obj.get_ps_aux', (['dut', 'dhcpd_pid'], {'device': '"""server"""'}), "(dut, dhcpd_pid, device='server')\n", (44354, 44387), True, 'import apis.system.basic as basic_obj\n'), ((44392, 44406), 'spytest.st.log', 'st.log', (['ps_aux'], {}), '(ps_aux)\n', 
(44398, 44406), False, 'from spytest import st\n'), ((823, 855), 'spytest.st.error', 'st.error', (['"""UNSUPPORTED CLI TYPE"""'], {}), "('UNSUPPORTED CLI TYPE')\n", (831, 855), False, 'from spytest import st\n'), ((6516, 6530), 'spytest.st.wait', 'st.wait', (['retry'], {}), '(retry)\n', (6523, 6530), False, 'from spytest import st\n'), ((7050, 7110), 'spytest.st.log', 'st.log', (['"""Verifying the ZTP status with iteration method ..."""'], {}), "('Verifying the ZTP status with iteration method ...')\n", (7056, 7110), False, 'from spytest import st\n'), ((12102, 12164), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['ztp_timestamp', '"""%Y-%m-%d %H:%M:%S"""'], {}), "(ztp_timestamp, '%Y-%m-%d %H:%M:%S')\n", (12128, 12164), False, 'import datetime\n'), ((12931, 12945), 'spytest.st.wait', 'st.wait', (['delay'], {}), '(delay)\n', (12938, 12945), False, 'from spytest import st\n'), ((13332, 13346), 'spytest.st.wait', 'st.wait', (['delay'], {}), '(delay)\n', (13339, 13346), False, 'from spytest import st\n'), ((16025, 16131), 'apis.system.basic.write_update_file', 'basic_obj.write_update_file', (['ssh_conn_obj', 'option_67_config', 'option_67_config_string', 'dhcp_config_file'], {}), '(ssh_conn_obj, option_67_config,\n option_67_config_string, dhcp_config_file)\n', (16052, 16131), True, 'import apis.system.basic as basic_obj\n'), ((16256, 16291), 'spytest.st.report_fail', 'st.report_fail', (['"""content_not_found"""'], {}), "('content_not_found')\n", (16270, 16291), False, 'from spytest import st\n'), ((16978, 17089), 'apis.system.basic.write_update_file', 'basic_obj.write_update_file', (['ssh_conn_obj', 'option_67_config', 'option_67_config_string', 'data.dhcp_config_file'], {}), '(ssh_conn_obj, option_67_config,\n option_67_config_string, data.dhcp_config_file)\n', (17005, 17089), True, 'import apis.system.basic as basic_obj\n'), ((17219, 17254), 'spytest.st.report_fail', 'st.report_fail', (['"""content_not_found"""'], {}), "('content_not_found')\n", 
(17233, 17254), False, 'from spytest import st\n'), ((17502, 17563), 'spytest.st.report_fail', 'st.report_fail', (['"""service_not_running"""', 'data.dhcp_service_name'], {}), "('service_not_running', data.dhcp_service_name)\n", (17516, 17563), False, 'from spytest import st\n'), ((18624, 18713), 'apis.system.basic.write_to_file', 'basic_obj.write_to_file', (['ssh_conn_obj', 'data.json_content', 'file_path'], {'device': '"""server"""'}), "(ssh_conn_obj, data.json_content, file_path, device=\n 'server')\n", (18647, 18713), True, 'import apis.system.basic as basic_obj\n'), ((19016, 19122), 'apis.system.basic.copy_file_from_client_to_server', 'basic_obj.copy_file_from_client_to_server', (['ssh_conn_obj'], {'src_path': 'file_path', 'dst_path': 'destination_path'}), '(ssh_conn_obj, src_path=file_path,\n dst_path=destination_path)\n', (19057, 19122), True, 'import apis.system.basic as basic_obj\n'), ((19202, 19264), 'spytest.st.download_file_from_dut', 'st.download_file_from_dut', (['dut', 'data.config_db_temp', 'file_path'], {}), '(dut, data.config_db_temp, file_path)\n', (19227, 19264), False, 'from spytest import st\n'), ((19389, 19495), 'apis.system.basic.copy_file_from_client_to_server', 'basic_obj.copy_file_from_client_to_server', (['ssh_conn_obj'], {'src_path': 'file_path', 'dst_path': 'destination_path'}), '(ssh_conn_obj, src_path=file_path,\n dst_path=destination_path)\n', (19430, 19495), True, 'import apis.system.basic as basic_obj\n'), ((19563, 19621), 'spytest.st.log', 'st.log', (['"""Writing invalid content to make invalid json ..."""'], {}), "('Writing invalid content to make invalid json ...')\n", (19569, 19621), False, 'from spytest import st\n'), ((19630, 19715), 'apis.system.basic.write_to_file_to_line', 'basic_obj.write_to_file_to_line', (['ssh_conn_obj', '""","""', '(5)', 'destination_path', '"""server"""'], {}), "(ssh_conn_obj, ',', 5, destination_path,\n 'server')\n", (19661, 19715), True, 'import apis.system.basic as basic_obj\n'), ((20432, 20529), 
'apis.system.basic.service_operations', 'basic_obj.service_operations', (['ssh_conn_obj', 'data.dhcp_service_name', 'data.action', 'data.device'], {}), '(ssh_conn_obj, data.dhcp_service_name, data.\n action, data.device)\n', (20460, 20529), True, 'import apis.system.basic as basic_obj\n'), ((21037, 21084), 'apis.system.basic.remove_file', 'basic_obj.remove_file', (['dut', 'data.config_db_path'], {}), '(dut, data.config_db_path)\n', (21058, 21084), True, 'import apis.system.basic as basic_obj\n'), ((21093, 21141), 'spytest.st.reboot', 'st.reboot', (['dut', 'reboot_type'], {'skip_port_wait': '(True)'}), '(dut, reboot_type, skip_port_wait=True)\n', (21102, 21141), False, 'from spytest import st\n'), ((21150, 21181), 'spytest.st.wait_system_status', 'st.wait_system_status', (['dut', '(500)'], {}), '(dut, 500)\n', (21171, 21181), False, 'from spytest import st\n'), ((21656, 21723), 'apis.system.basic.check_interface_status', 'basic_obj.check_interface_status', (['dut', 'ztp_params.inband_port', '"""up"""'], {}), "(dut, ztp_params.inband_port, 'up')\n", (21688, 21723), True, 'import apis.system.basic as basic_obj\n'), ((21960, 22021), 'apis.system.basic.service_operations_by_systemctl', 'basic_obj.service_operations_by_systemctl', (['dut', '"""ztp"""', '"""stop"""'], {}), "(dut, 'ztp', 'stop')\n", (22001, 22021), True, 'import apis.system.basic as basic_obj\n'), ((22033, 22076), 'apis.system.basic.verify_service_status', 'basic_obj.verify_service_status', (['dut', '"""ztp"""'], {}), "(dut, 'ztp')\n", (22064, 22076), True, 'import apis.system.basic as basic_obj\n'), ((22191, 22253), 'apis.system.basic.service_operations_by_systemctl', 'basic_obj.service_operations_by_systemctl', (['dut', '"""ztp"""', '"""start"""'], {}), "(dut, 'ztp', 'start')\n", (22232, 22253), True, 'import apis.system.basic as basic_obj\n'), ((22357, 22399), 'spytest.st.report_fail', 'st.report_fail', (['"""ztp_max_polling_interval"""'], {}), "('ztp_max_polling_interval')\n", (22371, 22399), False, 
'from spytest import st\n'), ((25055, 25089), 'spytest.st.report_pass', 'st.report_pass', (['"""test_case_passed"""'], {}), "('test_case_passed')\n", (25069, 25089), False, 'from spytest import st\n'), ((25675, 25777), 'apis.system.basic.write_update_file', 'basic_obj.write_update_file', (['ssh_conn_obj', 'option_239', 'option_239_config', "data['dhcp_config_file']"], {}), "(ssh_conn_obj, option_239, option_239_config,\n data['dhcp_config_file'])\n", (25702, 25777), True, 'import apis.system.basic as basic_obj\n'), ((25910, 25945), 'spytest.st.report_fail', 'st.report_fail', (['"""content_not_found"""'], {}), "('content_not_found')\n", (25924, 25945), False, 'from spytest import st\n'), ((26508, 26610), 'apis.system.basic.write_update_file', 'basic_obj.write_update_file', (['ssh_conn_obj', 'option_225', 'option_225_config', "data['dhcp_config_file']"], {}), "(ssh_conn_obj, option_225, option_225_config,\n data['dhcp_config_file'])\n", (26535, 26610), True, 'import apis.system.basic as basic_obj\n'), ((26743, 26778), 'spytest.st.report_fail', 'st.report_fail', (['"""content_not_found"""'], {}), "('content_not_found')\n", (26757, 26778), False, 'from spytest import st\n'), ((27613, 27710), 'apis.system.basic.service_operations', 'basic_obj.service_operations', (['ssh_conn_obj', 'data.dhcp_service_name', 'data.action', 'data.device'], {}), '(ssh_conn_obj, data.dhcp_service_name, data.\n action, data.device)\n', (27641, 27710), True, 'import apis.system.basic as basic_obj\n'), ((31049, 31069), 'spytest.st.log', 'st.log', (['log_string_1'], {}), '(log_string_1)\n', (31055, 31069), False, 'from spytest import st\n'), ((32670, 32706), 'spytest.st.show', 'st.show', (['dut', 'command'], {'type': 'cli_type'}), '(dut, command, type=cli_type)\n', (32677, 32706), False, 'from spytest import st\n'), ((34414, 34463), 'spytest.st.error', 'st.error', (['"""DHCLIENT process not found on DUT ..."""'], {}), "('DHCLIENT process not found on DUT ...')\n", (34422, 34463), False, 'from 
spytest import st\n'), ((34949, 34993), 'apis.system.basic.make_dir', 'basic_obj.make_dir', (['conn_obj', 'path', '"""server"""'], {}), "(conn_obj, path, 'server')\n", (34967, 34993), True, 'import apis.system.basic as basic_obj\n'), ((35002, 35061), 'apis.system.basic.change_permissions', 'basic_obj.change_permissions', (['conn_obj', 'path', '(777)', '"""server"""'], {}), "(conn_obj, path, 777, 'server')\n", (35030, 35061), True, 'import apis.system.basic as basic_obj\n'), ((36119, 36175), 'apis.system.basic.write_to_json_file', 'basic_obj.write_to_json_file', (['config_params.json_content'], {}), '(config_params.json_content)\n', (36147, 36175), True, 'import apis.system.basic as basic_obj\n'), ((36184, 36201), 'spytest.st.log', 'st.log', (['file_path'], {}), '(file_path)\n', (36190, 36201), False, 'from spytest import st\n'), ((37066, 37151), 'spytest.st.report_fail', 'st.report_fail', (['"""service_running_not_expected"""', 'config_params.dhcp6_service_name'], {}), "('service_running_not_expected', config_params.dhcp6_service_name\n )\n", (37080, 37151), False, 'from spytest import st\n'), ((37767, 37838), 'spytest.st.log', 'st.log', (['"""ZTP operation is not mentioned hence rebooting the device ..."""'], {}), "('ZTP operation is not mentioned hence rebooting the device ...')\n", (37773, 37838), False, 'from spytest import st\n'), ((37847, 37917), 'apis.system.basic.remove_file', 'basic_obj.remove_file', (['config_params.dut', 'config_params.config_db_path'], {}), '(config_params.dut, config_params.config_db_path)\n', (37868, 37917), True, 'import apis.system.basic as basic_obj\n'), ((37926, 37988), 'spytest.st.reboot', 'st.reboot', (['config_params.dut', 'reboot_type'], {'skip_port_wait': '(True)'}), '(config_params.dut, reboot_type, skip_port_wait=True)\n', (37935, 37988), False, 'from spytest import st\n'), ((38577, 38617), 'spytest.st.log', 'st.log', (['"""ZTP status verification failed"""'], {}), "('ZTP status verification failed')\n", (38583, 38617), 
False, 'from spytest import st\n'), ((38626, 38674), 'spytest.st.report_fail', 'st.report_fail', (['"""ztp_status_verification_failed"""'], {}), "('ztp_status_verification_failed')\n", (38640, 38674), False, 'from spytest import st\n'), ((38755, 38798), 'apis.system.reboot.config_reload', 'reboot_obj.config_reload', (['config_params.dut'], {}), '(config_params.dut)\n', (38779, 38798), True, 'import apis.system.reboot as reboot_obj\n'), ((38807, 38817), 'spytest.st.wait', 'st.wait', (['(5)'], {}), '(5)\n', (38814, 38817), False, 'from spytest import st\n'), ((40351, 40385), 'spytest.st.report_pass', 'st.report_pass', (['"""test_case_passed"""'], {}), "('test_case_passed')\n", (40365, 40385), False, 'from spytest import st\n'), ((40874, 40970), 'apis.system.basic.delete_line_using_line_number', 'basic_obj.delete_line_using_line_number', (['connection_obj', 'line_number', 'data.dhcp_config_file'], {}), '(connection_obj, line_number, data.\n dhcp_config_file)\n', (40913, 40970), True, 'import apis.system.basic as basic_obj\n'), ((41546, 41581), 'spytest.st.report_fail', 'st.report_fail', (['"""content_not_found"""'], {}), "('content_not_found')\n", (41560, 41581), False, 'from spytest import st\n'), ((42200, 42296), 'apis.system.basic.delete_line_using_line_number', 'basic_obj.delete_line_using_line_number', (['connection_obj', 'line_number', 'data.dhcp_config_file'], {}), '(connection_obj, line_number, data.\n dhcp_config_file)\n', (42239, 42296), True, 'import apis.system.basic as basic_obj\n'), ((42802, 42837), 'spytest.st.report_fail', 'st.report_fail', (['"""content_not_found"""'], {}), "('content_not_found')\n", (42816, 42837), False, 'from spytest import st\n'), ((43614, 43702), 'apis.system.basic.get_file_number_with_regex', 'basic_obj.get_file_number_with_regex', (['connection_obj', 'option', 'data.dhcp_config_file'], {}), '(connection_obj, option, data.\n dhcp_config_file)\n', (43650, 43702), True, 'import apis.system.basic as basic_obj\n'), ((44711, 44758), 
'spytest.st.log', 'st.log', (['"""Required DHCPD service not found ..."""'], {}), "('Required DHCPD service not found ...')\n", (44717, 44758), False, 'from spytest import st\n'), ((45119, 45172), 'spytest.st.download_file_from_dut', 'st.download_file_from_dut', (['dut', 'syslog_path', 'dst_file'], {}), '(dut, syslog_path, dst_file)\n', (45144, 45172), False, 'from spytest import st\n'), ((5648, 5705), 'spytest.st.log', 'st.log', (['"""Found that ZTP is disabled hence enabling it .."""'], {}), "('Found that ZTP is disabled hence enabling it ..')\n", (5654, 5705), False, 'from spytest import st\n'), ((12201, 12212), 'spytest.st.error', 'st.error', (['e'], {}), '(e)\n', (12209, 12212), False, 'from spytest import st\n'), ((12705, 12731), 'spytest.st.log', 'st.log', (['"""Enabling ZTP ..."""'], {}), "('Enabling ZTP ...')\n", (12711, 12731), False, 'from spytest import st\n'), ((12830, 12889), 'spytest.st.log', 'st.log', (['"""ZTP admin mode not found after max iterations ..."""'], {}), "('ZTP admin mode not found after max iterations ...')\n", (12836, 12889), False, 'from spytest import st\n'), ((18834, 18881), 'apis.system.basic.write_to_json_file', 'basic_obj.write_to_json_file', (['data.json_content'], {}), '(data.json_content)\n', (18862, 18881), True, 'import apis.system.basic as basic_obj\n'), ((20687, 20748), 'spytest.st.report_fail', 'st.report_fail', (['"""service_not_running"""', 'data.dhcp_service_name'], {}), "('service_not_running', data.dhcp_service_name)\n", (20701, 20748), False, 'from spytest import st\n'), ((21340, 21377), 'apis.system.basic.poll_for_system_status', 'basic_obj.poll_for_system_status', (['dut'], {}), '(dut)\n', (21372, 21377), True, 'import apis.system.basic as basic_obj\n'), ((21391, 21422), 'spytest.st.log', 'st.log', (['"""Sytem is not ready .."""'], {}), "('Sytem is not ready ..')\n", (21397, 21422), False, 'from spytest import st\n'), ((21435, 21473), 'spytest.st.report_env_fail', 'st.report_env_fail', 
(['"""system_not_ready"""'], {}), "('system_not_ready')\n", (21453, 21473), False, 'from spytest import st\n'), ((21489, 21553), 'apis.system.basic.check_interface_status', 'basic_obj.check_interface_status', (['dut', 'ztp_params.oob_port', '"""up"""'], {}), "(dut, ztp_params.oob_port, 'up')\n", (21521, 21553), True, 'import apis.system.basic as basic_obj\n'), ((21566, 21628), 'apis.system.basic.ifconfig_operation', 'basic_obj.ifconfig_operation', (['dut', 'ztp_params.oob_port', '"""down"""'], {}), "(dut, ztp_params.oob_port, 'down')\n", (21594, 21628), True, 'import apis.system.basic as basic_obj\n'), ((22090, 22125), 'spytest.st.log', 'st.log', (['"""ZTP status is not stopped"""'], {}), "('ZTP status is not stopped')\n", (22096, 22125), False, 'from spytest import st\n'), ((22138, 22182), 'spytest.st.report_fail', 'st.report_fail', (['"""service_not_stopped"""', '"""ztp"""'], {}), "('service_not_stopped', 'ztp')\n", (22152, 22182), False, 'from spytest import st\n'), ((22677, 22717), 'spytest.st.log', 'st.log', (['"""ZTP status verification failed"""'], {}), "('ZTP status verification failed')\n", (22683, 22717), False, 'from spytest import st\n'), ((22730, 22778), 'spytest.st.report_fail', 'st.report_fail', (['"""ztp_status_verification_failed"""'], {}), "('ztp_status_verification_failed')\n", (22744, 22778), False, 'from spytest import st\n'), ((23549, 23589), 'spytest.st.log', 'st.log', (['"""ZTP status verification failed"""'], {}), "('ZTP status verification failed')\n", (23555, 23589), False, 'from spytest import st\n'), ((23602, 23650), 'spytest.st.report_fail', 'st.report_fail', (['"""ztp_status_verification_failed"""'], {}), "('ztp_status_verification_failed')\n", (23616, 23650), False, 'from spytest import st\n'), ((24483, 24557), 'apis.system.basic.poll_for_error_logs', 'basic_obj.poll_for_error_logs', (['dut', 'data.ztp_log_path', 'data.ztp_log_string'], {}), '(dut, data.ztp_log_path, data.ztp_log_string)\n', (24512, 24557), True, 'import 
apis.system.basic as basic_obj\n'), ((27868, 27929), 'spytest.st.report_fail', 'st.report_fail', (['"""service_not_running"""', 'data.dhcp_service_name'], {}), "('service_not_running', data.dhcp_service_name)\n", (27882, 27929), False, 'from spytest import st\n'), ((28173, 28220), 'apis.system.basic.remove_file', 'basic_obj.remove_file', (['dut', 'data.config_db_path'], {}), '(dut, data.config_db_path)\n', (28194, 28220), True, 'import apis.system.basic as basic_obj\n'), ((28233, 28281), 'spytest.st.reboot', 'st.reboot', (['dut', 'reboot_type'], {'skip_port_wait': '(True)'}), '(dut, reboot_type, skip_port_wait=True)\n', (28242, 28281), False, 'from spytest import st\n'), ((28294, 28325), 'spytest.st.wait_system_status', 'st.wait_system_status', (['dut', '(400)'], {}), '(dut, 400)\n', (28315, 28325), False, 'from spytest import st\n'), ((28622, 28662), 'spytest.st.log', 'st.log', (['"""ZTP status verification failed"""'], {}), "('ZTP status verification failed')\n", (28628, 28662), False, 'from spytest import st\n'), ((28675, 28723), 'spytest.st.report_fail', 'st.report_fail', (['"""ztp_status_verification_failed"""'], {}), "('ztp_status_verification_failed')\n", (28689, 28723), False, 'from spytest import st\n'), ((31085, 31170), 'apis.system.basic.poll_for_error_logs', 'basic_obj.poll_for_error_logs', (['dut', 'data.ztp_log_path', 'log_string_1'], {'match': 'match'}), '(dut, data.ztp_log_path, log_string_1, match=match\n )\n', (31114, 31170), True, 'import apis.system.basic as basic_obj\n'), ((33500, 33553), 'utilities.utils.date_time_delta', 'utils_obj.date_time_delta', (['data[i]', 'data[i + 1]', '(True)'], {}), '(data[i], data[i + 1], True)\n', (33525, 33553), True, 'import utilities.utils as utils_obj\n'), ((33566, 33580), 'spytest.st.log', 'st.log', (['result'], {}), '(result)\n', (33572, 33580), False, 'from spytest import st\n'), ((34567, 34607), 'spytest.st.log', 'st.log', (['"""Required dhclient is found ..."""'], {}), "('Required dhclient is found 
...')\n", (34573, 34607), False, 'from spytest import st\n'), ((36357, 36381), 'spytest.st.log', 'st.log', (['destination_path'], {}), '(destination_path)\n', (36363, 36381), False, 'from spytest import st\n'), ((36394, 36500), 'apis.system.basic.copy_file_from_client_to_server', 'basic_obj.copy_file_from_client_to_server', (['ssh_conn_obj'], {'src_path': 'file_path', 'dst_path': 'destination_path'}), '(ssh_conn_obj, src_path=file_path,\n dst_path=destination_path)\n', (36435, 36500), True, 'import apis.system.basic as basic_obj\n'), ((37477, 37547), 'apis.system.basic.remove_file', 'basic_obj.remove_file', (['config_params.dut', 'config_params.config_db_path'], {}), '(config_params.dut, config_params.config_db_path)\n', (37498, 37547), True, 'import apis.system.basic as basic_obj\n'), ((37560, 37622), 'spytest.st.reboot', 'st.reboot', (['config_params.dut', 'reboot_type'], {'skip_port_wait': '(True)'}), '(config_params.dut, reboot_type, skip_port_wait=True)\n', (37569, 37622), False, 'from spytest import st\n'), ((38833, 38903), 'apis.routing.ip.ping', 'ip_obj.ping', (['config_params.dut', 'config_params.static_ip'], {'family': '"""ipv6"""'}), "(config_params.dut, config_params.static_ip, family='ipv6')\n", (38844, 38903), True, 'import apis.routing.ip as ip_obj\n'), ((38917, 39003), 'spytest.st.log', 'st.log', (['"""Pinging to DHCP server failed from DUT, issue either with DUT or server"""'], {}), "(\n 'Pinging to DHCP server failed from DUT, issue either with DUT or server')\n", (38923, 39003), False, 'from spytest import st\n'), ((39388, 39428), 'spytest.st.log', 'st.log', (['"""ZTP status verification failed"""'], {}), "('ZTP status verification failed')\n", (39394, 39428), False, 'from spytest import st\n'), ((39441, 39489), 'spytest.st.report_fail', 'st.report_fail', (['"""ztp_status_verification_failed"""'], {}), "('ztp_status_verification_failed')\n", (39455, 39489), False, 'from spytest import st\n'), ((39643, 39753), 
'apis.system.basic.poll_for_error_logs', 'basic_obj.poll_for_error_logs', (['config_params.dut', 'config_params.ztp_log_path', 'config_params.ztp_log_string'], {}), '(config_params.dut, config_params.ztp_log_path,\n config_params.ztp_log_string)\n', (39672, 39753), True, 'import apis.system.basic as basic_obj\n'), ((43797, 43893), 'apis.system.basic.delete_line_using_line_number', 'basic_obj.delete_line_using_line_number', (['connection_obj', 'line_number', 'data.dhcp_config_file'], {}), '(connection_obj, line_number, data.\n dhcp_config_file)\n', (43836, 43893), True, 'import apis.system.basic as basic_obj\n'), ((7561, 7575), 'spytest.st.wait', 'st.wait', (['retry'], {}), '(retry)\n', (7568, 7575), False, 'from spytest import st\n'), ((7766, 7861), 'spytest.st.log', 'st.log', (['"""Values of service or status or adminmode is not populated yet, retrying ..."""'], {}), "(\n 'Values of service or status or adminmode is not populated yet, retrying ...'\n )\n", (7772, 7861), False, 'from spytest import st\n'), ((7868, 7879), 'spytest.st.wait', 'st.wait', (['(10)'], {}), '(10)\n', (7875, 7879), False, 'from spytest import st\n'), ((11703, 11760), 'spytest.st.log', 'st.log', (['"""Found that ZTP is disabled hence enabling it .."""'], {}), "('Found that ZTP is disabled hence enabling it ..')\n", (11709, 11760), False, 'from spytest import st\n'), ((21818, 21895), 'apis.system.interface.interface_noshutdown', 'intf_obj.interface_noshutdown', (['dut', 'ztp_params.inband_port'], {'cli_type': 'cli_type'}), '(dut, ztp_params.inband_port, cli_type=cli_type)\n', (21847, 21895), True, 'import apis.system.interface as intf_obj\n'), ((23010, 23036), 'spytest.st.wait_system_reboot', 'st.wait_system_reboot', (['dut'], {}), '(dut)\n', (23031, 23036), False, 'from spytest import st\n'), ((23053, 23084), 'spytest.st.wait_system_status', 'st.wait_system_status', (['dut', '(300)'], {}), '(dut, 300)\n', (23074, 23084), False, 'from spytest import st\n'), ((23804, 23865), 'spytest.st.wait', 
'st.wait', (['(300)', '"""Waiting for device to reboot after success..."""'], {}), "(300, 'Waiting for device to reboot after success...')\n", (23811, 23865), False, 'from spytest import st\n'), ((23882, 23913), 'spytest.st.wait_system_status', 'st.wait_system_status', (['dut', '(300)'], {}), '(dut, 300)\n', (23903, 23913), False, 'from spytest import st\n'), ((24204, 24244), 'spytest.st.log', 'st.log', (['"""ZTP status verification failed"""'], {}), "('ZTP status verification failed')\n", (24210, 24244), False, 'from spytest import st\n'), ((24261, 24309), 'spytest.st.report_fail', 'st.report_fail', (['"""ztp_status_verification_failed"""'], {}), "('ztp_status_verification_failed')\n", (24275, 24309), False, 'from spytest import st\n'), ((24332, 24369), 'apis.system.boot_up.sonic_installer_list', 'boot_up_obj.sonic_installer_list', (['dut'], {}), '(dut)\n', (24364, 24369), True, 'import apis.system.boot_up as boot_up_obj\n'), ((24693, 24769), 'apis.system.basic.poll_for_error_logs', 'basic_obj.poll_for_error_logs', (['dut', 'data.ztp_log_path_1', 'data.ztp_log_string'], {}), '(dut, data.ztp_log_path_1, data.ztp_log_string)\n', (24722, 24769), True, 'import apis.system.basic as basic_obj\n'), ((24908, 25000), 'spytest.st.report_fail', 'st.report_fail', (['"""ztp_log_verification_failed"""', 'data.ztp_log_path_1', 'data.ztp_log_string'], {}), "('ztp_log_verification_failed', data.ztp_log_path_1, data.\n ztp_log_string)\n", (24922, 25000), False, 'from spytest import st\n'), ((28848, 28922), 'apis.system.basic.poll_for_error_logs', 'basic_obj.poll_for_error_logs', (['dut', 'data.ztp_log_path', 'data.ztp_log_string'], {}), '(dut, data.ztp_log_path, data.ztp_log_string)\n', (28877, 28922), True, 'import apis.system.basic as basic_obj\n'), ((33644, 33706), 'spytest.st.log', 'st.log', (['"""Observed timestamp difference is not as expected ..."""'], {}), "('Observed timestamp difference is not as expected ...')\n", (33650, 33706), False, 'from spytest import st\n'), 
((39903, 40016), 'apis.system.basic.poll_for_error_logs', 'basic_obj.poll_for_error_logs', (['config_params.dut', 'config_params.ztp_log_path_1', 'config_params.ztp_log_string'], {}), '(config_params.dut, config_params.\n ztp_log_path_1, config_params.ztp_log_string)\n', (39932, 40016), True, 'import apis.system.basic as basic_obj\n'), ((40168, 40277), 'spytest.st.report_fail', 'st.report_fail', (['"""ztp_log_verification_failed"""', 'config_params.ztp_log_path_1', 'config_params.ztp_log_string'], {}), "('ztp_log_verification_failed', config_params.ztp_log_path_1,\n config_params.ztp_log_string)\n", (40182, 40277), False, 'from spytest import st\n'), ((7461, 7511), 'spytest.st.error', 'st.error', (['"""show ztp status returned empty data..."""'], {}), "('show ztp status returned empty data...')\n", (7469, 7511), False, 'from spytest import st\n'), ((8078, 8173), 'spytest.st.log', 'st.log', (['"""Values of service or status or adminmode is not populated yet, retrying ..."""'], {}), "(\n 'Values of service or status or adminmode is not populated yet, retrying ...'\n )\n", (8084, 8173), False, 'from spytest import st\n'), ((8184, 8198), 'spytest.st.wait', 'st.wait', (['retry'], {}), '(retry)\n', (8191, 8198), False, 'from spytest import st\n'), ((29066, 29142), 'apis.system.basic.poll_for_error_logs', 'basic_obj.poll_for_error_logs', (['dut', 'data.ztp_log_path_1', 'data.ztp_log_string'], {}), '(dut, data.ztp_log_path_1, data.ztp_log_string)\n', (29095, 29142), True, 'import apis.system.basic as basic_obj\n'), ((29371, 29463), 'spytest.st.report_fail', 'st.report_fail', (['"""ztp_log_verification_failed"""', 'data.ztp_log_path_1', 'data.ztp_log_string'], {}), "('ztp_log_verification_failed', data.ztp_log_path_1, data.\n ztp_log_string)\n", (29385, 29463), False, 'from spytest import st\n'), ((31342, 31429), 'apis.system.basic.poll_for_error_logs', 'basic_obj.poll_for_error_logs', (['dut', 'data.ztp_log_path_1', 'log_string_1'], {'match': 'match'}), '(dut, 
data.ztp_log_path_1, log_string_1, match\n =match)\n', (31371, 31429), True, 'import apis.system.basic as basic_obj\n'), ((31646, 31731), 'spytest.st.report_fail', 'st.report_fail', (['"""ztp_log_verification_failed"""', 'data.ztp_log_path_1', 'log_string_1'], {}), "('ztp_log_verification_failed', data.ztp_log_path_1, log_string_1\n )\n", (31660, 31731), False, 'from spytest import st\n'), ((4919, 4929), 'spytest.st.wait', 'st.wait', (['(3)'], {}), '(3)\n', (4926, 4929), False, 'from spytest import st\n'), ((5399, 5409), 'spytest.st.wait', 'st.wait', (['(3)'], {}), '(3)\n', (5406, 5409), False, 'from spytest import st\n'), ((8944, 9004), 'spytest.st.log', 'st.log', (['"""ZTP status is not in expected values , retrying..."""'], {}), "('ZTP status is not in expected values , retrying...')\n", (8950, 9004), False, 'from spytest import st\n'), ((9033, 9047), 'spytest.st.wait', 'st.wait', (['retry'], {}), '(retry)\n', (9040, 9047), False, 'from spytest import st\n'), ((10548, 10562), 'spytest.st.wait', 'st.wait', (['retry'], {}), '(retry)\n', (10555, 10562), False, 'from spytest import st\n'), ((10997, 11011), 'spytest.st.wait', 'st.wait', (['retry'], {}), '(retry)\n', (11004, 11011), False, 'from spytest import st\n'), ((11251, 11265), 'spytest.st.wait', 'st.wait', (['retry'], {}), '(retry)\n', (11258, 11265), False, 'from spytest import st\n'), ((11384, 11443), 'spytest.st.log', 'st.log', (['"""ZTP status is not in expected values, retrying..."""'], {}), "('ZTP status is not in expected values, retrying...')\n", (11390, 11443), False, 'from spytest import st\n'), ((11472, 11486), 'spytest.st.wait', 'st.wait', (['retry'], {}), '(retry)\n', (11479, 11486), False, 'from spytest import st\n'), ((10507, 10519), 'spytest.st.wait', 'st.wait', (['(300)'], {}), '(300)\n', (10514, 10519), False, 'from spytest import st\n')] |
from kivy.app import runTouchApp
from kivy.properties import StringProperty
from kivy.uix.button import Button
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.gridlayout import GridLayout
from kivy.lang import Builder
from kivyx.uix.drawer import KXDrawer
class Numpad(GridLayout):
    """A 4x4 calculator keypad whose Button children are built in code.

    The buttons are added in ``on_kv_post`` so that the kv-defined grid
    properties are already applied when the children are created.
    """

    def on_kv_post(self, *args, **kwargs):
        super().on_kv_post(*args, **kwargs)
        key_labels = '7 8 9 * 4 5 6 / 1 2 3 del 0 + - ent'.split()
        for label in key_labels:
            button = Button(
                text=label,
                size_hint=(None, None),
                size=(50, 50),
                font_size=24,
            )
            self.add_widget(button)
class MenuItem(BoxLayout):
    """One row of the anchor menu; ``anchor`` names a KXDrawer anchor option."""

    anchor = StringProperty()

    @property
    def drawer(self):
        """The drawer widget this row controls, reached two parents up."""
        container = self.parent.parent
        return container.ids.drawer
root = Builder.load_string(r'''
<Numpad>:
cols: 4
rows: 4
spacing: 10
padding: 10
size_hint: None, None
size: self.minimum_size
<Separator@Widget>:
size: 1, 1
canvas:
Color:
rgb: 1, 0, 1
Rectangle:
pos: self.pos
size: self.size
<MenuItem>:
CheckBox:
group: 'menuitem'
on_active: root.drawer.anchor = root.anchor
Label:
text: root.anchor
<StencilFloatLayout@StencilView+FloatLayout>:
BoxLayout:
StencilFloatLayout:
# RelativeLayout:
FloatLayout:
size_hint: .9, .9
pos_hint: {'center_x': .5, 'center_y': .5, }
canvas.after:
Color:
rgb: 1, 1, 1,
Line:
dash_offset: 4
dash_length: 2
rectangle: [*self.pos, *self.size, ]
KXDrawer:
id: drawer
anchor: 'tr'
auto_bring_to_front: True
size_hint: None, None
size: numpad.size
disabled: disabled.active
Numpad:
id: numpad
KXDrawer:
anchor: 'rt'
auto_bring_to_front: True
size_hint: None, None
size: 100, 100
Button:
KXDrawer:
anchor: 'bm'
size_hint: None, None
size: 2, 10
Separator:
size_hint_x: None
BoxLayout:
id: menu
size_hint_x: .1
size_hint_min_x: 100
orientation: 'vertical'
spacing: dp(4)
Label:
text: 'disabled'
color: 0, 1, 0, 1
Switch:
id: disabled
active: False
Separator:
size_hint_y: None
Label:
text: 'methods'
color: 0, 1, 0, 1
Button:
text: 'open()'
on_press: drawer.open()
Button:
text: 'close()'
on_press: drawer.close()
Separator:
size_hint_y: None
Label:
text: 'anchor'
color: 0, 1, 0, 1
''')
menu = root.ids.menu
# Add one MenuItem row per anchor option supported by KXDrawer so the
# user can switch the drawer's anchor at runtime via the checkboxes.
for anchor in KXDrawer.anchor.options:
    menu.add_widget(MenuItem(anchor=anchor))
runTouchApp(root)
| [
"kivy.uix.button.Button",
"kivy.lang.Builder.load_string",
"kivy.properties.StringProperty",
"kivy.app.runTouchApp"
] | [((779, 3023), 'kivy.lang.Builder.load_string', 'Builder.load_string', (['"""\n<Numpad>:\n cols: 4\n rows: 4\n spacing: 10\n padding: 10\n size_hint: None, None\n size: self.minimum_size\n\n<Separator@Widget>:\n size: 1, 1\n canvas:\n Color:\n rgb: 1, 0, 1\n Rectangle:\n pos: self.pos\n size: self.size\n\n<MenuItem>:\n CheckBox:\n group: \'menuitem\'\n on_active: root.drawer.anchor = root.anchor\n Label:\n text: root.anchor\n\n<StencilFloatLayout@StencilView+FloatLayout>:\n\nBoxLayout:\n StencilFloatLayout:\n # RelativeLayout:\n FloatLayout:\n size_hint: .9, .9\n pos_hint: {\'center_x\': .5, \'center_y\': .5, }\n canvas.after:\n Color:\n rgb: 1, 1, 1,\n Line:\n dash_offset: 4\n dash_length: 2\n rectangle: [*self.pos, *self.size, ]\n KXDrawer:\n id: drawer\n anchor: \'tr\'\n auto_bring_to_front: True\n size_hint: None, None\n size: numpad.size\n disabled: disabled.active\n Numpad:\n id: numpad\n KXDrawer:\n anchor: \'rt\'\n auto_bring_to_front: True\n size_hint: None, None\n size: 100, 100\n Button:\n KXDrawer:\n anchor: \'bm\'\n size_hint: None, None\n size: 2, 10\n \n Separator:\n size_hint_x: None\n BoxLayout:\n id: menu\n size_hint_x: .1\n size_hint_min_x: 100\n orientation: \'vertical\'\n spacing: dp(4)\n Label:\n text: \'disabled\'\n color: 0, 1, 0, 1\n Switch:\n id: disabled\n active: False\n Separator:\n size_hint_y: None\n Label:\n text: \'methods\'\n color: 0, 1, 0, 1\n Button:\n text: \'open()\'\n on_press: drawer.open()\n Button:\n text: \'close()\'\n on_press: drawer.close()\n Separator:\n size_hint_y: None\n Label:\n text: \'anchor\'\n color: 0, 1, 0, 1\n"""'], {}), '(\n """\n<Numpad>:\n cols: 4\n rows: 4\n spacing: 10\n padding: 10\n size_hint: None, None\n size: self.minimum_size\n\n<Separator@Widget>:\n size: 1, 1\n canvas:\n Color:\n rgb: 1, 0, 1\n Rectangle:\n pos: self.pos\n size: self.size\n\n<MenuItem>:\n CheckBox:\n group: \'menuitem\'\n on_active: root.drawer.anchor = root.anchor\n Label:\n text: 
root.anchor\n\n<StencilFloatLayout@StencilView+FloatLayout>:\n\nBoxLayout:\n StencilFloatLayout:\n # RelativeLayout:\n FloatLayout:\n size_hint: .9, .9\n pos_hint: {\'center_x\': .5, \'center_y\': .5, }\n canvas.after:\n Color:\n rgb: 1, 1, 1,\n Line:\n dash_offset: 4\n dash_length: 2\n rectangle: [*self.pos, *self.size, ]\n KXDrawer:\n id: drawer\n anchor: \'tr\'\n auto_bring_to_front: True\n size_hint: None, None\n size: numpad.size\n disabled: disabled.active\n Numpad:\n id: numpad\n KXDrawer:\n anchor: \'rt\'\n auto_bring_to_front: True\n size_hint: None, None\n size: 100, 100\n Button:\n KXDrawer:\n anchor: \'bm\'\n size_hint: None, None\n size: 2, 10\n \n Separator:\n size_hint_x: None\n BoxLayout:\n id: menu\n size_hint_x: .1\n size_hint_min_x: 100\n orientation: \'vertical\'\n spacing: dp(4)\n Label:\n text: \'disabled\'\n color: 0, 1, 0, 1\n Switch:\n id: disabled\n active: False\n Separator:\n size_hint_y: None\n Label:\n text: \'methods\'\n color: 0, 1, 0, 1\n Button:\n text: \'open()\'\n on_press: drawer.open()\n Button:\n text: \'close()\'\n on_press: drawer.close()\n Separator:\n size_hint_y: None\n Label:\n text: \'anchor\'\n color: 0, 1, 0, 1\n"""\n )\n', (798, 3023), False, 'from kivy.lang import Builder\n'), ((3120, 3137), 'kivy.app.runTouchApp', 'runTouchApp', (['root'], {}), '(root)\n', (3131, 3137), False, 'from kivy.app import runTouchApp\n'), ((671, 687), 'kivy.properties.StringProperty', 'StringProperty', ([], {}), '()\n', (685, 687), False, 'from kivy.properties import StringProperty\n'), ((474, 544), 'kivy.uix.button.Button', 'Button', ([], {'text': 'text', 'size_hint': '(None, None)', 'size': '(50, 50)', 'font_size': '(24)'}), '(text=text, size_hint=(None, None), size=(50, 50), font_size=24)\n', (480, 544), False, 'from kivy.uix.button import Button\n')] |
# -*- coding: utf-8 -*-
"""
A Probabilistic Context-Free Grammar (PCFG) parser using Python.
This code implements a weighted graph search
@author: <NAME>
"""
import codecs
from collections import defaultdict
import math
# Grammar file: one rule per line, tab-separated as "LHS<TAB>RHS<TAB>probability".
f_grammer=".\\test\\08-grammar.txt"
nonterm=[]
# preterm maps a terminal word to a list of [preterminal, log(prob)] entries.
preterm=defaultdict(list)
grammer_file=codecs.open(f_grammer, 'r','utf-8')
index = 0
for rule in grammer_file:
    words = rule.split('\t')
    lhs = words[0]
    rhs = words[1]
    prob = float(words[2])
    rhs_symbols=rhs.split(' ')
    if len(rhs_symbols) == 1:
        # Unary rule (preterminal -> word): store the log-probability
        # so scores can be summed instead of multiplied.
        preterm[rhs].append([lhs, math.log(prob)])
    else:
        # Binary rule. NOTE(review): `index` is never incremented, so this
        # always inserts at position 0 (reverses rule order) — harmless,
        # since the CYK loop below iterates over all rules regardless of order.
        nonterm.insert(index,[lhs, rhs_symbols[0], rhs_symbols[1],math.log(prob)])
# add pre-terminals
f_text=".\\test\\08-input.txt"
text_file=codecs.open(f_text, 'r', 'utf-8')
# init best score with lowest level
best_score=defaultdict(lambda: float('-inf'))
best_edge={}
# Seed the chart: for every token position i, score each unary rule
# as the span key "PRETERM i i+1".  All chart keys are "SYMBOL start end".
# NOTE(review): after this loop `words` holds the tokens of the *last*
# input line only; the CYK pass below parses just that sentence.
for line in text_file:
    words = line.split(' ')
    for i in range(len(words)):
        word = words[i].strip()
        for item in (preterm[word]):
            lhs = item[0]
            log_prob = item[1]
            ibs = lhs + ' ' + str(i) + ' ' + str(i+1)
            best_score[ibs] = (log_prob)
text_file.close()
#cyk, calculate the rest levels
text_file=codecs.open(f_text,'r','utf-8')
my_lp = float('-inf')
# CYK dynamic program over spans [i, j) of the last-read sentence:
# widen the span length via j, slide the left edge i, and try every
# split point k between them.
for j in range(2, len(words)+1):
    for i in range(j-2, -1, -1):
        for k in range(i+1, j):
            # rules in grammer table
            # Try every binary rule SYM -> LSYM RSYM as a way to combine
            # the sub-spans [i, k) and [k, j).
            for nrul in range(len(nonterm)):
                sym=nonterm[nrul][0]
                lsym=nonterm[nrul][1]
                rsym=nonterm[nrul][2]
                logprob =nonterm[nrul][3]
                ilsym = lsym +' ' + str(i) + ' ' + str(k)
                irsym = rsym +' ' + str(k) + ' ' + str(j)
                # Only combine children that already have a finite score.
                if best_score[ilsym] > float('-inf') and best_score[irsym] > float('-inf'):
                    my_lp = best_score[ilsym] + best_score[irsym] + logprob
                    isymi = sym + ' ' + str(i) + ' ' + str(j)
                    # Keep the highest-scoring derivation and remember
                    # its two children for backtracking in Print().
                    if(my_lp > best_score[isymi]):
                        best_score[isymi] = my_lp
                        best_edge[isymi] = [ilsym,irsym]
def Print(sym, best_edge, words):
    """Recursively render the parse subtree rooted at *sym* as an S-expression.

    *sym* is a chart key of the form "SYMBOL start end"; *best_edge* maps
    such a key to its two best child keys.  A key absent from *best_edge*
    is a terminal span and is rendered as "(SYMBOL word)".
    """
    # Bare nonterminal name, without the span indices.
    symp = sym.split(' ')[0]
    if sym in best_edge:
        left, right = best_edge[sym]
        return "(" + symp + " " \
            + Print(left, best_edge, words) + " " + Print(right, best_edge, words) \
            + ")"
    else:
        # Terminal span: one token starting at index i.
        i = sym.split(' ')[1]
        # Bug fix: print the bare symbol (symp), not the full "SYMBOL i j"
        # chart key, matching the nonterminal branch above.
        return "(" + symp + " " + words[int(i)] + ")"
# Render the best parse of the whole sentence.  The span 'S 0 7' hard-codes
# a 7-token input rooted at S -- TODO confirm against the test sentence length.
print(Print('S 0 7',best_edge,words))
def main():
    """Entry-point placeholder -- any code you like."""
    pass
if __name__ == '__main__':
main() | [
"codecs.open",
"collections.defaultdict",
"math.log"
] | [((290, 307), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (301, 307), False, 'from collections import defaultdict\n'), ((324, 360), 'codecs.open', 'codecs.open', (['f_grammer', '"""r"""', '"""utf-8"""'], {}), "(f_grammer, 'r', 'utf-8')\n", (335, 360), False, 'import codecs\n'), ((773, 806), 'codecs.open', 'codecs.open', (['f_text', '"""r"""', '"""utf-8"""'], {}), "(f_text, 'r', 'utf-8')\n", (784, 806), False, 'import codecs\n'), ((1298, 1331), 'codecs.open', 'codecs.open', (['f_text', '"""r"""', '"""utf-8"""'], {}), "(f_text, 'r', 'utf-8')\n", (1309, 1331), False, 'import codecs\n'), ((594, 608), 'math.log', 'math.log', (['prob'], {}), '(prob)\n', (602, 608), False, 'import math\n'), ((689, 703), 'math.log', 'math.log', (['prob'], {}), '(prob)\n', (697, 703), False, 'import math\n')] |
#!/usr/bin/env python
#
# test tool for PostgreSQL Commitfest website
#
# written by: <NAME> <<EMAIL>>
#
import re
import os
import sys
import logging
import tempfile
import atexit
import shutil
import time
import subprocess
from subprocess import Popen
import socket
import sqlite3
import datetime
from time import gmtime, localtime, strftime
# config functions
from config import Config
import copy
# Start logging at 'info'; can be overridden by '-q' later on.
logging.basicConfig(level = logging.INFO,
                    format = '%(levelname)s: %(message)s')
# exit_handler()
#
# atexit hook invoked when the script terminates
# main job: remove the temp directory (currently a no-op placeholder)
#
# parameters:
#  none
# return:
#  none
def exit_handler():
    """Cleanup hook run at interpreter exit; intentionally does nothing yet."""
    pass
# Register the cleanup hook so it runs on normal interpreter shutdown.
atexit.register(exit_handler)
#######################################################################
# main code
# config todo:
# * test technology (Docker, LXC, ...)
# Build the configuration: CLI parameters first, then the config file,
# then cross-validation of the combined settings.
config = Config()
config.parse_parameters()
config.load_config()
config.build_and_verify_config()
# by now the lockfile is acquired, there is no other instance running
# before starting new jobs, cleanup remaining old ones
# startup
config.cleanup_old_dirs_and_files()
# main mode
| [
"logging.basicConfig",
"config.Config",
"atexit.register"
] | [((459, 535), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO', 'format': '"""%(levelname)s: %(message)s"""'}), "(level=logging.INFO, format='%(levelname)s: %(message)s')\n", (478, 535), False, 'import logging\n'), ((782, 811), 'atexit.register', 'atexit.register', (['exit_handler'], {}), '(exit_handler)\n', (797, 811), False, 'import atexit\n'), ((967, 975), 'config.Config', 'Config', ([], {}), '()\n', (973, 975), False, 'from config import Config\n')] |
import random
from fake_useragent import UserAgent
agent_list = '''Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50
Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0;)
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0)
Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)'''


def random_agent():
    """Return a uniformly random user-agent string from the built-in list.

    Returns:
        str: one line of ``agent_list``.
    """
    # random.choice replaces the manual split/len/randint/index dance;
    # selection is still uniform over the same candidates.
    return random.choice(agent_list.split('\n'))
def get_random_agent():
    """Return a random user-agent string via the fake_useragent library."""
    return UserAgent(cache=False).random
def main():
    """Demo entry point: pick an agent from the built-in list and print it."""
    print('agent=', random_agent())


if __name__ == "__main__":
    main()
| [
"random.randint",
"fake_useragent.UserAgent"
] | [((591, 620), 'random.randint', 'random.randint', (['(0)', '(length - 1)'], {}), '(0, length - 1)\n', (605, 620), False, 'import random\n'), ((656, 678), 'fake_useragent.UserAgent', 'UserAgent', ([], {'cache': '(False)'}), '(cache=False)\n', (665, 678), False, 'from fake_useragent import UserAgent\n')] |
# -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import
import matplotlib
matplotlib.use('Agg')
from matplotlib import pyplot as plt
import seaborn as sns
import numpy as np
import tensorflow as tf
tf.enable_eager_execution()
import tensorflow_probability as tfp
from tensorflow_probability import edward2 as ed
tfd = tfp.distributions
# ===========================================================================
# Constant
# ===========================================================================
# Hyperparameters: Gamma(a, b) prior on the Gaussian precision, known
# mean mu, and the number of Monte-Carlo samples drawn throughout.
a = 8
b = 0.5
mu = 0
n_samples = 100000
# ===========================================================================
# Following the generative procedure
# ===========================================================================
# Step 1: generate the precision (named 'beta' here) from its Gamma prior
beta_dist = tfd.Gamma(concentration=a, rate=b)
beta = beta_dist.sample(n_samples)
# the prior probability p(beta | a, b), evaluated at the drawn samples
p_beta_given_a_and_b = beta_dist.prob(beta)
# Step 2: generate one data point per sampled precision
# scale is standard deviation, i.e. sqrt(1 / precision)
x_dist = tfd.Normal(loc=mu, scale=tf.sqrt(1 / beta))
x = x_dist.sample()
# the likelihood p(x | mu, beta)
p_x_given_mu_and_beta = x_dist.prob(x)
# ====== plotting the prior ====== #
plt.figure()
sns.distplot(beta.numpy(), bins=120, kde=True)
plt.title(r"Prior distribution: $p(\beta|a=%g, b=%g)$" % (a, b))
# ====== plotting the likelihood ====== #
plt.figure()
sns.distplot(x.numpy(), bins=120, kde=True)
plt.title(r"Likelihood distribution: $p(X|\mu=%g, \sigma=\sqrt{\beta^{-1}})$" % mu)
# ====== plotting the posterior ====== #
# the posterior probability, this is only
# proportionally, not exactly because we omit
# the evidence p(X)
# If we want to calculate p(X), we need to marginalize out
# beta using sum rule:
# p(X) = p(X, beta_1) + p(X, beta_2) + ... + p(X, beta_∞)
# This is not easy
p_beta_given_x = p_x_given_mu_and_beta * p_beta_given_a_and_b
# Normalize so the weights form a valid Categorical distribution over
# the sampled precision values (self-normalized importance weighting).
p_beta_given_x = p_beta_given_x / tf.reduce_sum(p_beta_given_x)
posterior_dist = tfd.Categorical(probs=p_beta_given_x)
beta = beta.numpy()
posterior = []
# Resample the precisions in batches of 2000 according to their
# posterior weights, then pool all batches together.
for i in range(n_samples // 2000):
  idx = posterior_dist.sample(2000).numpy()
  posterior.append(beta[idx])
posterior = np.concatenate(posterior)
plt.figure()
sns.distplot(posterior, bins=120, kde=True)
plt.title(r"Sampled posterior distribution: $p(\beta|X)$")
# ====== plotting the close form solution ====== #
# Conjugate Gamma posterior update for a Normal with known mean.
a0 = a + n_samples / 2
b0 = b + n_samples / 2 * np.var(x.numpy())
posterior_dist = tfd.Gamma(concentration=a0, rate=b0)
posterior = posterior_dist.sample(n_samples)
plt.figure()
sns.distplot(posterior, bins=120, kde=True)
plt.title(
    r"Closed form solution: $p(\beta|X) \sim Gamma(a=%g, b=%g)$"
    % (a0, b0))
from odin import visual as V
V.plot_save('/tmp/tmp.pdf', dpi=200)
| [
"seaborn.distplot",
"matplotlib.use",
"odin.visual.plot_save",
"tensorflow.reduce_sum",
"tensorflow.enable_eager_execution",
"matplotlib.pyplot.figure",
"tensorflow.sqrt",
"numpy.concatenate",
"matplotlib.pyplot.title"
] | [((107, 128), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (121, 128), False, 'import matplotlib\n'), ((233, 260), 'tensorflow.enable_eager_execution', 'tf.enable_eager_execution', ([], {}), '()\n', (258, 260), True, 'import tensorflow as tf\n'), ((1194, 1206), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1204, 1206), True, 'from matplotlib import pyplot as plt\n'), ((1254, 1318), 'matplotlib.pyplot.title', 'plt.title', (["('Prior distribution: $p(\\\\beta|a=%g, b=%g)$' % (a, b))"], {}), "('Prior distribution: $p(\\\\beta|a=%g, b=%g)$' % (a, b))\n", (1263, 1318), True, 'from matplotlib import pyplot as plt\n'), ((1362, 1374), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1372, 1374), True, 'from matplotlib import pyplot as plt\n'), ((1419, 1515), 'matplotlib.pyplot.title', 'plt.title', (["('Likelihood distribution: $p(X|\\\\mu=%g, \\\\sigma=\\\\sqrt{\\\\beta^{-1}})$' % mu)"], {}), "(\n 'Likelihood distribution: $p(X|\\\\mu=%g, \\\\sigma=\\\\sqrt{\\\\beta^{-1}})$' % mu\n )\n", (1428, 1515), True, 'from matplotlib import pyplot as plt\n'), ((2152, 2177), 'numpy.concatenate', 'np.concatenate', (['posterior'], {}), '(posterior)\n', (2166, 2177), True, 'import numpy as np\n'), ((2179, 2191), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2189, 2191), True, 'from matplotlib import pyplot as plt\n'), ((2192, 2235), 'seaborn.distplot', 'sns.distplot', (['posterior'], {'bins': '(120)', 'kde': '(True)'}), '(posterior, bins=120, kde=True)\n', (2204, 2235), True, 'import seaborn as sns\n'), ((2236, 2294), 'matplotlib.pyplot.title', 'plt.title', (['"""Sampled posterior distribution: $p(\\\\beta|X)$"""'], {}), "('Sampled posterior distribution: $p(\\\\beta|X)$')\n", (2245, 2294), True, 'from matplotlib import pyplot as plt\n'), ((2513, 2525), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2523, 2525), True, 'from matplotlib import pyplot as plt\n'), ((2526, 2569), 'seaborn.distplot', 
'sns.distplot', (['posterior'], {'bins': '(120)', 'kde': '(True)'}), '(posterior, bins=120, kde=True)\n', (2538, 2569), True, 'import seaborn as sns\n'), ((2570, 2658), 'matplotlib.pyplot.title', 'plt.title', (["('Closed form solution: $p(\\\\beta|X) \\\\sim Gamma(a=%g, b=%g)$' % (a0, b0))"], {}), "('Closed form solution: $p(\\\\beta|X) \\\\sim Gamma(a=%g, b=%g)$' % (\n a0, b0))\n", (2579, 2658), True, 'from matplotlib import pyplot as plt\n'), ((2693, 2729), 'odin.visual.plot_save', 'V.plot_save', (['"""/tmp/tmp.pdf"""'], {'dpi': '(200)'}), "('/tmp/tmp.pdf', dpi=200)\n", (2704, 2729), True, 'from odin import visual as V\n'), ((1910, 1939), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['p_beta_given_x'], {}), '(p_beta_given_x)\n', (1923, 1939), True, 'import tensorflow as tf\n'), ((1061, 1078), 'tensorflow.sqrt', 'tf.sqrt', (['(1 / beta)'], {}), '(1 / beta)\n', (1068, 1078), True, 'import tensorflow as tf\n')] |
#
# This source file is part of the EdgeDB open source project.
#
# Copyright 2016-present MagicStack Inc. and the EdgeDB authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import asyncio
import contextlib
import getpass
import logging
import os
import os.path
import pathlib
import setproctitle
import signal
import socket
import sys
import tempfile
import uvloop
import click
from edb.common import devmode
from edb.common import exceptions
from . import buildmeta
from . import cluster as edgedb_cluster
from . import daemon
from . import defines
from . import logsetup
logger = logging.getLogger('edb.server')

# Set once bootstrap completed; guards data-dir cleanup on failure.
_server_initialized = False


def abort(msg, *args):
    """Log *msg* (with %-style *args*) at CRITICAL and exit with status 1."""
    logger.critical(msg, *args)
    sys.exit(1)
def terminate_server(server, loop):
    """Stop *loop* so the process can wind down.

    Used as a SIGTERM callback; *server* is accepted only to match the
    handler signature and is not used here.
    """
    loop.stop()
def _ensure_runstate_dir(data_dir, runstate_dir):
    """Resolve, create if needed, and validate the runstate directory.

    When *runstate_dir* is None the location is derived from *data_dir*
    via build metadata.  Any unrecoverable problem aborts the process
    with a hint to use --runstate-dir.

    Returns:
        pathlib.Path: the validated runstate directory.
    """
    if runstate_dir is None:
        try:
            runstate_dir = buildmeta.get_runstate_path(data_dir)
        except buildmeta.MetadataError:
            abort(
                f'cannot determine the runstate directory location; '
                f'please use --runstate-dir to specify the correct location')

    path = pathlib.Path(runstate_dir)

    if not path.exists():
        # Only create the leaf directory; refuse to mkdir -p blindly.
        if not path.parent.exists():
            abort(
                f'cannot create the runstate directory: '
                f'{str(path.parent)!r} does not exist; please use '
                f'--runstate-dir to specify the correct location')
        try:
            path.mkdir()
        except PermissionError as ex:
            abort(
                f'cannot create the runstate directory: '
                f'{ex!s}; please use --runstate-dir to specify '
                f'the correct location')

    if not os.path.isdir(path):
        abort(f'{str(path)!r} is not a directory; please use '
              f'--runstate-dir to specify the correct location')

    return path
@contextlib.contextmanager
def _internal_state_dir(runstate_dir):
    """Yield a throwaway 'internal-*' temp directory under *runstate_dir*.

    The directory is removed again when the context exits.  If the
    runstate directory is not writable, the process is aborted.
    """
    try:
        with tempfile.TemporaryDirectory(prefix='internal-',
                                         dir=runstate_dir) as tmp:
            yield tmp
    except PermissionError as ex:
        abort(f'cannot write to the runstate directory: '
              f'{ex!s}; please fix the permissions or use '
              f'--runstate-dir to specify the correct location')
def _init_cluster(cluster, args) -> bool:
    """Bootstrap EdgeDB state on the given Postgres *cluster*.

    Args:
        cluster: Postgres cluster wrapper to bootstrap against.
        args (dict): parsed CLI options; only the bootstrap-relevant
            keys are forwarded.

    Returns:
        bool: True when the backend must be restarted so configuration
        changes made during bootstrap take effect.

    Side effect: sets the module-global ``_server_initialized`` flag,
    which guards the data-directory cleanup on a failed bootstrap.
    """
    from edb.server import bootstrap

    bootstrap_args = {
        # Fall back to the database user's name when no explicit
        # default database name was requested.
        'default_database': (args['default_database'] or
                             args['default_database_user']),
        'default_database_user': args['default_database_user'],
        'testmode': args['testmode'],
        'insecure': args['insecure'],
    }

    need_restart = asyncio.run(bootstrap.bootstrap(cluster, bootstrap_args))

    global _server_initialized
    _server_initialized = True

    return need_restart
def _sd_notify(message):
    """Send *message* to the systemd notification socket, if configured.

    Implements the sd_notify protocol: reads the NOTIFY_SOCKET
    environment variable and sends the message as a single datagram.
    Does nothing when the variable is unset or empty.
    """
    notify_socket = os.environ.get('NOTIFY_SOCKET')
    if not notify_socket:
        return

    if notify_socket[0] == '@':
        # A leading '@' denotes a Linux abstract-namespace socket,
        # addressed with a leading NUL byte.
        notify_socket = '\0' + notify_socket[1:]

    # Use the socket as a context manager so it is closed even when
    # connect() or sendall() raises (the previous version leaked the
    # descriptor if connect() failed).
    with socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) as sd_sock:
        sd_sock.connect(notify_socket)
        sd_sock.sendall(message.encode())
def _init_parsers():
    """Eagerly build/load the EdgeQL parser tables in this process."""
    # Initialize all parsers, rebuilding grammars if
    # necessary. Do it earlier than later so that we don't
    # end up in a situation where all our compiler processes
    # are building parsers in parallel.
    from edb.edgeql import parser as ql_parser
    ql_parser.preload()
def _run_server(cluster, args, runstate_dir, internal_runstate_dir):
    """Create the event loop, start the EdgeDB server, and block until exit.

    Args:
        cluster: the (already running) Postgres cluster to serve from.
        args (dict): parsed CLI options.
        runstate_dir: directory for UNIX sockets / runtime state.
        internal_runstate_dir: private temp dir for internal state.
    """
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    try:
        # Import here to make sure that most of imports happen
        # under coverage (if we're testing with it). Otherwise
        # coverage will fail to detect that "import edb..." lines
        # actually were run.
        from . import server
        ss = server.Server(
            loop=loop,
            cluster=cluster,
            runstate_dir=runstate_dir,
            internal_runstate_dir=internal_runstate_dir,
            max_backend_connections=args['max_backend_connections'],
            nethost=args['bind_address'],
            netport=args['port'],
        )
        loop.run_until_complete(ss.init())
        try:
            loop.run_until_complete(ss.start())
        except Exception:
            # Startup failed: undo partial initialization, then re-raise.
            loop.run_until_complete(ss.stop())
            raise
        # Graceful shutdown on SIGTERM: stop the loop via terminate_server.
        loop.add_signal_handler(signal.SIGTERM, terminate_server, ss, loop)
        # Notify systemd that we've started up.
        _sd_notify('READY=1')
        try:
            loop.run_forever()
        finally:
            loop.run_until_complete(ss.stop())
    except KeyboardInterrupt:
        logger.info('Shutting down.')
        _sd_notify('STOPPING=1')
def run_server(args):
    """Top-level driver: prepare Postgres, bootstrap EdgeDB, and serve.

    Initializes the Postgres cluster on first run, ensures the runstate
    directory, bootstraps EdgeDB state, and (unless --bootstrap was
    given) runs the network server until shutdown.  If this call created
    the data directory and bootstrap did not complete, the directory is
    removed again on the way out.

    Args:
        args (dict): fully parsed and verified CLI options.
    """
    ver = buildmeta.get_version()
    if devmode.is_in_dev_mode():
        logger.info(f'EdgeDB server ({ver}) starting in DEV mode.')
    else:
        logger.info(f'EdgeDB server ({ver}) starting.')
    # Build parser tables before forking compiler workers, so they do
    # not all rebuild the grammars in parallel.
    _init_parsers()
    pg_cluster_init_by_us = False
    pg_cluster_started_by_us = False
    try:
        server_settings = {
            'log_connections': 'yes',
            'log_statement': 'all',
            'log_disconnections': 'yes',
            'log_min_messages': 'INFO',
            'client_min_messages': 'INFO',
            'listen_addresses': '', # we use Unix sockets
            'unix_socket_permissions': '0700',
            # We always enforce UTC timezone:
            # * timestamptz is stored in UTC anyways;
            # * this makes the DB server more predictable.
            'TimeZone': 'UTC',
            'default_transaction_isolation': 'repeatable read',
            # TODO: EdgeDB must manage/monitor all client connections and
            # have its own "max_connections". We'll set this setting even
            # higher when we have that fully implemented.
            'max_connections': '500',
        }
        cluster = edgedb_cluster.get_pg_cluster(args['data_dir'])
        cluster_status = cluster.get_status()
        if cluster_status == 'not-initialized':
            logger.info(
                'Initializing database cluster in %s', args['data_dir'])
            initdb_output = cluster.init(
                username='postgres', locale='C', encoding='UTF8')
            for line in initdb_output.splitlines():
                logger.debug('initdb: %s', line)
            # Local trust auth only: access control is handled by EdgeDB.
            cluster.reset_hba()
            cluster.add_hba_entry(
                type='local',
                database='all',
                user='postgres',
                auth_method='trust'
            )
            cluster.add_hba_entry(
                type='local',
                database='all',
                user=defines.EDGEDB_SUPERUSER,
                auth_method='trust'
            )
            pg_cluster_init_by_us = True
            cluster_status = cluster.get_status()
        data_dir = cluster.get_data_dir()
        if args['runstate_dir']:
            specified_runstate_dir = args['runstate_dir']
        elif args['bootstrap']:
            # When bootstrapping a new EdgeDB instance it is often necessary
            # to avoid using the main runstate dir due to lack of permissions,
            # possibility of conflict with another running instance, etc.
            # The --bootstrap mode is also often runs unattended, i.e.
            # as a post-install hook during package installation.
            specified_runstate_dir = data_dir
        else:
            specified_runstate_dir = None
        runstate_dir = _ensure_runstate_dir(data_dir, specified_runstate_dir)
        with _internal_state_dir(runstate_dir) as internal_runstate_dir:
            server_settings['unix_socket_directories'] = args['data_dir']
            if cluster_status == 'stopped':
                cluster.start(
                    port=edgedb_cluster.find_available_port(),
                    server_settings=server_settings)
                pg_cluster_started_by_us = True
            elif cluster_status != 'running':
                abort('Could not start database cluster in %s',
                      args['data_dir'])
            cluster.override_connection_spec(
                user='postgres', database='template1')
            need_cluster_restart = _init_cluster(cluster, args)
            if need_cluster_restart and pg_cluster_started_by_us:
                # Bootstrap changed server configuration; restart on the
                # same port so the settings take effect.
                logger.info('Restarting server to reload configuration...')
                cluster_port = cluster.get_connection_spec()['port']
                cluster.stop()
                cluster.start(
                    port=cluster_port,
                    server_settings=server_settings)
            if not args['bootstrap']:
                _run_server(cluster, args, runstate_dir, internal_runstate_dir)
    except BaseException:
        # Only destroy the data directory if we created it in this run
        # and bootstrap never completed.
        if pg_cluster_init_by_us and not _server_initialized:
            logger.warning('server bootstrap did not complete successfully, '
                           'removing the data directory')
            if cluster.get_status() == 'running':
                cluster.stop()
            cluster.destroy()
        raise
    if pg_cluster_started_by_us:
        cluster.stop()
_server_options = [
click.option(
'-D', '--data-dir', type=str, envvar='EDGEDB_DATADIR',
help='database cluster directory'),
click.option(
'-l', '--log-level',
help=('Logging level. Possible values: (d)ebug, (i)nfo, (w)arn, '
'(e)rror, (s)ilent'),
default='i', envvar='EDGEDB_LOG_LEVEL'),
click.option(
'--log-to',
help=('send logs to DEST, where DEST can be a file name, "syslog", '
'or "stderr"'),
type=str, metavar='DEST', default='stderr'),
click.option(
'--bootstrap', is_flag=True,
help='bootstrap the database cluster and exit'),
click.option(
'--default-database', type=str, default=getpass.getuser(),
help='the name of the default database to create'),
click.option(
'--default-database-user', type=str, default=getpass.getuser(),
help='the name of the default database owner'),
click.option(
'--devmode/--no-devmode',
help='enable or disable the development mode',
default=None),
click.option(
'--testmode/--no-testmode',
help='enable or disable the test mode',
default=False),
click.option(
'-I', '--bind-address', type=str, default=None,
help='IP address to listen on', envvar='EDGEDB_BIND_ADDRESS'),
click.option(
'-p', '--port', type=int, default=None,
help='port to listen on'),
click.option(
'-b', '--background', is_flag=True, help='daemonize'),
click.option(
'--pidfile', type=str, default='/run/edgedb/',
help='path to PID file directory'),
click.option(
'--daemon-user', type=int),
click.option(
'--daemon-group', type=int),
click.option(
'--runstate-dir', type=str, default=None,
help=('directory where UNIX sockets will be created '
'("/run" on Linux by default)')),
click.option(
'--max-backend-connections', type=int, default=100),
]
def server_options(func):
    """Decorator attaching every option in ``_server_options`` to *func*.

    The options are applied in reverse so that ``--help`` output lists
    them in their declaration order.
    """
    wrapped = func
    for decorate in reversed(_server_options):
        wrapped = decorate(wrapped)
    return wrapped
def server_main(*, insecure=False, **kwargs):
    """Shared entry point: configure process-wide state and run the server.

    Sets up logging, the uvloop event-loop policy, dev mode, and the data
    directory default, then either daemonizes (--background) or runs the
    server in the foreground.

    Args:
        insecure (bool): forwarded to bootstrap via ``kwargs['insecure']``.
        **kwargs: parsed CLI options (see ``_server_options``).
    """
    logsetup.setup_logging(kwargs['log_level'], kwargs['log_to'])
    exceptions.install_excepthook()

    asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())

    if kwargs['devmode'] is not None:
        devmode.enable_dev_mode(kwargs['devmode'])

    if not kwargs['data_dir']:
        if devmode.is_in_dev_mode():
            # In dev mode default to a per-user data directory.
            kwargs['data_dir'] = os.path.expanduser('~/.edgedb')
        else:
            abort('Please specify the instance data directory '
                  'using the -D argument')

    kwargs['insecure'] = insecure

    if kwargs['background']:
        daemon_opts = {'detach_process': True}
        pidfile = os.path.join(
            kwargs['pidfile'], '.s.EDGEDB.{}.lock'.format(kwargs['port']))
        daemon_opts['pidfile'] = pidfile
        if kwargs['daemon_user']:
            daemon_opts['uid'] = kwargs['daemon_user']
        if kwargs['daemon_group']:
            daemon_opts['gid'] = kwargs['daemon_group']
        with daemon.DaemonContext(**daemon_opts):
            # Make the daemon identifiable in process listings.
            setproctitle.setproctitle(
                'edgedb-server-{}'.format(kwargs['port']))
            run_server(kwargs)
    else:
        with devmode.CoverageConfig.enable_coverage_if_requested():
            run_server(kwargs)
@click.command(
    'EdgeDB Server',
    context_settings=dict(help_option_names=['-h', '--help']))
@server_options
def main(**kwargs):
    """Click CLI entry point for the EdgeDB server."""
    server_main(**kwargs)


def main_dev():
    """Entry point for the dev console script: force dev mode on."""
    devmode.enable_dev_mode()
    main()


if __name__ == '__main__':
    main()
| [
"logging.getLogger",
"edb.common.devmode.enable_dev_mode",
"edb.edgeql.parser.preload",
"sys.exit",
"getpass.getuser",
"edb.server.bootstrap.bootstrap",
"pathlib.Path",
"click.option",
"asyncio.new_event_loop",
"os.path.isdir",
"os.path.expanduser",
"edb.common.devmode.is_in_dev_mode",
"temp... | [((1096, 1127), 'logging.getLogger', 'logging.getLogger', (['"""edb.server"""'], {}), "('edb.server')\n", (1113, 1127), False, 'import logging\n'), ((1217, 1228), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1225, 1228), False, 'import sys\n'), ((1669, 1695), 'pathlib.Path', 'pathlib.Path', (['runstate_dir'], {}), '(runstate_dir)\n', (1681, 1695), False, 'import pathlib\n'), ((3490, 3521), 'os.environ.get', 'os.environ.get', (['"""NOTIFY_SOCKET"""'], {}), "('NOTIFY_SOCKET')\n", (3504, 3521), False, 'import os\n'), ((3660, 3708), 'socket.socket', 'socket.socket', (['socket.AF_UNIX', 'socket.SOCK_DGRAM'], {}), '(socket.AF_UNIX, socket.SOCK_DGRAM)\n', (3673, 3708), False, 'import socket\n'), ((4123, 4142), 'edb.edgeql.parser.preload', 'ql_parser.preload', ([], {}), '()\n', (4140, 4142), True, 'from edb.edgeql import parser as ql_parser\n'), ((4225, 4249), 'asyncio.new_event_loop', 'asyncio.new_event_loop', ([], {}), '()\n', (4247, 4249), False, 'import asyncio\n'), ((4254, 4282), 'asyncio.set_event_loop', 'asyncio.set_event_loop', (['loop'], {}), '(loop)\n', (4276, 4282), False, 'import asyncio\n'), ((5505, 5529), 'edb.common.devmode.is_in_dev_mode', 'devmode.is_in_dev_mode', ([], {}), '()\n', (5527, 5529), False, 'from edb.common import devmode\n'), ((9911, 10018), 'click.option', 'click.option', (['"""-D"""', '"""--data-dir"""'], {'type': 'str', 'envvar': '"""EDGEDB_DATADIR"""', 'help': '"""database cluster directory"""'}), "('-D', '--data-dir', type=str, envvar='EDGEDB_DATADIR', help=\n 'database cluster directory')\n", (9923, 10018), False, 'import click\n'), ((10036, 10203), 'click.option', 'click.option', (['"""-l"""', '"""--log-level"""'], {'help': '"""Logging level. Possible values: (d)ebug, (i)nfo, (w)arn, (e)rror, (s)ilent"""', 'default': '"""i"""', 'envvar': '"""EDGEDB_LOG_LEVEL"""'}), "('-l', '--log-level', help=\n 'Logging level. 
Possible values: (d)ebug, (i)nfo, (w)arn, (e)rror, (s)ilent'\n , default='i', envvar='EDGEDB_LOG_LEVEL')\n", (10048, 10203), False, 'import click\n'), ((10243, 10400), 'click.option', 'click.option', (['"""--log-to"""'], {'help': '"""send logs to DEST, where DEST can be a file name, "syslog", or "stderr\\""""', 'type': 'str', 'metavar': '"""DEST"""', 'default': '"""stderr"""'}), '(\'--log-to\', help=\n \'send logs to DEST, where DEST can be a file name, "syslog", or "stderr"\',\n type=str, metavar=\'DEST\', default=\'stderr\')\n', (10255, 10400), False, 'import click\n'), ((10441, 10535), 'click.option', 'click.option', (['"""--bootstrap"""'], {'is_flag': '(True)', 'help': '"""bootstrap the database cluster and exit"""'}), "('--bootstrap', is_flag=True, help=\n 'bootstrap the database cluster and exit')\n", (10453, 10535), False, 'import click\n'), ((10844, 10948), 'click.option', 'click.option', (['"""--devmode/--no-devmode"""'], {'help': '"""enable or disable the development mode"""', 'default': 'None'}), "('--devmode/--no-devmode', help=\n 'enable or disable the development mode', default=None)\n", (10856, 10948), False, 'import click\n'), ((10974, 11074), 'click.option', 'click.option', (['"""--testmode/--no-testmode"""'], {'help': '"""enable or disable the test mode"""', 'default': '(False)'}), "('--testmode/--no-testmode', help=\n 'enable or disable the test mode', default=False)\n", (10986, 11074), False, 'import click\n'), ((11100, 11227), 'click.option', 'click.option', (['"""-I"""', '"""--bind-address"""'], {'type': 'str', 'default': 'None', 'help': '"""IP address to listen on"""', 'envvar': '"""EDGEDB_BIND_ADDRESS"""'}), "('-I', '--bind-address', type=str, default=None, help=\n 'IP address to listen on', envvar='EDGEDB_BIND_ADDRESS')\n", (11112, 11227), False, 'import click\n'), ((11245, 11323), 'click.option', 'click.option', (['"""-p"""', '"""--port"""'], {'type': 'int', 'default': 'None', 'help': '"""port to listen on"""'}), "('-p', '--port', 
type=int, default=None, help='port to listen on')\n", (11257, 11323), False, 'import click\n'), ((11346, 11412), 'click.option', 'click.option', (['"""-b"""', '"""--background"""'], {'is_flag': '(True)', 'help': '"""daemonize"""'}), "('-b', '--background', is_flag=True, help='daemonize')\n", (11358, 11412), False, 'import click\n'), ((11427, 11526), 'click.option', 'click.option', (['"""--pidfile"""'], {'type': 'str', 'default': '"""/run/edgedb/"""', 'help': '"""path to PID file directory"""'}), "('--pidfile', type=str, default='/run/edgedb/', help=\n 'path to PID file directory')\n", (11439, 11526), False, 'import click\n'), ((11544, 11583), 'click.option', 'click.option', (['"""--daemon-user"""'], {'type': 'int'}), "('--daemon-user', type=int)\n", (11556, 11583), False, 'import click\n'), ((11598, 11638), 'click.option', 'click.option', (['"""--daemon-group"""'], {'type': 'int'}), "('--daemon-group', type=int)\n", (11610, 11638), False, 'import click\n'), ((11653, 11799), 'click.option', 'click.option', (['"""--runstate-dir"""'], {'type': 'str', 'default': 'None', 'help': '"""directory where UNIX sockets will be created ("/run" on Linux by default)"""'}), '(\'--runstate-dir\', type=str, default=None, help=\n \'directory where UNIX sockets will be created ("/run" on Linux by default)\'\n )\n', (11665, 11799), False, 'import click\n'), ((11831, 11895), 'click.option', 'click.option', (['"""--max-backend-connections"""'], {'type': 'int', 'default': '(100)'}), "('--max-backend-connections', type=int, default=100)\n", (11843, 11895), False, 'import click\n'), ((12144, 12175), 'edb.common.exceptions.install_excepthook', 'exceptions.install_excepthook', ([], {}), '()\n', (12173, 12175), False, 'from edb.common import exceptions\n'), ((13496, 13521), 'edb.common.devmode.enable_dev_mode', 'devmode.enable_dev_mode', ([], {}), '()\n', (13519, 13521), False, 'from edb.common import devmode\n'), ((2276, 2303), 'os.path.isdir', 'os.path.isdir', (['runstate_dir'], {}), 
'(runstate_dir)\n', (2289, 2303), False, 'import os\n'), ((3309, 3353), 'edb.server.bootstrap.bootstrap', 'bootstrap.bootstrap', (['cluster', 'bootstrap_args'], {}), '(cluster, bootstrap_args)\n', (3328, 3353), False, 'from edb.server import bootstrap\n'), ((12211, 12235), 'uvloop.EventLoopPolicy', 'uvloop.EventLoopPolicy', ([], {}), '()\n', (12233, 12235), False, 'import uvloop\n'), ((12284, 12326), 'edb.common.devmode.enable_dev_mode', 'devmode.enable_dev_mode', (["kwargs['devmode']"], {}), "(kwargs['devmode'])\n", (12307, 12326), False, 'from edb.common import devmode\n'), ((12370, 12394), 'edb.common.devmode.is_in_dev_mode', 'devmode.is_in_dev_mode', ([], {}), '()\n', (12392, 12394), False, 'from edb.common import devmode\n'), ((2556, 2621), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {'prefix': '"""internal-"""', 'dir': 'runstate_dir'}), "(prefix='internal-', dir=runstate_dir)\n", (2583, 2621), False, 'import tempfile\n'), ((10615, 10632), 'getpass.getuser', 'getpass.getuser', ([], {}), '()\n', (10630, 10632), False, 'import getpass\n'), ((10765, 10782), 'getpass.getuser', 'getpass.getuser', ([], {}), '()\n', (10780, 10782), False, 'import getpass\n'), ((12429, 12460), 'os.path.expanduser', 'os.path.expanduser', (['"""~/.edgedb"""'], {}), "('~/.edgedb')\n", (12447, 12460), False, 'import os\n'), ((13224, 13277), 'edb.common.devmode.CoverageConfig.enable_coverage_if_requested', 'devmode.CoverageConfig.enable_coverage_if_requested', ([], {}), '()\n', (13275, 13277), False, 'from edb.common import devmode\n')] |
# -*- coding: utf-8 -*-
from TopCmds import *
import quality_check
import data_management as dat
# Quality-gate flag consumed by quality_check.conditional_decorator below.
Qualitytest = quality_check.Qualitytest

# Reference-region boundaries read from the global parameter store;
# used to decide whether the spectrum needs a 180-degree phase turn.
left_boundary=float(dat.get_globalParameter("left_boundary"))
right_boundary=float(dat.get_globalParameter("right_boundary"))
def Check_180turn(leftboundary, rightboundary):
    """Rotate the spectrum phase by 180 degrees if the reference region
    has an overall negative signal.

    Args:
        leftboundary (float): left edge of the reference region.
        rightboundary (float): right edge of the reference region.

    Bug fix: the original ignored both parameters and evaluated
    GETPROCDATA(left_boundary, left_boundary) -- the module-level left
    boundary twice -- so the summed region was degenerate and never
    reflected the requested boundaries.
    """
    reference_intensity = sum(GETPROCDATA(leftboundary, rightboundary))
    if reference_intensity < 0:
        XCMD(".ph", WAIT_TILL_DONE)     # open phase-correction mode
        XCMD(".ph180", WAIT_TILL_DONE)  # add 180 degrees to ph0
        XCMD(".sret", WAIT_TILL_DONE)   # apply the phase and save the result
# Processing of CPMG data
@quality_check.conditional_decorator(quality_check.Quality,quality_check.Quality_lifted,Qualitytest)
def proz():
    """Processing pipeline for CPMG data: window, FT, phase and baseline
    correction, with a 180-degree check before and after."""
    print("processing: ",CURDATA()[0])
    Check_180turn(left_boundary,right_boundary)
    EF()    # exponential window multiplication + Fourier transform
    APK0()  # phase correction, 0th order
    APK1()  # phase correction, 1st order
    ABS()   # baseline correction
    APK()   # additional full phase-correction pass
    ABS()   # baseline correction (repeated after re-phasing)
    Check_180turn(left_boundary,right_boundary)
def proz2D():
    """Processing pipeline for 2D data: 2D phase correction plus baseline
    correction in both dimensions."""
    print("processing: ",CURDATA()[0])
    XCMD("apk2d",WAIT_TILL_DONE)  # 2D automatic phase correction
    ABS2()  # baseline correction (second dimension)
    ABS1()  # baseline correction (first dimension)
# Processing of NOESY data
@quality_check.conditional_decorator(quality_check.Quality,quality_check.Quality_lifted,Qualitytest)
def proz_noe():
    """Processing pipeline for NOESY data, with a 180-degree check
    before and after."""
    print("processing: ",CURDATA()[0])
    Check_180turn(left_boundary,right_boundary)
    EFP()   # exponential window multiplication + Fourier transform + phase correction
    ABS()   # baseline correction
    Check_180turn(left_boundary,right_boundary)
# After manual processing
@quality_check.conditional_decorator(quality_check.Quality,quality_check.Quality_lifted,Qualitytest)
def proz_manually ():
    """Processing pipeline used after manual phase correction."""
    Check_180turn(left_boundary,right_boundary)
    ABS()  # baseline correction
    XCMD("closeall",WAIT_TILL_DONE)  # "closeall" -- presumably closes all open data windows; confirm
| [
"data_management.get_globalParameter",
"quality_check.conditional_decorator"
] | [((785, 891), 'quality_check.conditional_decorator', 'quality_check.conditional_decorator', (['quality_check.Quality', 'quality_check.Quality_lifted', 'Qualitytest'], {}), '(quality_check.Quality, quality_check.\n Quality_lifted, Qualitytest)\n', (820, 891), False, 'import quality_check\n'), ((1519, 1625), 'quality_check.conditional_decorator', 'quality_check.conditional_decorator', (['quality_check.Quality', 'quality_check.Quality_lifted', 'Qualitytest'], {}), '(quality_check.Quality, quality_check.\n Quality_lifted, Qualitytest)\n', (1554, 1625), False, 'import quality_check\n'), ((1980, 2086), 'quality_check.conditional_decorator', 'quality_check.conditional_decorator', (['quality_check.Quality', 'quality_check.Quality_lifted', 'Qualitytest'], {}), '(quality_check.Quality, quality_check.\n Quality_lifted, Qualitytest)\n', (2015, 2086), False, 'import quality_check\n'), ((164, 204), 'data_management.get_globalParameter', 'dat.get_globalParameter', (['"""left_boundary"""'], {}), "('left_boundary')\n", (187, 204), True, 'import data_management as dat\n'), ((228, 269), 'data_management.get_globalParameter', 'dat.get_globalParameter', (['"""right_boundary"""'], {}), "('right_boundary')\n", (251, 269), True, 'import data_management as dat\n')] |
import PegandoVariavel as v

# Fetch the list once instead of calling get_Pessoas() twice: avoids
# redundant work and keeps the printed list consistent with the loop.
pessoas = v.get_Pessoas()
print(pessoas)
print()
for d in pessoas:
    print(d)
"PegandoVariavel.get_Pessoas"
] | [((71, 86), 'PegandoVariavel.get_Pessoas', 'v.get_Pessoas', ([], {}), '()\n', (84, 86), True, 'import PegandoVariavel as v\n'), ((36, 51), 'PegandoVariavel.get_Pessoas', 'v.get_Pessoas', ([], {}), '()\n', (49, 51), True, 'import PegandoVariavel as v\n')] |
import abc
from collections import OrderedDict
from .constants import RESULT_KEY_MAP
class ResultMessageBase(abc.ABC):
    """
    Result message base class.
    """

    @abc.abstractmethod
    def get_content(self, custom_data=None):
        """
        Get message content.

        Args:
            custom_data (dict): Any custom data.

        Returns:
            (dict): Message content.
        """
        return {}

    def get_options(self):
        """
        Get message options.

        Returns:
            (dict): Message options.
        """
        return {}

    @staticmethod
    def convert_result_to_readable(result):
        """
        Convert result keys to convenient format.

        Args:
            result (OrderedDict): Raw result data.

        Returns:
            (OrderedDict): Converted result data, keeping only keys
            present in RESULT_KEY_MAP, in their original order.
        """
        return OrderedDict(
            (RESULT_KEY_MAP[key], value)
            for key, value in result.items()
            if key in RESULT_KEY_MAP
        )
class FileResultMessageBase(ResultMessageBase):
    """
    Build and send the result as a document (file) message.
    """

    @abc.abstractmethod
    def get_filename(self):
        """
        Define filename.

        Returns:
            (str): Filename.
        """
        return "output"

    @abc.abstractmethod
    def get_document(self, data):
        """
        Build document to send.

        Args:
            data (dict): Data to build document.

        Returns:
            (file-like object): Document.
        """
        return None

    def get_content(self, custom_data=None):
        """
        Assemble the keyword arguments for ``bot.send_document``.

        Args:
            custom_data (dict): Any custom data.

        Returns:
            (dict): Message content.
        """
        base = {
            "filename": self.get_filename(),
            "document": self.get_document(custom_data or {}),
        }
        # Options may override/extend the base payload.
        return {**base, **self.get_options()}

    def send(self, bot, chat_id, custom_data=None):
        """
        Send built message.

        Args:
            bot (instance): Bot.
            chat_id (int): Chat ID.
            custom_data (dict): Any custom data.

        Returns: None.
        """
        content = self.get_content(custom_data)
        bot.send_document(chat_id=chat_id, **content)
class TextResultMessageBase(ResultMessageBase):
    """
    Build and send the result as a text message.
    """

    @abc.abstractmethod
    def get_text(self, data):
        """
        Build text to send.

        Args:
            data (dict): Data to build text.

        Returns:
            (str): Text.
        """
        return ""

    def get_content(self, custom_data=None):
        """
        Assemble the keyword arguments for ``bot.send_message``.

        Args:
            custom_data (dict): Any custom data.

        Returns:
            (dict): Message content.
        """
        base = {"text": self.get_text(custom_data or {})}
        # Options may override/extend the base payload.
        return {**base, **self.get_options()}

    def send(self, bot, chat_id, custom_data=None):
        """
        Send built message.

        Args:
            bot (instance): Bot.
            chat_id (int): Chat ID.
            custom_data (dict): Any custom data.

        Returns: None.
        """
        content = self.get_content(custom_data)
        bot.send_message(chat_id=chat_id, **content)
| [
"collections.OrderedDict"
] | [((878, 891), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (889, 891), False, 'from collections import OrderedDict\n')] |
# coding: utf-8
# NOTE: Python 2 script ("print >> fp" syntax, binary "wb+" mode).
from nltk.corpus import wordnet as wn

# Collect every WordNet synset reachable from any lemma; a set removes
# duplicates that arise when several words share the same synset.
all_synsets = set()
for word in wn.words():
    for synset in wn.synsets(word):
        all_synsets.add(synset)

# Dump "name<TAB>definition" per synset.
with open("wordnet_synset_definition.txt", "wb+") as fp:
    for synset in all_synsets:
        print >> fp, "%s\t%s" % (
            synset.name(),
            synset.definition()
        )
| [
"nltk.corpus.wordnet.synsets",
"nltk.corpus.wordnet.words"
] | [((88, 98), 'nltk.corpus.wordnet.words', 'wn.words', ([], {}), '()\n', (96, 98), True, 'from nltk.corpus import wordnet as wn\n'), ((118, 134), 'nltk.corpus.wordnet.synsets', 'wn.synsets', (['word'], {}), '(word)\n', (128, 134), True, 'from nltk.corpus import wordnet as wn\n')] |
"""Main module."""
from sqlalchemy import create_engine
import pandas as pd
import collections
import logging
import re
from pprint import pprint
from typing import Sequence
from opentelemetry.metrics import Counter, Metric
from opentelemetry.sdk.metrics.export import (
MetricRecord,
MetricsExporter,
MetricsExportResult,
)
# Module-level logger (currently unused by the exporter below).
logger = logging.getLogger(__name__)
class FeatureMetricsExporter(MetricsExporter):
    """
    Feature Usage metrics exporter for OpenTelemetry

    Pushes each metric record as a row into the MySQL table
    ``feature_perf_data`` via a SQLAlchemy engine.
    """
    def __init__(self):
        """
        Connect to the database
        """
        # NOTE(review): credentials are placeholder '***' strings and the
        # host/port are hard-coded -- these should come from configuration.
        eng_str = 'mysql+mysqldb://{0}:{1}@{2}:7706/{3}'.format('***',
                                                                '***',
                                                                '10.2.1.43',
                                                                'subscriber_data')
        # pool_recycle=60 drops idle connections after a minute; echo=True
        # logs every emitted SQL statement (verbose -- debug setting).
        self.engine = create_engine(eng_str, pool_recycle=60, echo=True)
    def export(
        self, metric_records: Sequence[MetricRecord]
    ) -> MetricsExportResult:
        # Assumes each record carries at least two labels, ordered as
        # (feature_id, performance_id) -- TODO confirm against the meter setup.
        for record in metric_records:
            print(
                '{}(feature_id="{}", performance_id="{}", value={})'.format(
                    type(self).__name__,
                    record.labels[0][1],
                    record.labels[1][1],
                    record.aggregator.checkpoint,
                )
            )
            # One single-row frame per record; index is discarded on insert.
            df = pd.DataFrame({"feature_id":int(record.labels[0][1]),
                            "performance_id":int(record.labels[1][1]),
                            "data":record.aggregator.checkpoint}, index=["feature_id"])
            try:
                df.to_sql(con=self.engine, name='feature_perf_data',
                        if_exists="append", index=False)
            except ValueError as e:
                # First failed insert aborts the whole batch with FAILURE;
                # earlier rows in this batch have already been written.
                print(e)
                return MetricsExportResult.FAILURE
        return MetricsExportResult.SUCCESS
    def shutdown(self) -> None:
        """Shuts down the exporter.
        Called when the SDK is shut down.
        """
| [
"logging.getLogger",
"sqlalchemy.create_engine"
] | [((349, 376), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (366, 376), False, 'import logging\n'), ((899, 949), 'sqlalchemy.create_engine', 'create_engine', (['eng_str'], {'pool_recycle': '(60)', 'echo': '(True)'}), '(eng_str, pool_recycle=60, echo=True)\n', (912, 949), False, 'from sqlalchemy import create_engine\n')] |
# Copyright (c) 2017 UFCG-LSD.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from broker.plugins import base as plugin_base
from broker.service import api
from broker.utils.logger import Log
from broker.utils.framework import authorizer
from broker.utils.framework import optimizer
from broker import exceptions as ex
# Module-level audit log and the in-memory registry of running
# submissions, keyed by submission id (shared by all handlers below).
API_LOG = Log("APIv10", "logs/APIv10.log")
submissions = {}
def run_submission(data):
    """Validate a submission request, dispatch it to its plugin and
    register the returned executor under the new submission id.

    Raises BadRequestException on missing fields or unknown plugin, and
    UnauthorizedException when authentication fails.
    """
    fields_missing = 'plugin' not in data or 'plugin_info' not in data
    if fields_missing:
        API_LOG.log("Missing plugin fields in request")
        raise ex.BadRequestException("Missing plugin fields in request")
    if data['enable_auth']:
        if 'username' not in data or 'password' not in data:
            API_LOG.log("Missing plugin fields in request")
            raise ex.BadRequestException("Missing plugin fields in request")
        authorization = authorizer.get_authorization(
            api.authorization_url, data['username'], data['password'])
        if not authorization['success']:
            API_LOG.log("Unauthorized request")
            raise ex.UnauthorizedException()
    else:
        # NOTE: plugin membership is only validated on the
        # unauthenticated path (mirrors the original control flow).
        if data['plugin'] not in api.plugins:
            raise ex.BadRequestException()
    plugin = plugin_base.PLUGINS.get_plugin(data['plugin'])
    submission_id, executor = plugin.execute(data['plugin_info'])
    submissions[submission_id] = executor
    return submission_id
def stop_submission(submission_id, data):
    """Authorize the caller and return the executor for *submission_id*.

    NOTE(review): despite the name, nothing is actually stopped yet --
    see the TODO below. The function only authenticates the request,
    checks the submission exists, and returns the stored executor.
    """
    if 'username' not in data or 'password' not in data:
        API_LOG.log("Missing parameters in request")
        raise ex.BadRequestException()
    username = data['username']
    password = data['password']
    authorization = authorizer.get_authorization(api.authorization_url,
                                                 username, password)
    if not authorization['success']:
        API_LOG.log("Unauthorized request")
        raise ex.UnauthorizedException()
    else:
        # Existence is only checked after successful authorization.
        if submission_id not in submissions.keys():
            raise ex.BadRequestException()
    # TODO: Call the executor by submission_id and stop the execution.
    return submissions[submission_id]
def list_submissions():
    """Report the application state of every registered submission.

    Returns a dict mapping submission id to {'status': <state>}.
    """
    return {
        sid: {'status': executor.get_application_state()}
        for sid, executor in submissions.items()
    }
def submission_status(submission_id):
    """Collect state and timing information for one submission.

    Raises BadRequestException for an unknown submission id.
    """
    if submission_id not in submissions.keys():
        API_LOG.log("Wrong request")
        raise ex.BadRequestException()
    # TODO: Update status of application with more informations
    executor = submissions[submission_id]
    return {
        'status': executor.get_application_state(),
        'execution_time': executor.get_application_execution_time(),
        'start_time': executor.get_application_start_time(),
    }
def submission_log(submission_id):
    """Return the execution, stderr and stdout logs of a submission.

    Returns a dict with keys 'execution', 'stderr' and 'stdout', each a
    list of log lines with trailing newlines stripped.

    Raises BadRequestException for an unknown submission id.
    """
    if submission_id not in submissions.keys():
        API_LOG.log("Wrong request")
        raise ex.BadRequestException()
    logs = {}
    # Use context managers so the file handles are closed even when a
    # read fails (the original leaked them on exception), and build real
    # lists: the original returned lazy py3 `map` objects, which are
    # consumed once and are not JSON-serializable.
    for name in ('execution', 'stderr', 'stdout'):
        with open("logs/apps/%s/%s" % (submission_id, name), "r") as log_file:
            logs[name] = [line.replace("\n", "") for line in log_file]
    return logs
| [
"broker.utils.logger.Log",
"broker.utils.framework.authorizer.get_authorization",
"broker.exceptions.BadRequestException",
"broker.exceptions.UnauthorizedException",
"broker.plugins.base.PLUGINS.get_plugin"
] | [((832, 864), 'broker.utils.logger.Log', 'Log', (['"""APIv10"""', '"""logs/APIv10.log"""'], {}), "('APIv10', 'logs/APIv10.log')\n", (835, 864), False, 'from broker.utils.logger import Log\n'), ((2269, 2340), 'broker.utils.framework.authorizer.get_authorization', 'authorizer.get_authorization', (['api.authorization_url', 'username', 'password'], {}), '(api.authorization_url, username, password)\n', (2297, 2340), False, 'from broker.utils.framework import authorizer\n'), ((1041, 1099), 'broker.exceptions.BadRequestException', 'ex.BadRequestException', (['"""Missing plugin fields in request"""'], {}), "('Missing plugin fields in request')\n", (1063, 1099), True, 'from broker import exceptions as ex\n'), ((1427, 1498), 'broker.utils.framework.authorizer.get_authorization', 'authorizer.get_authorization', (['api.authorization_url', 'username', 'password'], {}), '(api.authorization_url, username, password)\n', (1455, 1498), False, 'from broker.utils.framework import authorizer\n'), ((1793, 1839), 'broker.plugins.base.PLUGINS.get_plugin', 'plugin_base.PLUGINS.get_plugin', (["data['plugin']"], {}), "(data['plugin'])\n", (1823, 1839), True, 'from broker.plugins import base as plugin_base\n'), ((2154, 2178), 'broker.exceptions.BadRequestException', 'ex.BadRequestException', ([], {}), '()\n', (2176, 2178), True, 'from broker import exceptions as ex\n'), ((2486, 2512), 'broker.exceptions.UnauthorizedException', 'ex.UnauthorizedException', ([], {}), '()\n', (2510, 2512), True, 'from broker import exceptions as ex\n'), ((3176, 3200), 'broker.exceptions.BadRequestException', 'ex.BadRequestException', ([], {}), '()\n', (3198, 3200), True, 'from broker import exceptions as ex\n'), ((3821, 3845), 'broker.exceptions.BadRequestException', 'ex.BadRequestException', ([], {}), '()\n', (3843, 3845), True, 'from broker import exceptions as ex\n'), ((1270, 1328), 'broker.exceptions.BadRequestException', 'ex.BadRequestException', (['"""Missing plugin fields in request"""'], {}), 
"('Missing plugin fields in request')\n", (1292, 1328), True, 'from broker import exceptions as ex\n'), ((1659, 1685), 'broker.exceptions.UnauthorizedException', 'ex.UnauthorizedException', ([], {}), '()\n', (1683, 1685), True, 'from broker import exceptions as ex\n'), ((1749, 1773), 'broker.exceptions.BadRequestException', 'ex.BadRequestException', ([], {}), '()\n', (1771, 1773), True, 'from broker import exceptions as ex\n'), ((2594, 2618), 'broker.exceptions.BadRequestException', 'ex.BadRequestException', ([], {}), '()\n', (2616, 2618), True, 'from broker import exceptions as ex\n')] |
try:
    import tensorflow
except ModuleNotFoundError:
    # TensorFlow missing: ask the user, then install it with pip.
    pkg_name = 'tensorflow'
    import os
    import sys
    import subprocess
    from cellacdc import myutils
    cancel = myutils.install_package_msg(pkg_name)
    if cancel:
        raise ModuleNotFoundError(
            f'User aborted {pkg_name} installation'
        )
    subprocess.check_call(
        [sys.executable, '-m', 'pip', 'install', 'tensorflow']
    )
    # numba requires numpy<1.22 but tensorflow might install higher,
    # so downgrade numpy when the installed version is >= 1.22.
    import numpy
    # Slice to the first two components *before* int() so suffixes such
    # as '1.22.0rc1' in later parts cannot raise ValueError.
    np_major, np_minor = (int(v) for v in numpy.__version__.split('.')[:2])
    # Compare as a tuple: the previous `major >= 1 and minor >= 22`
    # check wrongly skipped versions such as 2.0 (minor 0 < 22).
    if (np_major, np_minor) >= (1, 22):
        subprocess.check_call(
            [sys.executable, '-m', 'pip', 'install', '--upgrade', 'numpy<1.22']
        )
| [
"numpy.__version__.split",
"subprocess.check_call",
"cellacdc.myutils.install_package_msg"
] | [((180, 217), 'cellacdc.myutils.install_package_msg', 'myutils.install_package_msg', (['pkg_name'], {}), '(pkg_name)\n', (207, 217), False, 'from cellacdc import myutils\n'), ((334, 411), 'subprocess.check_call', 'subprocess.check_call', (["[sys.executable, '-m', 'pip', 'install', 'tensorflow']"], {}), "([sys.executable, '-m', 'pip', 'install', 'tensorflow'])\n", (355, 411), False, 'import subprocess\n'), ((577, 605), 'numpy.__version__.split', 'numpy.__version__.split', (['"""."""'], {}), "('.')\n", (600, 605), False, 'import numpy\n'), ((713, 807), 'subprocess.check_call', 'subprocess.check_call', (["[sys.executable, '-m', 'pip', 'install', '--upgrade', 'numpy<1.22']"], {}), "([sys.executable, '-m', 'pip', 'install', '--upgrade',\n 'numpy<1.22'])\n", (734, 807), False, 'import subprocess\n')] |
import aioredis
import trafaret as t
import yaml
from aiohttp import web
# Schema for the YAML config file: a nested "redis" section with
# connection-pool settings, plus the host/port the web app binds to.
CONFIG_TRAFARET = t.Dict(
    {
        t.Key('redis'): t.Dict(
            {
                'port': t.Int(),
                'host': t.String(),
                'db': t.Int(),
                'minsize': t.Int(),
                'maxsize': t.Int(),
            }
        ),
        'host': t.IP,
        'port': t.Int(),
    }
)
def load_config(fname):
    """Read the YAML config at *fname* and validate it against CONFIG_TRAFARET.

    Uses ``yaml.safe_load``: calling ``yaml.load`` without an explicit
    Loader is deprecated and can construct arbitrary Python objects from
    an untrusted file.
    """
    with open(fname, 'rt') as f:
        data = yaml.safe_load(f)
    return CONFIG_TRAFARET.check(data)
async def init_redis(conf, loop):
    """Create an aioredis connection pool.

    ``conf`` is expected to carry ``host``, ``port``, ``minsize`` and
    ``maxsize`` keys -- presumably the nested ``redis`` mapping from the
    validated config (verify against the caller).
    """
    pool = await aioredis.create_redis_pool(
        (conf['host'], conf['port']),
        minsize=conf['minsize'],
        maxsize=conf['maxsize'],
        loop=loop,
    )
    return pool
# 56-character short-URL alphabet; visually ambiguous characters
# (0, O, o, 1, l, I) are excluded.
CHARS = "abcdefghijkmnpqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23456789"
def encode(num, alphabet=CHARS):
    """Encode a non-negative integer as a string over *alphabet* (base-N)."""
    if num == 0:
        return alphabet[0]
    base = len(alphabet)
    digits = []
    while num:
        num, remainder = divmod(num, base)
        digits.append(alphabet[remainder])
    return ''.join(reversed(digits))
# Incoming shorten-request payload: a single mandatory, valid URL.
ShortifyRequest = t.Dict({t.Key('url'): t.URL})
def fetch_url(data):
    """Validate *data* against ShortifyRequest and return its URL.

    Raises:
        web.HTTPBadRequest: when the payload is missing or carries an
            invalid ``url`` value.
    """
    try:
        data = ShortifyRequest(data)
    except t.DataError:
        # aiohttp HTTP exceptions take keyword-only arguments; the old
        # positional call `HTTPBadRequest('URL is not valid')` raised a
        # TypeError instead of producing the intended 400 response.
        raise web.HTTPBadRequest(text='URL is not valid')
    return data['url']
| [
"aioredis.create_redis_pool",
"trafaret.String",
"yaml.load",
"trafaret.Key",
"aiohttp.web.HTTPBadRequest",
"trafaret.Int"
] | [((115, 129), 'trafaret.Key', 't.Key', (['"""redis"""'], {}), "('redis')\n", (120, 129), True, 'import trafaret as t\n'), ((388, 395), 'trafaret.Int', 't.Int', ([], {}), '()\n', (393, 395), True, 'import trafaret as t\n'), ((479, 491), 'yaml.load', 'yaml.load', (['f'], {}), '(f)\n', (488, 491), False, 'import yaml\n'), ((584, 706), 'aioredis.create_redis_pool', 'aioredis.create_redis_pool', (["(conf['host'], conf['port'])"], {'minsize': "conf['minsize']", 'maxsize': "conf['maxsize']", 'loop': 'loop'}), "((conf['host'], conf['port']), minsize=conf[\n 'minsize'], maxsize=conf['maxsize'], loop=loop)\n", (610, 706), False, 'import aioredis\n'), ((1099, 1111), 'trafaret.Key', 't.Key', (['"""url"""'], {}), "('url')\n", (1104, 1111), True, 'import trafaret as t\n'), ((1228, 1266), 'aiohttp.web.HTTPBadRequest', 'web.HTTPBadRequest', (['"""URL is not valid"""'], {}), "('URL is not valid')\n", (1246, 1266), False, 'from aiohttp import web\n'), ((177, 184), 'trafaret.Int', 't.Int', ([], {}), '()\n', (182, 184), True, 'import trafaret as t\n'), ((210, 220), 'trafaret.String', 't.String', ([], {}), '()\n', (218, 220), True, 'import trafaret as t\n'), ((244, 251), 'trafaret.Int', 't.Int', ([], {}), '()\n', (249, 251), True, 'import trafaret as t\n'), ((280, 287), 'trafaret.Int', 't.Int', ([], {}), '()\n', (285, 287), True, 'import trafaret as t\n'), ((316, 323), 'trafaret.Int', 't.Int', ([], {}), '()\n', (321, 323), True, 'import trafaret as t\n')] |
### tensorflow==2.3.1
import tensorflow as tf
# Float16 Quantization - Input/Output=float32
# Post-training float16 quantization of the MiDaS saved_model: weights
# are stored as float16 while interpreter inputs/outputs stay float32.
# Input resolution the model was exported with.
height = 384
width = 384
converter = tf.lite.TFLiteConverter.from_saved_model('saved_model')
converter.optimizations = [tf.lite.Optimize.DEFAULT]
# Restrict supported weight types to float16.
converter.target_spec.supported_types = [tf.float16]
tflite_model = converter.convert()
with open('midas_{}x{}_float16_quant.tflite'.format(height, width), 'wb') as w:
    w.write(tflite_model)
print('Float16 Quantization complete! - midas_{}x{}_float16_quant.tflite'.format(height, width))
| [
"tensorflow.lite.TFLiteConverter.from_saved_model"
] | [((132, 187), 'tensorflow.lite.TFLiteConverter.from_saved_model', 'tf.lite.TFLiteConverter.from_saved_model', (['"""saved_model"""'], {}), "('saved_model')\n", (172, 187), True, 'import tensorflow as tf\n')] |
import os
from experiments.file_naming.single_target_classifier_indicator import SingleTargetClassifierIndicator
from project_info import project_dir
def get_single_target_tree_rule_dir() -> str:
    """Return (creating it if necessary) the directory for single-target tree rules."""
    mcars_dir: str = os.path.join(project_dir,
                                  'models',
                                  'single_target_tree_rules')
    # exist_ok avoids the check-then-create race of the previous
    # os.path.exists / os.makedirs pair.
    os.makedirs(mcars_dir, exist_ok=True)
    return mcars_dir
def get_single_target_tree_rules_relative_file_name_without_extension(
        dataset_name: str, fold_i: int,
        target_attribute: str,
        classifier_indicator: SingleTargetClassifierIndicator,
        nb_of_trees_per_model: int,
        min_support: float,
        max_depth: int
) -> str:
    """Compose the extension-less base name for a tree-rule file."""
    parts = [
        f"{dataset_name}{fold_i}_{target_attribute}_{str(classifier_indicator.value)}",
        f"_{nb_of_trees_per_model}trees",
        f"_{min_support}supp_{max_depth}depth",
    ]
    return "".join(parts)
def get_single_target_tree_rules_abs_file_name(
        dataset_name: str, fold_i: int,
        target_attribute: str,
        classifier_indicator: SingleTargetClassifierIndicator,
        nb_of_trees_per_model: int,
        min_support: float,
        max_depth: int,
):
    """Absolute path of the gzipped JSON file holding the tree-derived rules."""
    base_name: str = get_single_target_tree_rules_relative_file_name_without_extension(
        dataset_name=dataset_name, fold_i=fold_i,
        target_attribute=target_attribute,
        classifier_indicator=classifier_indicator,
        nb_of_trees_per_model=nb_of_trees_per_model,
        min_support=min_support, max_depth=max_depth
    )
    return os.path.join(get_single_target_tree_rule_dir(),
                        f"{base_name}.json.gz")
def get_single_target_tree_rules_gen_timing_info_abs_file_name(
        dataset_name: str, fold_i: int,
        target_attribute: str,
        classifier_indicator: SingleTargetClassifierIndicator,
        nb_of_trees_per_model: int,
        min_support: float,
        max_depth: int,
):
    """Absolute path of the gzipped JSON file holding rule-generation timings."""
    base_name: str = get_single_target_tree_rules_relative_file_name_without_extension(
        dataset_name=dataset_name, fold_i=fold_i,
        target_attribute=target_attribute,
        classifier_indicator=classifier_indicator,
        nb_of_trees_per_model=nb_of_trees_per_model,
        min_support=min_support, max_depth=max_depth
    )
    return os.path.join(get_single_target_tree_rule_dir(),
                        f"{base_name}_timings.json.gz")
| [
"os.path.exists",
"os.path.join",
"os.makedirs"
] | [((220, 283), 'os.path.join', 'os.path.join', (['project_dir', '"""models"""', '"""single_target_tree_rules"""'], {}), "(project_dir, 'models', 'single_target_tree_rules')\n", (232, 283), False, 'import os\n'), ((1657, 1713), 'os.path.join', 'os.path.join', (['rules_dir', 'f"""{relative_file_name}.json.gz"""'], {}), "(rules_dir, f'{relative_file_name}.json.gz')\n", (1669, 1713), False, 'import os\n'), ((2490, 2554), 'os.path.join', 'os.path.join', (['rules_dir', 'f"""{relative_file_name}_timings.json.gz"""'], {}), "(rules_dir, f'{relative_file_name}_timings.json.gz')\n", (2502, 2554), False, 'import os\n'), ((363, 388), 'os.path.exists', 'os.path.exists', (['mcars_dir'], {}), '(mcars_dir)\n', (377, 388), False, 'import os\n'), ((398, 420), 'os.makedirs', 'os.makedirs', (['mcars_dir'], {}), '(mcars_dir)\n', (409, 420), False, 'import os\n')] |
from database import Base
from sqlalchemy import Column, Integer, String, Boolean, ForeignKey, DateTime, Float
from sqlalchemy.types import DateTime
from flask import Flask, request, jsonify, make_response
from flask_sqlalchemy import SQLAlchemy
# Flask application and SQLAlchemy handle shared by the module.
app = Flask(__name__)
# Disable the modification-tracking signal machinery (saves memory).
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
# NOTE(review): no SQLALCHEMY_DATABASE_URI is set here -- presumably
# configured elsewhere; verify before use.
db = SQLAlchemy(app)
class Medallions(Base):
    # ORM model: one row per taxi trip record.
    __tablename__ = 'medallions'
    # Surrogate primary key.
    id = Column(Integer, primary_key=True)
    # Taxi medallion identifier.
    medallion = Column(String(50))
    # Driver licence identifier.
    hack_license = Column(String(20))
    vendor_id = Column(String(20))
    rate_code = Column(String(20))
    # Store-and-forward flag as reported by the meter vendor.
    store_and_fwd_flag = Column(String(20))
    pickup_datetime = Column(DateTime)
    dropoff_datetime = Column(DateTime)
    passenger_count = Column(Integer)
    # Trip duration in seconds and trip distance (units not specified
    # here -- presumably miles; verify against the data source).
    trip_time_in_secs = Column(Integer)
    trip_distance = Column(Float)
| [
"flask_sqlalchemy.SQLAlchemy",
"sqlalchemy.String",
"sqlalchemy.Column",
"flask.Flask"
] | [((257, 272), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (262, 272), False, 'from flask import Flask, request, jsonify, make_response\n'), ((332, 347), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', (['app'], {}), '(app)\n', (342, 347), False, 'from flask_sqlalchemy import SQLAlchemy\n'), ((411, 444), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)'}), '(Integer, primary_key=True)\n', (417, 444), False, 'from sqlalchemy import Column, Integer, String, Boolean, ForeignKey, DateTime, Float\n'), ((637, 653), 'sqlalchemy.Column', 'Column', (['DateTime'], {}), '(DateTime)\n', (643, 653), False, 'from sqlalchemy import Column, Integer, String, Boolean, ForeignKey, DateTime, Float\n'), ((674, 690), 'sqlalchemy.Column', 'Column', (['DateTime'], {}), '(DateTime)\n', (680, 690), False, 'from sqlalchemy import Column, Integer, String, Boolean, ForeignKey, DateTime, Float\n'), ((710, 725), 'sqlalchemy.Column', 'Column', (['Integer'], {}), '(Integer)\n', (716, 725), False, 'from sqlalchemy import Column, Integer, String, Boolean, ForeignKey, DateTime, Float\n'), ((747, 762), 'sqlalchemy.Column', 'Column', (['Integer'], {}), '(Integer)\n', (753, 762), False, 'from sqlalchemy import Column, Integer, String, Boolean, ForeignKey, DateTime, Float\n'), ((780, 793), 'sqlalchemy.Column', 'Column', (['Float'], {}), '(Float)\n', (786, 793), False, 'from sqlalchemy import Column, Integer, String, Boolean, ForeignKey, DateTime, Float\n'), ((465, 475), 'sqlalchemy.String', 'String', (['(50)'], {}), '(50)\n', (471, 475), False, 'from sqlalchemy import Column, Integer, String, Boolean, ForeignKey, DateTime, Float\n'), ((500, 510), 'sqlalchemy.String', 'String', (['(20)'], {}), '(20)\n', (506, 510), False, 'from sqlalchemy import Column, Integer, String, Boolean, ForeignKey, DateTime, Float\n'), ((532, 542), 'sqlalchemy.String', 'String', (['(20)'], {}), '(20)\n', (538, 542), False, 'from sqlalchemy import Column, Integer, String, Boolean, ForeignKey, 
DateTime, Float\n'), ((564, 574), 'sqlalchemy.String', 'String', (['(20)'], {}), '(20)\n', (570, 574), False, 'from sqlalchemy import Column, Integer, String, Boolean, ForeignKey, DateTime, Float\n'), ((605, 615), 'sqlalchemy.String', 'String', (['(20)'], {}), '(20)\n', (611, 615), False, 'from sqlalchemy import Column, Integer, String, Boolean, ForeignKey, DateTime, Float\n')] |
#####################################
##### Class to Query Census API #####
#####################################
import requests
import json
import pandas as pd
import datascience as ds
from .utils import *
class CensusQuery:
    """Object to query US Census API"""
    _url_endings = {
        "acs5": "acs/acs5",
        "acs1": "acs/acs1",
        "sf1": "dec/sf1"
    }
    _variable_aliases = {
        "acs5": {
            "total_population": "B00001_001E",
        }
    }
    def __init__(self, api_key, dataset, year=None, out="pd"):
        """
        Initializes the CensusQuery object to start API requests
        Args:
        * api_key (`str`): User's API key
        * dataset (`str`): The dataset to be queried; `"acs5"`, `"acs1"`, or `"sf1"`
        Kwargs:
        * year (`int`): The year to query data for; can be overwritten in `CensusQuery.query`
        * out (`str`): Whether output should be `pandas.DataFrame` or `datascience.tables.Table`; `"pd"` or `"ds"`
        Returns:
        * `CensusQuery`. The `CensusQuery` instance to be used to query the API
        """
        assert dataset in CensusQuery._url_endings.keys(), "{} is not a valid dataset".format(dataset)
        self._dataset = CensusQuery._url_endings[dataset]
        if year is not None:
            assert type(year) == int, "{} not a valid year".format(year)
        # Always define the attribute: previously it was only assigned
        # when a year was given, so `query()` raised AttributeError
        # instead of the intended "Year must be defined" message.
        self._year = year
        self._api_key = api_key
        assert out in ["pd", "ds"], """out argument must be \"pd\" or \"ds\""""
        self._out = out
    def _make_params(self, variables, state, county, tract, year):
        """
        Creates parameters dict for requests
        Args:
        * `variables` (`list`): List of variables to extract
        * `state` (`str`): Abbreviation for state from which to query data
        * `county` (`str`): County name for localized queries
        * `tract` (`str`): FIPS code for tract to query data from
        * `year` (`int`): Year for which to query data
        Returns:
        * `dict`. A dict of parameters for the API query
        """
        assert type(variables) == list, "variables must be a list"
        assert len(state) == 2, "state must be an abbreviation"
        params = {}
        params["get"] = ",".join(variables)
        params["for"] = "tract:{}".format(tract)
        # zero_pad_state / get_county_fips come from .utils and convert
        # the human-readable names into FIPS codes.
        state_fips = zero_pad_state(state)
        params["in"] = "state:{}".format(state_fips)
        if county:
            county_fips = get_county_fips(county, state)
            params["in"] += "+county:{}".format(county_fips)
        params["key"] = self._api_key
        return params
    def _send_request(self, variables, state, county, tract, year):
        """
        Sends request to API through `requests` package
        Args:
        * `variables` (`list`): List of variables to extract
        * `state` (`str`): Abbreviation for state from which to query data
        * `county` (`str`): County name for localized queries
        * `tract` (`str`): FIPS code for tract to query data from
        * `year` (`int`): Year for which to query data
        Returns:
        * `pandas.DataFrame` on success, or the raw response text (`str`)
          when the response is not valid JSON
        """
        params = self._make_params(variables, state, county, tract, year)
        url = "https://api.census.gov/data/{}/{}".format(year, self._dataset)
        response = requests.get(url, params)
        try:
            text = json.loads(response.text)
        except json.JSONDecodeError:
            # Non-JSON body (e.g. an API error page): return it verbatim.
            return response.text
        # First row of the Census payload is the column header.
        cols = text[0]
        response_df = pd.DataFrame(text[1:], columns=cols)
        return response_df
    def query(self, variables, state, county=None, tract="*", year=None):
        """
        Queries Census API to get data regarding listed variables; the
        instance-level year is used when `year` is not given.
        Args:
        * `variables` (`list`): List of variables to extract
        * `state` (`str`): Abbreviation for state from which to query data
        * `county` (`str`): County name for localized queries
        * `tract` (`str`): FIPS code for tract to query data from
        * `year` (`int`): Year for which to query data; if provided, overrides instance `year`
        Returns:
        * `pandas.DataFrame` or `datascience.tables.Table`. The data retrieved from the query
        """
        if year is None:
            # Fall back to the year given at construction time (the old
            # code ignored it and sent `None` in the request URL).
            year = self._year
        assert year is not None, "Year must be defined"
        assert type(year) == int, "{} not a valid year".format(year)
        response_df = self._send_request(variables, state, county, tract, year)
        # _send_request returns a str on JSON decode failure; the old
        # `response_df == ""` comparison raised ValueError on a DataFrame
        # (elementwise result used in a boolean context).
        if isinstance(response_df, str):
            return response_df
        if self._out == "ds":
            return ds.Table.from_df(response_df)
        return response_df
"pandas.DataFrame",
"json.loads",
"datascience.Table.from_df",
"requests.get"
] | [((2941, 2966), 'requests.get', 'requests.get', (['url', 'params'], {}), '(url, params)\n', (2953, 2966), False, 'import requests\n'), ((3099, 3135), 'pandas.DataFrame', 'pd.DataFrame', (['text[1:]'], {'columns': 'cols'}), '(text[1:], columns=cols)\n', (3111, 3135), True, 'import pandas as pd\n'), ((2984, 3009), 'json.loads', 'json.loads', (['response.text'], {}), '(response.text)\n', (2994, 3009), False, 'import json\n'), ((4081, 4110), 'datascience.Table.from_df', 'ds.Table.from_df', (['response_df'], {}), '(response_df)\n', (4097, 4110), True, 'import datascience as ds\n')] |
import os, sys, re, json, random, importlib
import numpy as np
import pandas as pd
from collections import OrderedDict
from tqdm import tqdm
import matplotlib
import matplotlib.pyplot as plt
import seaborn as sns
import logomaker as lm
from venn import venn
from venn import generate_petal_labels, draw_venn
from scipy.stats import pearsonr
from scipy.cluster import hierarchy
from util import *
import warnings
warnings.filterwarnings('ignore')
class CAMInterp():
def __init__(self, mhc_seq_filename, allele_mask_dirname, epitope_mask_dirname, df_filename, output_dir,
pred_basename='score', pred_threshold=0.9, mhc_len=182, min_sample_num=100, submotif_len=4):
self.aa_str = 'ACDEFGHIKLMNPQRSTVWY'
self.mhc_len = mhc_len
self.epitope_len = 10
self.res34_pos = [6, 8, 23, 44, 58, 61, 62, 65, 66, 68, 69, 72, 73, 75, 76, 79, 80, 83, 94,
96, 98, 113, 115, 117, 142, 146, 149, 151, 155, 157, 158, 162, 166, 170]
self.color_dict = {'A': '#DACC47', 'B': '#B1DEC9', 'C': '#FFBB99', 'polymorphism': '#875A85'}
self.dpi = 600
self.fontsize = 10
self.pred_basename = pred_basename
self.pred_threshold = pred_threshold
self.min_sample_num = min_sample_num
self.submotif_len = submotif_len
self.output_dir = output_dir
# mhc_seq_dict
self.mhc_seq_dict = json.load(open(mhc_seq_filename, 'r'))
# allele_mask_df
if type(allele_mask_dirname) == list:
alleles = [self._convert_allele(i) for i in os.listdir(allele_mask_dirname[0])]
self.allele_mask_df = pd.DataFrame(columns=alleles, index=range(self.mhc_len), data=0)
self.allele_mask_df.loc['count'] = 0
for i in range(len(allele_mask_dirname)):
temp_df = pd.DataFrame(self._parse_mask(allele_mask_dirname[i], mask_type='mhc'))
self.allele_mask_df.loc[temp_df.index, temp_df.columns] += temp_df
self.allele_mask_df.loc['count', temp_df.columns] += 1
self.allele_mask_df = self.allele_mask_df.loc[:, self.allele_mask_df.loc['count'] != 0]
self.allele_mask_df.loc[range(self.mhc_len)] /= self.allele_mask_df.loc['count']
self.allele_mask_df = self.allele_mask_df.drop('count')
else:
self.allele_mask_df = pd.DataFrame(self._parse_mask(allele_mask_dirname, mask_type='mhc'))
self.allele_mask_df.to_csv('%s/AlleleMask.csv'%self.output_dir)
# epitope_mask_df
if type(epitope_mask_dirname) == list:
alleles = [self._convert_allele(i) for i in os.listdir(epitope_mask_dirname[0])]
self.epitope_mask_df = pd.DataFrame(columns=alleles, index=range(self.epitope_len), data=0)
self.epitope_mask_df.loc['count'] = 0
for i in range(len(epitope_mask_dirname)):
temp_df = pd.DataFrame(self._parse_mask(epitope_mask_dirname[i], mask_type='epitope'))
self.epitope_mask_df.loc[temp_df.index, temp_df.columns] += temp_df
self.epitope_mask_df.loc['count', temp_df.columns] += 1
self.epitope_mask_df = self.epitope_mask_df.loc[:, self.epitope_mask_df.loc['count'] != 0]
self.epitope_mask_df.loc[range(self.epitope_len)] /= self.epitope_mask_df.loc['count']
self.epitope_mask_df = self.epitope_mask_df.drop('count')
else:
self.epitope_mask_df = pd.DataFrame(self._parse_mask(epitope_mask_dirname, mask_type='epitope'))
self.epitope_mask_df['position'] = [1,2,3,4,5,-5,-4,-3,-2,-1]
self.epitope_mask_df = self.epitope_mask_df.set_index('position', drop=True)
self.epitope_mask_df.to_csv('%s/EpitopeMask.csv'%self.output_dir)
# df
self.df = pd.read_csv(df_filename, index_col=0)
self.alleles = list(self.df['mhc'].unique())
self.allele_num = len(self.alleles)
# motif_dict
self.motif_dict = self._parse_motif(pred_basename, pred_threshold, self.min_sample_num)
self.alleles = list(self.df['mhc'].unique())
self.allele_num = len(self.alleles)
# mhc_seqlogo_df
self.mhc_seqlogo_df = self._mhc_seqlogo_df(self.alleles, list(range(self.mhc_len)))
def ResidueAnalysis(self, cam_threshold, importance_threshold, barplot_figsize=(10,2), square_figsize=(3.5,3.5)):
# mean plot
self._residue_barplot(self.allele_mask_df.mean(axis=1), self.res34_pos, figsize=barplot_figsize,
figfile='%s/CAMmean.png'%self.output_dir)
# importance plot
importance_count = self._residue_importance_count(self.alleles, cam_threshold)
self._residue_barplot(importance_count, self.res34_pos, figsize=barplot_figsize,
figfile='%s/CAMimportance.png'%self.output_dir)
# important residues - stacked plot
df = self._importance_stacked_barplot(cam_threshold, self.res34_pos,
xticklabels=False, yticklabels=True, figsize=barplot_figsize,
figfile='%s/CAMimportanceStacked.png'%self.output_dir)
df.to_csv('%s/ImportanceStack.csv'%self.output_dir)
# important residues
residue_dict = self._select_residue(cam_threshold, importance_threshold)
json.dump(residue_dict, open('%s/ResidueSelection.json'%self.output_dir, 'w'))
# venn diagram of residue selection
self._importance_venn_plot(residue_dict, figsize=square_figsize,
figfile='%s/ResidueSelectionVenn.png'%self.output_dir)
# correlation between residue importance and sequence entropy
# entropy = sigma(probability**2)
# allele part
df = self._mhc_importance_polymorphism_plot(cam_threshold, residue_dict, figsize=square_figsize,
figfile='%s/AlleleImportanceEntropyCorrelation.png'%self.output_dir)
df.to_csv('%s/AlleleImportancePolymorphism.csv'%self.output_dir)
# epitope part
df = self._epitope_importance_polymorphism_plot(figsize=square_figsize,
figfile='%s/EpitopeImportanceEntropyCorrelation.png'%self.output_dir)
df.to_csv('%s/EpitopeImportancePolymorphism.csv'%self.output_dir)
def ClusterAnalysis(self, method, metric, allele_figsize=(10,2), epitope_figsize=(3.5,3.5)):
alleles = self.alleles
# allele masks
allele_order, position_order = self._mask_clustering_plot(alleles, mask_type='mhc',
method=method, metric=metric,
xticklabels=False, yticklabels=False,
row_colors=True, figsize=allele_figsize,
title=None, xlabel='MHC-I position', ylabel='MHC-I allele',
figfile='%s/AlleleCAMcluster_all.png'%self.output_dir)
# epitope masks
allele_order, position_order = self._mask_clustering_plot(alleles, mask_type='epitope',
method=method, metric=metric,
xticklabels=True, yticklabels=False,
row_colors=True, figsize=epitope_figsize,
title=None, xlabel='peptide position', ylabel='MHC-I allele',
figfile='%s/EpitopeCAMcluster_all.png'%self.output_dir)
""""""""""""""""""""""""""""""""""""""
# Plots
""""""""""""""""""""""""""""""""""""""
# mask_type: mhc or epitope
def _mask_clustering_plot(self, alleles, mask_type='mhc',
method='average', metric='euclidean',
allele_linkage=True, position_linkage=False,
row_colors=False, xticklabels=True, yticklabels=True,
title=None, xlabel=None, ylabel=None,
figsize=(8, 4), figfile=None):
# residue positions
if mask_type == 'mhc':
positions = list(range(self.mhc_len))
df = self.allele_mask_df.iloc[positions][alleles].T
else:
positions = [1,2,3,4,-4,-3,-2,-1]
df = self.epitope_mask_df.loc[positions][alleles].T
# linkage
zx, zy = None, None
if allele_linkage:
zy = hierarchy.linkage(df, method=method, metric=metric, optimal_ordering=True)
if position_linkage:
zx = hierarchy.linkage(df.T, method=method, metric=metric, optimal_ordering=True)
# row colors
if row_colors:
color_list = list()
for allele in alleles:
hla = allele.split('*')[0]
color_list.append(self.color_dict[hla])
else:
color_list = None
# clustermap
g = sns.clustermap(df,
col_cluster=position_linkage,
row_cluster=allele_linkage,
row_linkage=zy,
col_linkage=zx,
row_colors = color_list,
cmap='Blues',
cbar_kws={'orientation': 'horizontal', 'label': 'mask score'},
cbar_pos=(.3, -.05, .4, .02),
dendrogram_ratio=0.1,
colors_ratio=0.02,
xticklabels=xticklabels,
yticklabels=yticklabels,
figsize=figsize)
g.ax_heatmap.set_title(title)
g.ax_heatmap.set_xlabel(xlabel)
g.ax_heatmap.set_ylabel(ylabel)
# cluster order
if allele_linkage:
allele_order = g.dendrogram_row.reordered_ind
allele_order = [alleles[i] for i in allele_order]
else:
allele_order = None
if position_linkage:
position_order = g.dendrogram_col.reordered_ind
position_order = [positions[i] for i in position_order]
else:
position_order = None
# save figure
if figfile:
plt.savefig(figfile, bbox_inches='tight', dpi=self.dpi)
return allele_order, position_order
def _motif_plot(self, alleles, motif_dict, figfile=None):
allele_num = len(alleles)
fig, ax = plt.subplots(allele_num, figsize=(0.8, allele_num*0.2), dpi=self.dpi)
for i in range(allele_num):
allele = alleles[i]
seqlogo_df = pd.DataFrame(motif_dict[allele], columns=list(self.aa_str))
logo = lm.Logo(seqlogo_df, ax=ax[i], color_scheme="skylign_protein")
_ = ax[i].set_xticks([])
_ = ax[i].set_yticks([])
for side in ['top','bottom','left','right']:
ax[i].spines[side].set_linewidth(0.1)
fig.tight_layout()
if figfile:
fig.savefig(figfile)
def _residue_barplot(self, arr, tag_pos, figsize=(8,3), figfile=None):
# main figure
fig, ax = plt.subplots(1, figsize=figsize, dpi=self.dpi)
sns.barplot(x=list(range(self.mhc_len)), y=arr, ax=ax)
ax.tick_params(axis='x', rotation=90)
# fontsize
for item in ([ax.title, ax.xaxis.label, ax.yaxis.label] + ax.get_yticklabels()):
item.set_fontsize(self.fontsize)
for item in ax.get_xticklabels():
item.set_fontsize(self.fontsize/4)
# set xtick colors
colors = list()
for i in range(self.mhc_len):
if i in tag_pos:
colors.append('red')
else:
colors.append('black')
for tick, color in zip(ax.get_xticklabels(), colors):
tick.set_color(color)
fig.tight_layout()
# save figure
if figfile:
fig.savefig(figfile, bbox_inches='tight')
def _importance_stacked_barplot(self, cam_threshold, tag_pos, figsize=(8,3),
xticklabels=True, yticklabels=True, figfile=None):
# build importance dataframe, columns=['A','B','C']
d = dict()
for hla in ['A', 'B', 'C']:
alleles = [i for i in self.alleles if hla in i]
d[hla] = self._residue_importance_count(alleles, cam_threshold)
df = pd.DataFrame(d)
# figure
fig = plt.figure(figsize=figsize, dpi=self.dpi)
ax = fig.add_subplot(111)
ax.margins(x=0)
# stacked bar plot
ax.bar(df.index, df['A'], color=self.color_dict['A'])
ax.bar(df.index, df['B'], bottom=df['A'], color=self.color_dict['B'])
ax.bar(df.index, df['C'], bottom=df['A'] + df['B'], color=self.color_dict['C'])
# ticks & ticklabels
if xticklabels:
_ = ax.set_xticks(df.index)
_ = ax.set_xticklabels(df.index+1, rotation=90)
# xtick colors
colors = list()
for i in df.index:
if i in tag_pos:
colors.append('red')
else:
colors.append('black')
for tick, color in zip(ax.get_xticklabels(), colors):
tick.set_color(color)
else:
_ = ax.set_xticks([])
_ = ax.set_xticklabels([])
if yticklabels:
_ = ax.set_ylabel('importance')
else:
_ = ax.set_yticks([])
_ = ax.set_yticklabels([])
# fontsize
for item in ([ax.title, ax.xaxis.label, ax.yaxis.label] + ax.get_xticklabels() + ax.get_yticklabels()):
item.set_fontsize(self.fontsize)
# legend
Abar = matplotlib.patches.Rectangle((0,0),1,1,fc=self.color_dict['A'], edgecolor='none')
Bbar = matplotlib.patches.Rectangle((0,0),1,1,fc=self.color_dict['B'], edgecolor='none')
Cbar = matplotlib.patches.Rectangle((0,0),1,1,fc=self.color_dict['C'], edgecolor='none')
l = ax.legend([Abar, Bbar, Cbar], ['HLA-A', 'HLA-B', 'HLA-C'], loc=0, ncol=3, fontsize=self.fontsize)
l.draw_frame(False)
fig.tight_layout()
# save figure
if figfile:
fig.savefig(figfile, bbox_inches='tight')
return df
def _mhc_importance_polymorphism_plot(self, cam_threshold, position_dict, figsize=(3.5,3.5), s=2, figfile=None):
# figure
df = pd.DataFrame()
fig, ax = plt.subplots(1, figsize=figsize, dpi=self.dpi)
# calculate entropy
df['polymorphism'] = -(self.mhc_seqlogo_df*np.log(self.mhc_seqlogo_df)).sum(axis=1)
# calculate importance by HLA
importance_counts = list()
for hla in ['A', 'B', 'C']:
alleles = [i for i in self.alleles if hla in i]
importance_counts.append(self._residue_importance_count(alleles, cam_threshold))
importance_counts = np.array(importance_counts)
importance_count = importance_counts.max(axis=0)
df['importance'] = importance_count
# label
df['label'] = 'others'
df.loc[position_dict['res34'], 'label'] = '34-residue'
df.loc[position_dict['selected'], 'label'] = 'selected'
intersect = list(set(position_dict['res34']) & set(position_dict['selected']))
df.loc[intersect, 'label'] = 'intersection'
# plot_param
param_dict = OrderedDict({'selected':{'color': '#ff4949', 'marker': 'o', 's': 12},
'intersection': {'color': '#ff4949', 'marker': 'x', 's': 12},
'34-residue': {'color': '#adb5bd', 'marker': 'x', 's': 12},
'others': {'color': '#adb5bd', 'marker': 'o', 's': 12}})
# regplot
df = df[df['polymorphism']!=0]
p = sns.regplot(x='importance', y='polymorphism', data=df, ax=ax, fit_reg=True, scatter_kws={'s':0})
for label, params in param_dict.items():
p = sns.regplot(x='importance', y='polymorphism', data=df[df['label']==label],
ax=ax, fit_reg=False, marker=params['marker'],
scatter_kws={'color':params['color'], 's':params['s'], 'linewidths': 0.1})
'''
# annotation
for idx, row in df.iterrows():
if idx in [64, 70]:
p.text(df.loc[idx, 'importance']-0.025, df.loc[idx, 'polymorphism']-0.09, idx+1, fontsize=self.fontsize-2)
'''
# fontsize
for item in ([ax.title, ax.xaxis.label, ax.yaxis.label] + ax.get_xticklabels() + ax.get_yticklabels()):
item.set_fontsize(self.fontsize)
# legend
legend_list = [matplotlib.patches.Rectangle((0,0),1,1,fc='#ff4949', edgecolor='none'),
matplotlib.patches.Rectangle((0,0),1,1,fc='#adb5bd', edgecolor='none'),
plt.scatter([], [], color='black', marker='x', s=12),
plt.scatter([], [], color='black', marker='o', s=12)]
label_list = ['selected', 'non-selected', '34-residue', 'non-34-residue']
l = ax.legend(handles=legend_list, labels=label_list,
loc='lower left', bbox_to_anchor=(-0.2,1), ncol=2, fontsize=self.fontsize)
l.draw_frame(True)
# layout
ax.set_xticks([0.0, 0.2, 0.4, 0.6, 0.8, 1.0, 1.02])
ax.set_xticklabels([0.0, 0.2, 0.4, 0.6, 0.8, 1.0, ''])
fig.tight_layout()
# pearson correlation
pearson, pvalue = pearsonr(df['importance'], df['polymorphism'])
ax.text(0.05, 1.6, 'r=%.2f, p=%.2e'%(pearson, pvalue))
# save figure
if figfile:
fig.savefig(figfile, bbox_inches='tight')
return df
def _epitope_importance_polymorphism_plot(self, figsize=(3.5,3.5), figfile=None):
# get epitope polymorphism
peptides = self.df[self.df[self.pred_basename] > self.pred_threshold]['sequence'].to_list()
peptides = [i[:self.submotif_len] + i[-self.submotif_len:] for i in peptides]
seqlogo_df = lm.alignment_to_matrix(sequences=peptides, to_type="probability",
characters_to_ignore=".", pseudocount=0)
polymorphism = -(seqlogo_df*np.log(seqlogo_df)).sum(axis=1).to_numpy()
# df for plot
df = pd.DataFrame(index=list(range(1, 1+self.submotif_len)) + list(range(-self.submotif_len, 0)))
df['polymorphism'] = polymorphism
df['mask_score'] = self.epitope_mask_df.mean(axis=1)[df.index]
df['residue_tag'] = 'other'
df.loc[[2,-1], 'residue_tag'] = 'anchor'
# plot
fig, ax = plt.subplots(1, 1, figsize=figsize, dpi=self.dpi)
sns.scatterplot(data=df, x='mask_score', y='polymorphism', hue='residue_tag', ax=ax)
for pos in [2, -1]:
ax.text(x=df.loc[pos, 'mask_score']-0.25, y=df.loc[pos, 'polymorphism'], s='Position: {}'.format(pos))
fig.tight_layout()
if figfile:
fig.savefig(figfile, bbox_inches='tight')
return df
def _importance_venn_plot(self, position_dict, figsize=(3.5,3.5), figfile=None):
keys = ['A','B','C','polymorphism']
position_dict = {k: set(v) for k, v in position_dict.items() if k in keys}
petal_labels = generate_petal_labels(position_dict.values())
colors = [list(np.array(self._convert_color_code(self.color_dict[k]))/256) + [0.4] for k in keys]
fig, ax = plt.subplots(1, figsize=figsize, dpi=self.dpi)
draw_venn(petal_labels=petal_labels, dataset_labels=position_dict.keys(),hint_hidden=False,
colors=colors, figsize=figsize, fontsize=self.fontsize, legend_loc="best", ax=ax)
ax.get_legend().remove()
legends = [matplotlib.patches.Rectangle((0,0),1,1,fc=color, edgecolor='none') for color in colors]
l = fig.legend(legends, keys, fontsize=self.fontsize,
ncol=4, loc="lower center", bbox_to_anchor=(0, 0.75, 1, 0.2),
columnspacing=1, handlelength=0.5, handletextpad=0.2, borderpad=0.2)
fig.tight_layout()
if figfile:
fig.savefig(figfile, bbox_inches='tight')
""""""""""""""""""""""""""""""""""""""
# Minor Functions
""""""""""""""""""""""""""""""""""""""
def _parse_mask(self, dirname, mask_type):
masks = OrderedDict()
for allele in os.listdir(dirname):
if re.match(r'[ABC][0-9]+', allele):
if not os.path.isfile('%s/%s/record.npy'%(dirname, allele)):
continue
if mask_type == 'mhc':
masks[self._convert_allele(allele)] \
= np.load('%s/%s/record.npy'%(dirname, allele), allow_pickle=True)[()]['mhc_masks'].mean(axis=0)
else:
masks[self._convert_allele(allele)] \
= np.load('%s/%s/record.npy'%(dirname, allele), allow_pickle=True)[()]['epitope_masks'].mean(axis=0)
return masks
def _parse_motif(self, basename, threshold, sample_num):
motifs = OrderedDict()
for i in range(self.allele_num):
allele = self.alleles[i]
seqs = self.df.loc[(self.df['mhc']==allele) & (self.df[basename] >= threshold), 'sequence']
if len(seqs) >= sample_num:
seqs = seqs.apply(lambda x: x[:self.submotif_len] + x[-self.submotif_len:])
temp_df = pd.DataFrame(columns=list(self.aa_str))
seqlogo_df = lm.alignment_to_matrix(sequences=seqs, to_type="information", characters_to_ignore="XU")
temp_df = pd.concat([temp_df, seqlogo_df], axis=0)
temp_df = temp_df.fillna(0.0)
motifs[allele] = temp_df.to_numpy()
return motifs
def _residue_importance_count(self, alleles, cam_threshold):
importance_count = np.array([0]*self.mhc_len)
for allele in alleles:
importance_count[self.allele_mask_df[allele] > cam_threshold] += 1
return importance_count / len(alleles)
def _mhc_seqlogo_df(self, alleles, positions):
seqs = list()
for allele in alleles:
seqs.append(''.join(self.mhc_seq_dict[allele][j] for j in positions))
temp_df = pd.DataFrame(columns=list(self.aa_str))
seqlogo_df = lm.alignment_to_matrix(sequences=seqs, to_type="probability",
characters_to_ignore=".", pseudocount=0)
temp_df = pd.concat([temp_df, seqlogo_df], axis=0)
temp_df = temp_df.fillna(0.0)
return temp_df
def _select_residue(self, cam_threshold, importance_threshold):
importance_positions = dict()
importance_position_set = set()
importance_positions['res34'] = self.res34_pos
# by HLA
for hla in ['A', 'B', 'C']:
alleles = [i for i in self.alleles if hla in i]
importance_count = self._residue_importance_count(alleles, cam_threshold)
pos = list(map(int, np.where(importance_count > importance_threshold)[0]))
importance_positions[hla] = pos
importance_position_set = importance_position_set | set(pos)
# polymorphism
polymorphism_position = list(map(int,self.mhc_seqlogo_df[~(self.mhc_seqlogo_df.max(axis=1)==1)].index))
importance_positions['polymorphism'] = sorted(polymorphism_position)
importance_position_set = importance_position_set & set(polymorphism_position)
# final
importance_position = sorted(list(importance_position_set))
importance_positions['selected'] = importance_position
return importance_positions
def _convert_allele(self, allele):
if re.match(r'[ABC][0-9]+', allele):
return allele[0] + '*' + allele[1:-2] + ':' + allele[-2:]
elif re.match(r'[ABC]\*[0-9]+\:[0-9]+', allele):
return allele
def _convert_color_code(self, code):
return tuple(int(code[i:i+2], 16) for i in (1, 3, 5))
| [
"pandas.read_csv",
"numpy.log",
"numpy.array",
"seaborn.scatterplot",
"scipy.stats.pearsonr",
"seaborn.regplot",
"os.listdir",
"numpy.where",
"scipy.cluster.hierarchy.linkage",
"matplotlib.pyplot.scatter",
"logomaker.alignment_to_matrix",
"pandas.DataFrame",
"collections.OrderedDict",
"mat... | [((412, 445), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (435, 445), False, 'import warnings\n'), ((3833, 3870), 'pandas.read_csv', 'pd.read_csv', (['df_filename'], {'index_col': '(0)'}), '(df_filename, index_col=0)\n', (3844, 3870), True, 'import pandas as pd\n'), ((9393, 9760), 'seaborn.clustermap', 'sns.clustermap', (['df'], {'col_cluster': 'position_linkage', 'row_cluster': 'allele_linkage', 'row_linkage': 'zy', 'col_linkage': 'zx', 'row_colors': 'color_list', 'cmap': '"""Blues"""', 'cbar_kws': "{'orientation': 'horizontal', 'label': 'mask score'}", 'cbar_pos': '(0.3, -0.05, 0.4, 0.02)', 'dendrogram_ratio': '(0.1)', 'colors_ratio': '(0.02)', 'xticklabels': 'xticklabels', 'yticklabels': 'yticklabels', 'figsize': 'figsize'}), "(df, col_cluster=position_linkage, row_cluster=allele_linkage,\n row_linkage=zy, col_linkage=zx, row_colors=color_list, cmap='Blues',\n cbar_kws={'orientation': 'horizontal', 'label': 'mask score'}, cbar_pos\n =(0.3, -0.05, 0.4, 0.02), dendrogram_ratio=0.1, colors_ratio=0.02,\n xticklabels=xticklabels, yticklabels=yticklabels, figsize=figsize)\n", (9407, 9760), True, 'import seaborn as sns\n'), ((10935, 11006), 'matplotlib.pyplot.subplots', 'plt.subplots', (['allele_num'], {'figsize': '(0.8, allele_num * 0.2)', 'dpi': 'self.dpi'}), '(allele_num, figsize=(0.8, allele_num * 0.2), dpi=self.dpi)\n', (10947, 11006), True, 'import matplotlib.pyplot as plt\n'), ((11622, 11668), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)'], {'figsize': 'figsize', 'dpi': 'self.dpi'}), '(1, figsize=figsize, dpi=self.dpi)\n', (11634, 11668), True, 'import matplotlib.pyplot as plt\n'), ((12946, 12961), 'pandas.DataFrame', 'pd.DataFrame', (['d'], {}), '(d)\n', (12958, 12961), True, 'import pandas as pd\n'), ((13002, 13043), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': 'figsize', 'dpi': 'self.dpi'}), '(figsize=figsize, dpi=self.dpi)\n', (13012, 13043), True, 'import matplotlib.pyplot as 
plt\n'), ((14342, 14431), 'matplotlib.patches.Rectangle', 'matplotlib.patches.Rectangle', (['(0, 0)', '(1)', '(1)'], {'fc': "self.color_dict['A']", 'edgecolor': '"""none"""'}), "((0, 0), 1, 1, fc=self.color_dict['A'],\n edgecolor='none')\n", (14370, 14431), False, 'import matplotlib\n'), ((14439, 14528), 'matplotlib.patches.Rectangle', 'matplotlib.patches.Rectangle', (['(0, 0)', '(1)', '(1)'], {'fc': "self.color_dict['B']", 'edgecolor': '"""none"""'}), "((0, 0), 1, 1, fc=self.color_dict['B'],\n edgecolor='none')\n", (14467, 14528), False, 'import matplotlib\n'), ((14536, 14625), 'matplotlib.patches.Rectangle', 'matplotlib.patches.Rectangle', (['(0, 0)', '(1)', '(1)'], {'fc': "self.color_dict['C']", 'edgecolor': '"""none"""'}), "((0, 0), 1, 1, fc=self.color_dict['C'],\n edgecolor='none')\n", (14564, 14625), False, 'import matplotlib\n'), ((15089, 15103), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (15101, 15103), True, 'import pandas as pd\n'), ((15122, 15168), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)'], {'figsize': 'figsize', 'dpi': 'self.dpi'}), '(1, figsize=figsize, dpi=self.dpi)\n', (15134, 15168), True, 'import matplotlib.pyplot as plt\n'), ((15581, 15608), 'numpy.array', 'np.array', (['importance_counts'], {}), '(importance_counts)\n', (15589, 15608), True, 'import numpy as np\n'), ((16067, 16329), 'collections.OrderedDict', 'OrderedDict', (["{'selected': {'color': '#ff4949', 'marker': 'o', 's': 12}, 'intersection':\n {'color': '#ff4949', 'marker': 'x', 's': 12}, '34-residue': {'color':\n '#adb5bd', 'marker': 'x', 's': 12}, 'others': {'color': '#adb5bd',\n 'marker': 'o', 's': 12}}"], {}), "({'selected': {'color': '#ff4949', 'marker': 'o', 's': 12},\n 'intersection': {'color': '#ff4949', 'marker': 'x', 's': 12},\n '34-residue': {'color': '#adb5bd', 'marker': 'x', 's': 12}, 'others': {\n 'color': '#adb5bd', 'marker': 'o', 's': 12}})\n", (16078, 16329), False, 'from collections import OrderedDict\n'), ((16488, 16589), 'seaborn.regplot', 
'sns.regplot', ([], {'x': '"""importance"""', 'y': '"""polymorphism"""', 'data': 'df', 'ax': 'ax', 'fit_reg': '(True)', 'scatter_kws': "{'s': 0}"}), "(x='importance', y='polymorphism', data=df, ax=ax, fit_reg=True,\n scatter_kws={'s': 0})\n", (16499, 16589), True, 'import seaborn as sns\n'), ((18173, 18219), 'scipy.stats.pearsonr', 'pearsonr', (["df['importance']", "df['polymorphism']"], {}), "(df['importance'], df['polymorphism'])\n", (18181, 18219), False, 'from scipy.stats import pearsonr\n'), ((18733, 18843), 'logomaker.alignment_to_matrix', 'lm.alignment_to_matrix', ([], {'sequences': 'peptides', 'to_type': '"""probability"""', 'characters_to_ignore': '"""."""', 'pseudocount': '(0)'}), "(sequences=peptides, to_type='probability',\n characters_to_ignore='.', pseudocount=0)\n", (18755, 18843), True, 'import logomaker as lm\n'), ((19332, 19381), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'figsize': 'figsize', 'dpi': 'self.dpi'}), '(1, 1, figsize=figsize, dpi=self.dpi)\n', (19344, 19381), True, 'import matplotlib.pyplot as plt\n'), ((19390, 19479), 'seaborn.scatterplot', 'sns.scatterplot', ([], {'data': 'df', 'x': '"""mask_score"""', 'y': '"""polymorphism"""', 'hue': '"""residue_tag"""', 'ax': 'ax'}), "(data=df, x='mask_score', y='polymorphism', hue=\n 'residue_tag', ax=ax)\n", (19405, 19479), True, 'import seaborn as sns\n'), ((20184, 20230), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)'], {'figsize': 'figsize', 'dpi': 'self.dpi'}), '(1, figsize=figsize, dpi=self.dpi)\n', (20196, 20230), True, 'import matplotlib.pyplot as plt\n'), ((21120, 21133), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (21131, 21133), False, 'from collections import OrderedDict\n'), ((21156, 21175), 'os.listdir', 'os.listdir', (['dirname'], {}), '(dirname)\n', (21166, 21175), False, 'import os, sys, re, json, random, importlib\n'), ((21848, 21861), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (21859, 21861), False, 'from collections 
import OrderedDict\n'), ((22641, 22669), 'numpy.array', 'np.array', (['([0] * self.mhc_len)'], {}), '([0] * self.mhc_len)\n', (22649, 22669), True, 'import numpy as np\n'), ((23092, 23198), 'logomaker.alignment_to_matrix', 'lm.alignment_to_matrix', ([], {'sequences': 'seqs', 'to_type': '"""probability"""', 'characters_to_ignore': '"""."""', 'pseudocount': '(0)'}), "(sequences=seqs, to_type='probability',\n characters_to_ignore='.', pseudocount=0)\n", (23114, 23198), True, 'import logomaker as lm\n'), ((23257, 23297), 'pandas.concat', 'pd.concat', (['[temp_df, seqlogo_df]'], {'axis': '(0)'}), '([temp_df, seqlogo_df], axis=0)\n', (23266, 23297), True, 'import pandas as pd\n'), ((24527, 24558), 're.match', 're.match', (['"""[ABC][0-9]+"""', 'allele'], {}), "('[ABC][0-9]+', allele)\n", (24535, 24558), False, 'import os, sys, re, json, random, importlib\n'), ((8886, 8960), 'scipy.cluster.hierarchy.linkage', 'hierarchy.linkage', (['df'], {'method': 'method', 'metric': 'metric', 'optimal_ordering': '(True)'}), '(df, method=method, metric=metric, optimal_ordering=True)\n', (8903, 8960), False, 'from scipy.cluster import hierarchy\n'), ((9007, 9083), 'scipy.cluster.hierarchy.linkage', 'hierarchy.linkage', (['df.T'], {'method': 'method', 'metric': 'metric', 'optimal_ordering': '(True)'}), '(df.T, method=method, metric=metric, optimal_ordering=True)\n', (9024, 9083), False, 'from scipy.cluster import hierarchy\n'), ((10706, 10761), 'matplotlib.pyplot.savefig', 'plt.savefig', (['figfile'], {'bbox_inches': '"""tight"""', 'dpi': 'self.dpi'}), "(figfile, bbox_inches='tight', dpi=self.dpi)\n", (10717, 10761), True, 'import matplotlib.pyplot as plt\n'), ((11177, 11238), 'logomaker.Logo', 'lm.Logo', (['seqlogo_df'], {'ax': 'ax[i]', 'color_scheme': '"""skylign_protein"""'}), "(seqlogo_df, ax=ax[i], color_scheme='skylign_protein')\n", (11184, 11238), True, 'import logomaker as lm\n'), ((16650, 16858), 'seaborn.regplot', 'sns.regplot', ([], {'x': '"""importance"""', 'y': 
'"""polymorphism"""', 'data': "df[df['label'] == label]", 'ax': 'ax', 'fit_reg': '(False)', 'marker': "params['marker']", 'scatter_kws': "{'color': params['color'], 's': params['s'], 'linewidths': 0.1}"}), "(x='importance', y='polymorphism', data=df[df['label'] == label],\n ax=ax, fit_reg=False, marker=params['marker'], scatter_kws={'color':\n params['color'], 's': params['s'], 'linewidths': 0.1})\n", (16661, 16858), True, 'import seaborn as sns\n'), ((17359, 17433), 'matplotlib.patches.Rectangle', 'matplotlib.patches.Rectangle', (['(0, 0)', '(1)', '(1)'], {'fc': '"""#ff4949"""', 'edgecolor': '"""none"""'}), "((0, 0), 1, 1, fc='#ff4949', edgecolor='none')\n", (17387, 17433), False, 'import matplotlib\n'), ((17454, 17528), 'matplotlib.patches.Rectangle', 'matplotlib.patches.Rectangle', (['(0, 0)', '(1)', '(1)'], {'fc': '"""#adb5bd"""', 'edgecolor': '"""none"""'}), "((0, 0), 1, 1, fc='#adb5bd', edgecolor='none')\n", (17482, 17528), False, 'import matplotlib\n'), ((17549, 17601), 'matplotlib.pyplot.scatter', 'plt.scatter', (['[]', '[]'], {'color': '"""black"""', 'marker': '"""x"""', 's': '(12)'}), "([], [], color='black', marker='x', s=12)\n", (17560, 17601), True, 'import matplotlib.pyplot as plt\n'), ((17626, 17678), 'matplotlib.pyplot.scatter', 'plt.scatter', (['[]', '[]'], {'color': '"""black"""', 'marker': '"""o"""', 's': '(12)'}), "([], [], color='black', marker='o', s=12)\n", (17637, 17678), True, 'import matplotlib.pyplot as plt\n'), ((20501, 20571), 'matplotlib.patches.Rectangle', 'matplotlib.patches.Rectangle', (['(0, 0)', '(1)', '(1)'], {'fc': 'color', 'edgecolor': '"""none"""'}), "((0, 0), 1, 1, fc=color, edgecolor='none')\n", (20529, 20571), False, 'import matplotlib\n'), ((21192, 21223), 're.match', 're.match', (['"""[ABC][0-9]+"""', 'allele'], {}), "('[ABC][0-9]+', allele)\n", (21200, 21223), False, 'import os, sys, re, json, random, importlib\n'), ((24644, 24687), 're.match', 're.match', (['"""[ABC]\\\\*[0-9]+\\\\:[0-9]+"""', 'allele'], {}), 
"('[ABC]\\\\*[0-9]+\\\\:[0-9]+', allele)\n", (24652, 24687), False, 'import os, sys, re, json, random, importlib\n'), ((22271, 22363), 'logomaker.alignment_to_matrix', 'lm.alignment_to_matrix', ([], {'sequences': 'seqs', 'to_type': '"""information"""', 'characters_to_ignore': '"""XU"""'}), "(sequences=seqs, to_type='information',\n characters_to_ignore='XU')\n", (22293, 22363), True, 'import logomaker as lm\n'), ((22386, 22426), 'pandas.concat', 'pd.concat', (['[temp_df, seqlogo_df]'], {'axis': '(0)'}), '([temp_df, seqlogo_df], axis=0)\n', (22395, 22426), True, 'import pandas as pd\n'), ((1602, 1636), 'os.listdir', 'os.listdir', (['allele_mask_dirname[0]'], {}), '(allele_mask_dirname[0])\n', (1612, 1636), False, 'import os, sys, re, json, random, importlib\n'), ((2672, 2707), 'os.listdir', 'os.listdir', (['epitope_mask_dirname[0]'], {}), '(epitope_mask_dirname[0])\n', (2682, 2707), False, 'import os, sys, re, json, random, importlib\n'), ((21249, 21303), 'os.path.isfile', 'os.path.isfile', (["('%s/%s/record.npy' % (dirname, allele))"], {}), "('%s/%s/record.npy' % (dirname, allele))\n", (21263, 21303), False, 'import os, sys, re, json, random, importlib\n'), ((15249, 15276), 'numpy.log', 'np.log', (['self.mhc_seqlogo_df'], {}), '(self.mhc_seqlogo_df)\n', (15255, 15276), True, 'import numpy as np\n'), ((23794, 23843), 'numpy.where', 'np.where', (['(importance_count > importance_threshold)'], {}), '(importance_count > importance_threshold)\n', (23802, 23843), True, 'import numpy as np\n'), ((18920, 18938), 'numpy.log', 'np.log', (['seqlogo_df'], {}), '(seqlogo_df)\n', (18926, 18938), True, 'import numpy as np\n'), ((21451, 21517), 'numpy.load', 'np.load', (["('%s/%s/record.npy' % (dirname, allele))"], {'allow_pickle': '(True)'}), "('%s/%s/record.npy' % (dirname, allele), allow_pickle=True)\n", (21458, 21517), True, 'import numpy as np\n'), ((21648, 21714), 'numpy.load', 'np.load', (["('%s/%s/record.npy' % (dirname, allele))"], {'allow_pickle': '(True)'}), 
"('%s/%s/record.npy' % (dirname, allele), allow_pickle=True)\n", (21655, 21714), True, 'import numpy as np\n')] |
from flask_script import Manager, Server
from flask_migrate import Migrate, MigrateCommand
from app import create_app, db
from app.models import User, Article, Category, Comment, Quote
app = create_app('development')
manager = Manager(app)
migrate= Migrate(app, db)
manager.add_command('db', MigrateCommand)
manager.add_command('server', Server)
@manager.shell
def make_shell_context():
return dict(app = app, db = db, User= User, Article= Article, Category= Category, Comment= Comment, Quote= Quote)
if __name__ == '__main__':
manager.run() | [
"flask_script.Manager",
"flask_migrate.Migrate",
"app.create_app"
] | [((193, 218), 'app.create_app', 'create_app', (['"""development"""'], {}), "('development')\n", (203, 218), False, 'from app import create_app, db\n'), ((230, 242), 'flask_script.Manager', 'Manager', (['app'], {}), '(app)\n', (237, 242), False, 'from flask_script import Manager, Server\n'), ((252, 268), 'flask_migrate.Migrate', 'Migrate', (['app', 'db'], {}), '(app, db)\n', (259, 268), False, 'from flask_migrate import Migrate, MigrateCommand\n')] |
from django.urls import path
from . import views
urlpatterns = [
path("", views.home, name="home"),
path("faq/", views.faq, name="faq"),
path("plagiarism_policy/", views.plagiarism_policy,
name="plagiarism_policy"),
path("privacy_policy/", views.privacy_policy, name="privacy_policy"),
path("post_login/", views.index, name="post_login"),
path("save_partnership_contact_form/", views.save_partnership_contact_form,
name="save_partnership_contact_form"),
path("500/", views.test_500),
path("404/", views.test_404),
]
| [
"django.urls.path"
] | [((70, 103), 'django.urls.path', 'path', (['""""""', 'views.home'], {'name': '"""home"""'}), "('', views.home, name='home')\n", (74, 103), False, 'from django.urls import path\n'), ((109, 144), 'django.urls.path', 'path', (['"""faq/"""', 'views.faq'], {'name': '"""faq"""'}), "('faq/', views.faq, name='faq')\n", (113, 144), False, 'from django.urls import path\n'), ((150, 227), 'django.urls.path', 'path', (['"""plagiarism_policy/"""', 'views.plagiarism_policy'], {'name': '"""plagiarism_policy"""'}), "('plagiarism_policy/', views.plagiarism_policy, name='plagiarism_policy')\n", (154, 227), False, 'from django.urls import path\n'), ((242, 310), 'django.urls.path', 'path', (['"""privacy_policy/"""', 'views.privacy_policy'], {'name': '"""privacy_policy"""'}), "('privacy_policy/', views.privacy_policy, name='privacy_policy')\n", (246, 310), False, 'from django.urls import path\n'), ((316, 367), 'django.urls.path', 'path', (['"""post_login/"""', 'views.index'], {'name': '"""post_login"""'}), "('post_login/', views.index, name='post_login')\n", (320, 367), False, 'from django.urls import path\n'), ((374, 491), 'django.urls.path', 'path', (['"""save_partnership_contact_form/"""', 'views.save_partnership_contact_form'], {'name': '"""save_partnership_contact_form"""'}), "('save_partnership_contact_form/', views.save_partnership_contact_form,\n name='save_partnership_contact_form')\n", (378, 491), False, 'from django.urls import path\n'), ((502, 530), 'django.urls.path', 'path', (['"""500/"""', 'views.test_500'], {}), "('500/', views.test_500)\n", (506, 530), False, 'from django.urls import path\n'), ((536, 564), 'django.urls.path', 'path', (['"""404/"""', 'views.test_404'], {}), "('404/', views.test_404)\n", (540, 564), False, 'from django.urls import path\n')] |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from stevedore import extension
from heat.engine import clients
from heat.engine import environment
from heat.engine import plugin_manager
def _register_resources(env, type_pairs):
for res_name, res_class in type_pairs:
env.register_class(res_name, res_class)
def _register_constraints(env, type_pairs):
for constraint_name, constraint in type_pairs:
env.register_constraint(constraint_name, constraint)
def _register_stack_lifecycle_plugins(env, type_pairs):
for stack_lifecycle_name, stack_lifecycle_class in type_pairs:
env.register_stack_lifecycle_plugin(stack_lifecycle_name,
stack_lifecycle_class)
def _get_mapping(namespace):
mgr = extension.ExtensionManager(
namespace=namespace,
invoke_on_load=False)
return [[name, mgr[name].plugin] for name in mgr.names()]
_environment = None
def global_env():
if _environment is None:
initialise()
return _environment
def initialise():
global _environment
if _environment is not None:
return
clients.initialise()
global_env = environment.Environment({}, user_env=False)
_load_global_environment(global_env)
_environment = global_env
def _load_global_environment(env):
_load_global_resources(env)
environment.read_global_environment(env)
def _load_global_resources(env):
_register_constraints(env, _get_mapping('heat.constraints'))
_register_stack_lifecycle_plugins(
env,
_get_mapping('heat.stack_lifecycle_plugins'))
manager = plugin_manager.PluginManager(__name__)
# Sometimes resources should not be available for registration in Heat due
# to unsatisfied dependencies. We look first for the function
# 'available_resource_mapping', which should return the filtered resources.
# If it is not found, we look for the legacy 'resource_mapping'.
resource_mapping = plugin_manager.PluginMapping(['available_resource',
'resource'])
constraint_mapping = plugin_manager.PluginMapping('constraint')
_register_resources(env, resource_mapping.load_all(manager))
_register_constraints(env, constraint_mapping.load_all(manager))
def list_opts():
from heat.engine.resources.aws.lb import loadbalancer
yield None, loadbalancer.loadbalancer_opts
| [
"heat.engine.plugin_manager.PluginManager",
"stevedore.extension.ExtensionManager",
"heat.engine.clients.initialise",
"heat.engine.environment.Environment",
"heat.engine.environment.read_global_environment",
"heat.engine.plugin_manager.PluginMapping"
] | [((1307, 1376), 'stevedore.extension.ExtensionManager', 'extension.ExtensionManager', ([], {'namespace': 'namespace', 'invoke_on_load': '(False)'}), '(namespace=namespace, invoke_on_load=False)\n', (1333, 1376), False, 'from stevedore import extension\n'), ((1669, 1689), 'heat.engine.clients.initialise', 'clients.initialise', ([], {}), '()\n', (1687, 1689), False, 'from heat.engine import clients\n'), ((1708, 1751), 'heat.engine.environment.Environment', 'environment.Environment', (['{}'], {'user_env': '(False)'}), '({}, user_env=False)\n', (1731, 1751), False, 'from heat.engine import environment\n'), ((1896, 1936), 'heat.engine.environment.read_global_environment', 'environment.read_global_environment', (['env'], {}), '(env)\n', (1931, 1936), False, 'from heat.engine import environment\n'), ((2158, 2196), 'heat.engine.plugin_manager.PluginManager', 'plugin_manager.PluginManager', (['__name__'], {}), '(__name__)\n', (2186, 2196), False, 'from heat.engine import plugin_manager\n'), ((2514, 2578), 'heat.engine.plugin_manager.PluginMapping', 'plugin_manager.PluginMapping', (["['available_resource', 'resource']"], {}), "(['available_resource', 'resource'])\n", (2542, 2578), False, 'from heat.engine import plugin_manager\n'), ((2657, 2699), 'heat.engine.plugin_manager.PluginMapping', 'plugin_manager.PluginMapping', (['"""constraint"""'], {}), "('constraint')\n", (2685, 2699), False, 'from heat.engine import plugin_manager\n')] |
import os
from datetime import datetime
directory = "../runs"
current = os.path.join(directory, ".current")
class Run:
def __init__(self, runName):
run = os.path.join(directory, runName)
self.model = os.path.join(run, "model.h5")
self.log = os.path.join(run, "log.csv")
self.accuracy = os.path.join(run, "accuracy.png")
self.modelDiagram = os.path.join(run, "model.png")
self.modelSummary = os.path.join(run, "modelSummary")
def make():
runName = f"{datetime.now():%m-%d_%H%M}"
newRun = os.path.join(directory, runName)
os.mkdir(newRun)
with open(current, 'w') as f:
f.write(runName)
def loadFromName(runName):
return Run(runName)
def loadCurrent():
with open(current) as f:
return loadFromName(f.readline())
def has(path):
return os.path.isfile(path)
| [
"os.path.isfile",
"datetime.datetime.now",
"os.path.join",
"os.mkdir"
] | [((73, 108), 'os.path.join', 'os.path.join', (['directory', '""".current"""'], {}), "(directory, '.current')\n", (85, 108), False, 'import os\n'), ((550, 582), 'os.path.join', 'os.path.join', (['directory', 'runName'], {}), '(directory, runName)\n', (562, 582), False, 'import os\n'), ((587, 603), 'os.mkdir', 'os.mkdir', (['newRun'], {}), '(newRun)\n', (595, 603), False, 'import os\n'), ((837, 857), 'os.path.isfile', 'os.path.isfile', (['path'], {}), '(path)\n', (851, 857), False, 'import os\n'), ((168, 200), 'os.path.join', 'os.path.join', (['directory', 'runName'], {}), '(directory, runName)\n', (180, 200), False, 'import os\n'), ((222, 251), 'os.path.join', 'os.path.join', (['run', '"""model.h5"""'], {}), "(run, 'model.h5')\n", (234, 251), False, 'import os\n'), ((271, 299), 'os.path.join', 'os.path.join', (['run', '"""log.csv"""'], {}), "(run, 'log.csv')\n", (283, 299), False, 'import os\n'), ((324, 357), 'os.path.join', 'os.path.join', (['run', '"""accuracy.png"""'], {}), "(run, 'accuracy.png')\n", (336, 357), False, 'import os\n'), ((386, 416), 'os.path.join', 'os.path.join', (['run', '"""model.png"""'], {}), "(run, 'model.png')\n", (398, 416), False, 'import os\n'), ((445, 478), 'os.path.join', 'os.path.join', (['run', '"""modelSummary"""'], {}), "(run, 'modelSummary')\n", (457, 478), False, 'import os\n'), ((509, 523), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (521, 523), False, 'from datetime import datetime\n')] |
from utils import *
import torch
import sys
import numpy as np
import time
import torchvision
from torch.autograd import Variable
import torchvision.transforms as transforms
import torchvision.datasets as datasets
def validate_pgd(val_loader, model, criterion, K, step, configs, logger, save_image=False, HE=False):
# Mean/Std for normalization
mean = torch.Tensor(np.array(configs.TRAIN.mean)[:, np.newaxis, np.newaxis])
mean = mean.expand(3,configs.DATA.crop_size, configs.DATA.crop_size).cuda()
std = torch.Tensor(np.array(configs.TRAIN.std)[:, np.newaxis, np.newaxis])
std = std.expand(3, configs.DATA.crop_size, configs.DATA.crop_size).cuda()
# Initiate the meters
batch_time = AverageMeter()
losses = AverageMeter()
top1 = AverageMeter()
top5 = AverageMeter()
eps = configs.ADV.clip_eps
model.eval()
end = time.time()
logger.info(pad_str(' PGD eps: {}, K: {}, step: {} '.format(eps, K, step)))
if HE == True:
is_HE = '_HE'
else:
is_HE = ''
if configs.pretrained:
is_HE = '_pretrained'
for i, (input, target) in enumerate(val_loader):
input = input.cuda(non_blocking=True)
target = target.cuda(non_blocking=True)
#save original images
if save_image == True and i < 2:
original_images_save = input.clone()
for o in range(input.size(0)):
torchvision.utils.save_image(original_images_save[o, :, :, :], 'saved_images/original_images'+is_HE+'/{}.png'.format(o + configs.DATA.batch_size*i))
randn = torch.FloatTensor(input.size()).uniform_(-eps, eps).cuda()
input += randn
input.clamp_(0, 1.0)
orig_input = input.clone()
for _ in range(K):
invar = Variable(input, requires_grad=True)
in1 = invar - mean
in1.div_(std)
output = model(in1)
ascend_loss = criterion(output, target)
ascend_grad = torch.autograd.grad(ascend_loss, invar)[0]
pert = fgsm(ascend_grad, step)
# Apply purturbation
input += pert.data
input = torch.max(orig_input-eps, input)
input = torch.min(orig_input+eps, input)
input.clamp_(0, 1.0)
#save adv images
if save_image == True and i < 2:
adv_images_save = input.clone()
for o in range(input.size(0)):
torchvision.utils.save_image(adv_images_save[o, :, :, :], 'saved_images/adv_images'+is_HE+'/{}.png'.format(o + configs.DATA.batch_size*i))
#save scaled perturbation
perturbation = input - orig_input
perturbation.clamp_(-eps,eps)
scaled_perturbation = (perturbation.clone() + eps) / (2 * eps)
scaled_perturbation.clamp_(0, 1.0)
if save_image == True and i < 2:
for o in range(input.size(0)):
torchvision.utils.save_image(scaled_perturbation[o, :, :, :], 'saved_images/scaled_perturbation'+is_HE+'/{}.png'.format(o + configs.DATA.batch_size*i))
input.sub_(mean).div_(std)
with torch.no_grad():
# compute output
output = model(input)
loss = criterion(output, target)
# measure accuracy and record loss
prec1, prec5 = accuracy(output, target, topk=(1, 5))
losses.update(loss.item(), input.size(0))
top1.update(prec1[0], input.size(0))
top5.update(prec5[0], input.size(0))
# measure elapsed time
batch_time.update(time.time() - end)
end = time.time()
if i % configs.TRAIN.print_freq == 0:
print('PGD Test: [{0}/{1}]\t'
'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
'Loss {loss.val:.4f} ({loss.avg:.4f})\t'
'Prec@1 {top1.val:.3f} ({top1.avg:.3f})\t'
'Prec@5 {top5.val:.3f} ({top5.avg:.3f})'.format(
i, len(val_loader), batch_time=batch_time, loss=losses,
top1=top1, top5=top5))
sys.stdout.flush()
print(' PGD Final Prec@1 {top1.avg:.3f} Prec@5 {top5.avg:.3f}'
.format(top1=top1, top5=top5))
return top1.avg
def validate(val_loader, model, criterion, configs, logger):
# Mean/Std for normalization
mean = torch.Tensor(np.array(configs.TRAIN.mean)[:, np.newaxis, np.newaxis])
mean = mean.expand(3,configs.DATA.crop_size, configs.DATA.crop_size).cuda()
std = torch.Tensor(np.array(configs.TRAIN.std)[:, np.newaxis, np.newaxis])
std = std.expand(3, configs.DATA.crop_size, configs.DATA.crop_size).cuda()
# Initiate the meters
batch_time = AverageMeter()
losses = AverageMeter()
top1 = AverageMeter()
top5 = AverageMeter()
# switch to evaluate mode
model.eval()
end = time.time()
for i, (input, target) in enumerate(val_loader):
with torch.no_grad():
input = input.cuda(non_blocking=True)
target = target.cuda(non_blocking=True)
# compute output
input = input - mean
input.div_(std)
output = model(input)
loss = criterion(output, target)
# measure accuracy and record loss
prec1, prec5 = accuracy(output, target, topk=(1, 5))
losses.update(loss.item(), input.size(0))
top1.update(prec1[0], input.size(0))
top5.update(prec5[0], input.size(0))
# measure elapsed time
batch_time.update(time.time() - end)
end = time.time()
if i % configs.TRAIN.print_freq == 0:
print('Test: [{0}/{1}]\t'
'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
'Loss {loss.val:.4f} ({loss.avg:.4f})\t'
'Prec@1 {top1.val:.3f} ({top1.avg:.3f})\t'
'Prec@5 {top5.val:.3f} ({top5.avg:.3f})'.format(
i, len(val_loader), batch_time=batch_time, loss=losses,
top1=top1, top5=top5))
sys.stdout.flush()
print(' Final Prec@1 {top1.avg:.3f} Prec@5 {top5.avg:.3f}'
.format(top1=top1, top5=top5))
return top1.avg
def validate_ImagetNet_C(val_loader_name, model, criterion, configs, logger):
# Mean/Std for normalization
mean = torch.Tensor(np.array(configs.TRAIN.mean)[:, np.newaxis, np.newaxis])
mean = mean.expand(3,configs.DATA.crop_size, configs.DATA.crop_size).cuda()
std = torch.Tensor(np.array(configs.TRAIN.std)[:, np.newaxis, np.newaxis])
std = std.expand(3, configs.DATA.crop_size, configs.DATA.crop_size).cuda()
# switch to evaluate mode
model.eval()
fil_index = ['/1','/2','/3','/4','/5']
avg_return = 0
for f in fil_index:
valdir = os.path.join(configs.data, val_loader_name+f)
print(' File: ', valdir)
val_loader = torch.utils.data.DataLoader(
datasets.ImageFolder(valdir, transforms.Compose([
transforms.Resize(configs.DATA.img_size),
transforms.CenterCrop(configs.DATA.crop_size),
transforms.ToTensor(),
])),
batch_size=configs.DATA.batch_size, shuffle=False,
num_workers=configs.DATA.workers, pin_memory=True)
# Initiate the meters
top1 = AverageMeter()
end = time.time()
for i, (input, target) in enumerate(val_loader):
with torch.no_grad():
input = input.cuda(non_blocking=True)
target = target.cuda(non_blocking=True)
# compute output
input = input - mean
input.div_(std)
output = model(input)
# measure accuracy and record loss
prec1,_ = accuracy(output, target, topk=(1,2))
top1.update(prec1[0], input.size(0))
# if i % configs.TRAIN.print_freq == 0:
# print('PGD Test: [{0}/{1}]\t'
# 'Prec@1 {top1.val:.3f} ({top1.avg:.3f})'.format(
# i, len(val_loader),top1=top1))
# print('Time: ', time.time() - end)
# sys.stdout.flush()
print('Prec: ',top1.avg.cpu().item())
avg_return += top1.avg.cpu().item()
print('Avergae Classification Accuracy is: ', avg_return / 5.)
return
| [
"torchvision.transforms.CenterCrop",
"torchvision.transforms.ToTensor",
"torch.max",
"torch.min",
"numpy.array",
"torch.autograd.grad",
"torchvision.transforms.Resize",
"torch.no_grad",
"sys.stdout.flush",
"torch.autograd.Variable",
"time.time"
] | [((873, 884), 'time.time', 'time.time', ([], {}), '()\n', (882, 884), False, 'import time\n'), ((4940, 4951), 'time.time', 'time.time', ([], {}), '()\n', (4949, 4951), False, 'import time\n'), ((7509, 7520), 'time.time', 'time.time', ([], {}), '()\n', (7518, 7520), False, 'import time\n'), ((377, 405), 'numpy.array', 'np.array', (['configs.TRAIN.mean'], {}), '(configs.TRAIN.mean)\n', (385, 405), True, 'import numpy as np\n'), ((537, 564), 'numpy.array', 'np.array', (['configs.TRAIN.std'], {}), '(configs.TRAIN.std)\n', (545, 564), True, 'import numpy as np\n'), ((1808, 1843), 'torch.autograd.Variable', 'Variable', (['input'], {'requires_grad': '(True)'}), '(input, requires_grad=True)\n', (1816, 1843), False, 'from torch.autograd import Variable\n'), ((2181, 2215), 'torch.max', 'torch.max', (['(orig_input - eps)', 'input'], {}), '(orig_input - eps, input)\n', (2190, 2215), False, 'import torch\n'), ((2234, 2268), 'torch.min', 'torch.min', (['(orig_input + eps)', 'input'], {}), '(orig_input + eps, input)\n', (2243, 2268), False, 'import torch\n'), ((3159, 3174), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (3172, 3174), False, 'import torch\n'), ((3652, 3663), 'time.time', 'time.time', ([], {}), '()\n', (3661, 3663), False, 'import time\n'), ((4445, 4473), 'numpy.array', 'np.array', (['configs.TRAIN.mean'], {}), '(configs.TRAIN.mean)\n', (4453, 4473), True, 'import numpy as np\n'), ((4605, 4632), 'numpy.array', 'np.array', (['configs.TRAIN.std'], {}), '(configs.TRAIN.std)\n', (4613, 4632), True, 'import numpy as np\n'), ((5018, 5033), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (5031, 5033), False, 'import torch\n'), ((5675, 5686), 'time.time', 'time.time', ([], {}), '()\n', (5684, 5686), False, 'import time\n'), ((6481, 6509), 'numpy.array', 'np.array', (['configs.TRAIN.mean'], {}), '(configs.TRAIN.mean)\n', (6489, 6509), True, 'import numpy as np\n'), ((6641, 6668), 'numpy.array', 'np.array', (['configs.TRAIN.std'], {}), '(configs.TRAIN.std)\n', 
(6649, 6668), True, 'import numpy as np\n'), ((2011, 2050), 'torch.autograd.grad', 'torch.autograd.grad', (['ascend_loss', 'invar'], {}), '(ascend_loss, invar)\n', (2030, 2050), False, 'import torch\n'), ((4176, 4194), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (4192, 4194), False, 'import sys\n'), ((6195, 6213), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (6211, 6213), False, 'import sys\n'), ((7595, 7610), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (7608, 7610), False, 'import torch\n'), ((3615, 3626), 'time.time', 'time.time', ([], {}), '()\n', (3624, 3626), False, 'import time\n'), ((5638, 5649), 'time.time', 'time.time', ([], {}), '()\n', (5647, 5649), False, 'import time\n'), ((7146, 7186), 'torchvision.transforms.Resize', 'transforms.Resize', (['configs.DATA.img_size'], {}), '(configs.DATA.img_size)\n', (7163, 7186), True, 'import torchvision.transforms as transforms\n'), ((7204, 7249), 'torchvision.transforms.CenterCrop', 'transforms.CenterCrop', (['configs.DATA.crop_size'], {}), '(configs.DATA.crop_size)\n', (7225, 7249), True, 'import torchvision.transforms as transforms\n'), ((7267, 7288), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (7286, 7288), True, 'import torchvision.transforms as transforms\n')] |
import numpy as np
from numexpr_kernel import numexpr_kernel
from numba_kernel import numba_kernel
N = 10000
x = np.random.rand(N)
y = np.random.rand(N)
z = np.random.rand(N)
tau = np.random.rand(N)
r1 = numexpr_kernel(x, y, z, tau)
r1 = numexpr_kernel(x, y, z, tau)
r2 = np.zeros(N, dtype=float)
numba_kernel(x, y, z, tau, r2, N)
numba_kernel(x, y, z, tau, r2, N)
| [
"numexpr_kernel.numexpr_kernel",
"numpy.zeros",
"numba_kernel.numba_kernel",
"numpy.random.rand"
] | [((114, 131), 'numpy.random.rand', 'np.random.rand', (['N'], {}), '(N)\n', (128, 131), True, 'import numpy as np\n'), ((136, 153), 'numpy.random.rand', 'np.random.rand', (['N'], {}), '(N)\n', (150, 153), True, 'import numpy as np\n'), ((158, 175), 'numpy.random.rand', 'np.random.rand', (['N'], {}), '(N)\n', (172, 175), True, 'import numpy as np\n'), ((182, 199), 'numpy.random.rand', 'np.random.rand', (['N'], {}), '(N)\n', (196, 199), True, 'import numpy as np\n'), ((206, 234), 'numexpr_kernel.numexpr_kernel', 'numexpr_kernel', (['x', 'y', 'z', 'tau'], {}), '(x, y, z, tau)\n', (220, 234), False, 'from numexpr_kernel import numexpr_kernel\n'), ((240, 268), 'numexpr_kernel.numexpr_kernel', 'numexpr_kernel', (['x', 'y', 'z', 'tau'], {}), '(x, y, z, tau)\n', (254, 268), False, 'from numexpr_kernel import numexpr_kernel\n'), ((274, 298), 'numpy.zeros', 'np.zeros', (['N'], {'dtype': 'float'}), '(N, dtype=float)\n', (282, 298), True, 'import numpy as np\n'), ((299, 332), 'numba_kernel.numba_kernel', 'numba_kernel', (['x', 'y', 'z', 'tau', 'r2', 'N'], {}), '(x, y, z, tau, r2, N)\n', (311, 332), False, 'from numba_kernel import numba_kernel\n'), ((333, 366), 'numba_kernel.numba_kernel', 'numba_kernel', (['x', 'y', 'z', 'tau', 'r2', 'N'], {}), '(x, y, z, tau, r2, N)\n', (345, 366), False, 'from numba_kernel import numba_kernel\n')] |
"""
This module give the classification results for test data using SVM with RBF
kernel.
Email: <EMAIL>
Dtd: 2 - August - 2020
Parameters
----------
classification_type : string
DESCRIPTION - classification_type == "binary_class" loads binary classification artificial data.
classification_type == "multi_class" loads multiclass artificial data
folder_name : string
DESCRIPTION - the name of the folder to store results. For eg., if
folder_name = "hnb", then this function will create two folder "hnb-svm"
and "hnb-svm_rbf" to save the classification report.
target_names : array, 1D, string
DESCRIPTION - if there are two classes, then target_names = ['class-0', class-1]
Note- At the present version of the code, the results for binary classification
and five class classification will be saved.
Returns : None
-------
Computes the accuracy_svm_rbf, fscore_svm_rbf
"""
import os
import numpy as np
from sklearn.metrics import f1_score, accuracy_score
from sklearn import svm
from sklearn.metrics import confusion_matrix as cm
from sklearn.metrics import classification_report
from load_data_synthetic import get_data
#from Codes import classification_report_csv_
classification_type = "concentric_circle_noise"
folder_name = "full-testdata"
target_names = ['class-0', 'class-1']
path = os.getcwd()
result_path_svm_rbf = path + '/NEUROCHAOS-RESULTS/' + classification_type + '/' + folder_name +'-svm_rbf/'
# Creating Folder to save the results
try:
os.makedirs(result_path_svm_rbf)
except OSError:
print("Creation of the result directory %s failed" % result_path_svm_rbf)
else:
print("Successfully created the result directory %s" % result_path_svm_rbf)
full_artificial_data, full_artificial_label, full_artificial_test_data, full_artificial_test_label = get_data(classification_type)
num_classes = len(np.unique(full_artificial_label)) # Number of classes
print("**** Genome data details ******")
for class_label in range(np.max(full_artificial_label)+1):
print("Total Data instance in Class -", class_label, " = ", full_artificial_label.tolist().count([class_label]))
print(" train data = ", (full_artificial_data.shape[0]))
print("val data = ", (full_artificial_test_data.shape[0]))
# Start of svm_rbf classifier
svm_rbf_classifier = svm.SVC(kernel='rbf', gamma='scale')
svm_rbf_classifier.fit(full_artificial_data, full_artificial_label[:, 0])
predicted_svm_rbf_val_label = svm_rbf_classifier.predict(full_artificial_test_data)
acc_svm_rbf = accuracy_score(full_artificial_test_label, predicted_svm_rbf_val_label)*100
f1score_svm_rbf = f1_score(full_artificial_test_label, predicted_svm_rbf_val_label, average="macro")
report_svm_rbf = classification_report(full_artificial_test_label, predicted_svm_rbf_val_label, target_names=target_names)
# Saving the classification report to csv file for svm_rbf classifier.
print(report_svm_rbf)
#classification_report_csv_(report_svm_rbf, num_classes).to_csv(result_path_svm_rbf+'svm_rbf_report_'+ str(iterations) +'.csv', index=False)
confusion_matrix_svm_rbf = cm(full_artificial_test_label, predicted_svm_rbf_val_label)
print("Confusion matrixfor svm_rbf\n", confusion_matrix_svm_rbf)
# End of svm_rbf classifier.
# saving the f1-score
np.save(result_path_svm_rbf + 'f1score.npy', f1score_svm_rbf)
| [
"sklearn.metrics.f1_score",
"sklearn.metrics.confusion_matrix",
"os.makedirs",
"numpy.unique",
"sklearn.metrics.classification_report",
"os.getcwd",
"load_data_synthetic.get_data",
"numpy.max",
"sklearn.metrics.accuracy_score",
"numpy.save",
"sklearn.svm.SVC"
] | [((1369, 1380), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1378, 1380), False, 'import os\n'), ((1867, 1896), 'load_data_synthetic.get_data', 'get_data', (['classification_type'], {}), '(classification_type)\n', (1875, 1896), False, 'from load_data_synthetic import get_data\n'), ((2376, 2412), 'sklearn.svm.SVC', 'svm.SVC', ([], {'kernel': '"""rbf"""', 'gamma': '"""scale"""'}), "(kernel='rbf', gamma='scale')\n", (2383, 2412), False, 'from sklearn import svm\n'), ((2685, 2772), 'sklearn.metrics.f1_score', 'f1_score', (['full_artificial_test_label', 'predicted_svm_rbf_val_label'], {'average': '"""macro"""'}), "(full_artificial_test_label, predicted_svm_rbf_val_label, average=\n 'macro')\n", (2693, 2772), False, 'from sklearn.metrics import f1_score, accuracy_score\n'), ((2786, 2895), 'sklearn.metrics.classification_report', 'classification_report', (['full_artificial_test_label', 'predicted_svm_rbf_val_label'], {'target_names': 'target_names'}), '(full_artificial_test_label,\n predicted_svm_rbf_val_label, target_names=target_names)\n', (2807, 2895), False, 'from sklearn.metrics import classification_report\n'), ((3161, 3220), 'sklearn.metrics.confusion_matrix', 'cm', (['full_artificial_test_label', 'predicted_svm_rbf_val_label'], {}), '(full_artificial_test_label, predicted_svm_rbf_val_label)\n', (3163, 3220), True, 'from sklearn.metrics import confusion_matrix as cm\n'), ((3343, 3404), 'numpy.save', 'np.save', (["(result_path_svm_rbf + 'f1score.npy')", 'f1score_svm_rbf'], {}), "(result_path_svm_rbf + 'f1score.npy', f1score_svm_rbf)\n", (3350, 3404), True, 'import numpy as np\n'), ((1544, 1576), 'os.makedirs', 'os.makedirs', (['result_path_svm_rbf'], {}), '(result_path_svm_rbf)\n', (1555, 1576), False, 'import os\n'), ((1918, 1950), 'numpy.unique', 'np.unique', (['full_artificial_label'], {}), '(full_artificial_label)\n', (1927, 1950), True, 'import numpy as np\n'), ((2590, 2661), 'sklearn.metrics.accuracy_score', 'accuracy_score', 
(['full_artificial_test_label', 'predicted_svm_rbf_val_label'], {}), '(full_artificial_test_label, predicted_svm_rbf_val_label)\n', (2604, 2661), False, 'from sklearn.metrics import f1_score, accuracy_score\n'), ((2042, 2071), 'numpy.max', 'np.max', (['full_artificial_label'], {}), '(full_artificial_label)\n', (2048, 2071), True, 'import numpy as np\n')] |
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from conditional import db
from conditional.models import models, old_models as zoo
import flask_migrate
# pylint: skip-file
old_engine = None
zoo_session = None
# Takes in param of SqlAlchemy Database Connection String
def free_the_zoo(zoo_url):
confirm = str(input('Are you sure you want to clear and re-migrate the database? (y/N): ')).strip()
if confirm == 'y':
init_zoo_db(zoo_url)
if flask_migrate.current() is not None:
flask_migrate.downgrade(tag='base')
flask_migrate.upgrade()
migrate_models()
# Connect to Zookeeper
def init_zoo_db(database_url):
global old_engine, zoo_session
old_engine = create_engine(database_url, convert_unicode=True)
zoo_session = scoped_session(sessionmaker(autocommit=False,
autoflush=False,
bind=old_engine))
zoo.Base.metadata.create_all(bind=old_engine)
def id_to_committee(comm_id):
committees = [
'Evaluations',
'Financial',
'History',
'House Improvements',
'Opcomm',
'R&D',
'Social',
'Social',
'Chairman'
]
return committees[comm_id]
def get_fid(name):
from conditional.models.models import FreshmanAccount
print(name)
return FreshmanAccount.query.filter(FreshmanAccount.name == name).first().id
# Begin the Great Migration!
def migrate_models():
print("BEGIN: freshman evals")
# ==========
tech_sems = {}
freshman_evals = [
{
'username': f.username,
'evalDate': f.voteDate,
'projectStatus': f.freshProjPass,
'signaturesMissed': f.numMissedSigs,
'socialEvents': f.socEvents,
'techSems': f.techSems,
'comments': f.comments,
'result': f.result
} for f in zoo_session.query(zoo.FreshmanEval).all()]
for f in freshman_evals:
if not f['username'].startswith('f_'):
# freshman who have completed packet and have a CSH account
eval_data = models.FreshmanEvalData(f['username'], f['signaturesMissed'])
# FIXME: Zookeeper was only pass/fail for freshman project not pending
if f['projectStatus'] == 1:
eval_data.freshman_project = 'Passed'
eval_data.social_events = f['socialEvents']
eval_data.other_notes = f['comments']
eval_data.eval_date = f['evalDate']
# TODO: conditional
if f['result'] == "pass":
eval_data.freshman_eval_result = "Passed"
elif f['result'] == "fail":
eval_data.freshman_eval_result = "Failed"
else:
eval_data.freshman_eval_result = "Pending"
if f['techSems'] is not None:
t_sems = f['techSems'].split(',')
for sem in t_sems:
if sem not in tech_sems:
tech_sems[sem] = [f['username']]
else:
tech_sems[sem].append(f['username'])
db.session.add(eval_data)
else:
# freshman not yet done with packet
# TODO FIXME The FALSE dictates that they are not given onfloor
# status
account = models.FreshmanAccount(f['username'], False)
account.eval_date = f['evalDate']
if f['techSems'] is not None:
t_sems = f['techSems'].split(',')
for sem in t_sems:
if sem not in tech_sems:
tech_sems[sem] = [f['username']]
else:
tech_sems[sem].append(f['username'])
db.session.add(account)
print("tech sems")
tech_sems.pop('', None)
print(tech_sems)
for t_sem in tech_sems:
# TODO FIXME: Is there a timestamp we can migrate for seminars?
from datetime import datetime
sem = models.TechnicalSeminar(t_sem, datetime.now())
db.session.add(sem)
db.session.flush()
db.session.refresh(sem)
print(sem.__dict__)
for m in tech_sems[t_sem]:
if m.startswith("f_"):
print(sem.id)
a = models.FreshmanSeminarAttendance(get_fid(m), sem.id)
db.session.add(a)
else:
a = models.MemberSeminarAttendance(m, sem.id)
db.session.add(a)
db.session.flush()
print("END: freshman evals")
# ==========
print("BEGIN: migrate committee meeting attendance")
# ==========
c_meetings = [
(
m.meeting_date,
m.committee_id
) for m in zoo_session.query(zoo.Attendance).all()]
c_meetings = list(set(c_meetings))
c_meetings = list(filter(lambda x: x[0] is not None, c_meetings))
c_meetings.sort(key=lambda col: col[0])
com_meetings = []
for cm in c_meetings:
m = models.CommitteeMeeting(id_to_committee(cm[1]), cm[0])
if cm[0] is None:
# fuck man
continue
db.session.add(m)
db.session.flush()
db.session.refresh(m)
com_meetings.append(cm)
c_meetings = [
(
m.username,
(
m.meeting_date,
m.committee_id
)
) for m in zoo_session.query(zoo.Attendance).all()]
for cm in c_meetings:
if cm[1][0] is None:
# fuck man
continue
if cm[1][1] == 8:
continue
if cm[0].startswith('f_'):
f = models.FreshmanCommitteeAttendance(
get_fid(cm[0]),
com_meetings.index(cm[1])
)
db.session.add(f)
else:
m = models.MemberCommitteeAttendance(cm[0], com_meetings.index(cm[1]) + 1)
db.session.add(m)
db.session.flush()
print("END: migrate committee meeting attendance")
# ==========
print("BEGIN: migrate conditionals")
# ==========
condits = [
{
"uid": c.username,
"desc": c.description,
"deadline": c.deadline,
"status": c.status
} for c in zoo_session.query(zoo.Conditional).all()]
for c in condits:
condit = models.Conditional(c['uid'], c['desc'], c['deadline'])
db.session.add(condit)
print("END: migrate conditionals")
# ==========
print("BEGIN: house meetings")
h_meetings = [hm.date for hm in zoo_session.query(zoo.HouseMeeting).all()]
h_meetings = list(set(h_meetings))
h_meetings.sort()
print(h_meetings)
house_meetings = {}
for hm in h_meetings:
m = models.HouseMeeting(hm)
db.session.add(m)
db.session.flush()
db.session.refresh(m)
house_meetings[hm.strftime("%Y-%m-%d")] = m.id
print(house_meetings)
hma = [
{
'uid': hm.username,
'date': hm.date,
'present': hm.present,
'excused': hm.excused,
'comments': hm.comments
} for hm in zoo_session.query(zoo.HouseMeeting).all()]
for a in hma:
meeting_id = house_meetings[a['date'].strftime("%Y-%m-%d")]
if a['present'] == 1:
status = "Attended"
elif a['excused'] == 1:
status = "Excused"
else:
status = "Absent"
excuse = a['comments']
if a['uid'].startswith("f_"):
# freshman
fhma = models.FreshmanHouseMeetingAttendance(
get_fid(a['uid']),
meeting_id,
excuse,
status)
db.session.add(fhma)
else:
# member
mhma = models.MemberHouseMeetingAttendance(
a['uid'],
meeting_id,
excuse,
status)
db.session.add(mhma)
print("END: house meetings")
# ==========
print("BEGIN: Major Projects")
projects = [
{
'username': mp.username,
'name': mp.project_name,
'description': mp.project_description,
'status': mp.status
} for mp in zoo_session.query(zoo.MajorProject).all()]
for p in projects:
mp = models.MajorProject(
p['username'],
p['name'],
p['description']
)
if p['status'] == 'pass':
mp.status = 'Passed'
if p['status'] == 'fail':
mp.status = 'Failed'
db.session.add(mp)
print("END: Major Projects")
# ==========
print("BEGIN: ON FLOOR")
import conditional.util.ldap as ldap
from datetime import datetime
members = [m['uid'][0].decode('utf-8') for m in ldap.ldap_get_onfloor_members()]
for m in members:
db.session.add(models.OnFloorStatusAssigned(m, datetime.now()))
print("END: ON FLOOR")
print("BEGIN: SPRING EVALS")
members = [m['uid'][0].decode('utf-8') for m in ldap.ldap_get_active_members()]
for m in members:
db.session.add(models.SpringEval(m))
print("END: SPRING EVALS")
print("BEGIN: Housing Evals")
hevals = [
{
'username': he.username,
'social_attended': he.social_attended,
'social_hosted': he.social_hosted,
'seminars_attended': he.seminars_attended,
'seminars_hosted': he.seminars_hosted,
'projects': he.projects,
'comments': he.comments
} for he in zoo_session.query(zoo.WinterEval).all()]
for he in hevals:
db.session.add(
models.HousingEvalsSubmission(
he['username'],
he['social_attended'],
he['social_hosted'],
he['seminars_attended'],
he['seminars_hosted'],
he['projects'],
he['comments']))
print("END: Housing Evals")
# Default EvalDB Settings
db.session.add(models.EvalSettings())
db.session.flush()
db.session.commit()
| [
"conditional.models.models.HousingEvalsSubmission",
"conditional.models.models.FreshmanEvalData",
"conditional.models.models.MemberSeminarAttendance",
"conditional.db.session.flush",
"conditional.models.old_models.Base.metadata.create_all",
"conditional.models.models.SpringEval",
"conditional.models.mod... | [((785, 834), 'sqlalchemy.create_engine', 'create_engine', (['database_url'], {'convert_unicode': '(True)'}), '(database_url, convert_unicode=True)\n', (798, 834), False, 'from sqlalchemy import create_engine\n'), ((1030, 1075), 'conditional.models.old_models.Base.metadata.create_all', 'zoo.Base.metadata.create_all', ([], {'bind': 'old_engine'}), '(bind=old_engine)\n', (1058, 1075), True, 'from conditional.models import models, old_models as zoo\n'), ((4618, 4636), 'conditional.db.session.flush', 'db.session.flush', ([], {}), '()\n', (4634, 4636), False, 'from conditional import db\n'), ((6055, 6073), 'conditional.db.session.flush', 'db.session.flush', ([], {}), '()\n', (6071, 6073), False, 'from conditional import db\n'), ((10204, 10222), 'conditional.db.session.flush', 'db.session.flush', ([], {}), '()\n', (10220, 10222), False, 'from conditional import db\n'), ((10227, 10246), 'conditional.db.session.commit', 'db.session.commit', ([], {}), '()\n', (10244, 10246), False, 'from conditional import db\n'), ((627, 650), 'flask_migrate.upgrade', 'flask_migrate.upgrade', ([], {}), '()\n', (648, 650), False, 'import flask_migrate\n'), ((868, 932), 'sqlalchemy.orm.sessionmaker', 'sessionmaker', ([], {'autocommit': '(False)', 'autoflush': '(False)', 'bind': 'old_engine'}), '(autocommit=False, autoflush=False, bind=old_engine)\n', (880, 932), False, 'from sqlalchemy.orm import scoped_session, sessionmaker\n'), ((4185, 4204), 'conditional.db.session.add', 'db.session.add', (['sem'], {}), '(sem)\n', (4199, 4204), False, 'from conditional import db\n'), ((4213, 4231), 'conditional.db.session.flush', 'db.session.flush', ([], {}), '()\n', (4229, 4231), False, 'from conditional import db\n'), ((4240, 4263), 'conditional.db.session.refresh', 'db.session.refresh', (['sem'], {}), '(sem)\n', (4258, 4263), False, 'from conditional import db\n'), ((5254, 5271), 'conditional.db.session.add', 'db.session.add', (['m'], {}), '(m)\n', (5268, 5271), False, 'from 
conditional import db\n'), ((5280, 5298), 'conditional.db.session.flush', 'db.session.flush', ([], {}), '()\n', (5296, 5298), False, 'from conditional import db\n'), ((5307, 5328), 'conditional.db.session.refresh', 'db.session.refresh', (['m'], {}), '(m)\n', (5325, 5328), False, 'from conditional import db\n'), ((6467, 6521), 'conditional.models.models.Conditional', 'models.Conditional', (["c['uid']", "c['desc']", "c['deadline']"], {}), "(c['uid'], c['desc'], c['deadline'])\n", (6485, 6521), False, 'from conditional.models import models, old_models as zoo\n'), ((6530, 6552), 'conditional.db.session.add', 'db.session.add', (['condit'], {}), '(condit)\n', (6544, 6552), False, 'from conditional import db\n'), ((6873, 6896), 'conditional.models.models.HouseMeeting', 'models.HouseMeeting', (['hm'], {}), '(hm)\n', (6892, 6896), False, 'from conditional.models import models, old_models as zoo\n'), ((6905, 6922), 'conditional.db.session.add', 'db.session.add', (['m'], {}), '(m)\n', (6919, 6922), False, 'from conditional import db\n'), ((6931, 6949), 'conditional.db.session.flush', 'db.session.flush', ([], {}), '()\n', (6947, 6949), False, 'from conditional import db\n'), ((6958, 6979), 'conditional.db.session.refresh', 'db.session.refresh', (['m'], {}), '(m)\n', (6976, 6979), False, 'from conditional import db\n'), ((8466, 8529), 'conditional.models.models.MajorProject', 'models.MajorProject', (["p['username']", "p['name']", "p['description']"], {}), "(p['username'], p['name'], p['description'])\n", (8485, 8529), False, 'from conditional.models import models, old_models as zoo\n'), ((8720, 8738), 'conditional.db.session.add', 'db.session.add', (['mp'], {}), '(mp)\n', (8734, 8738), False, 'from conditional import db\n'), ((10176, 10197), 'conditional.models.models.EvalSettings', 'models.EvalSettings', ([], {}), '()\n', (10195, 10197), False, 'from conditional.models import models, old_models as zoo\n'), ((521, 544), 'flask_migrate.current', 'flask_migrate.current', ([], 
{}), '()\n', (542, 544), False, 'import flask_migrate\n'), ((570, 605), 'flask_migrate.downgrade', 'flask_migrate.downgrade', ([], {'tag': '"""base"""'}), "(tag='base')\n", (593, 605), False, 'import flask_migrate\n'), ((2227, 2288), 'conditional.models.models.FreshmanEvalData', 'models.FreshmanEvalData', (["f['username']", "f['signaturesMissed']"], {}), "(f['username'], f['signaturesMissed'])\n", (2250, 2288), False, 'from conditional.models import models, old_models as zoo\n'), ((3254, 3279), 'conditional.db.session.add', 'db.session.add', (['eval_data'], {}), '(eval_data)\n', (3268, 3279), False, 'from conditional import db\n'), ((3461, 3505), 'conditional.models.models.FreshmanAccount', 'models.FreshmanAccount', (["f['username']", '(False)'], {}), "(f['username'], False)\n", (3483, 3505), False, 'from conditional.models import models, old_models as zoo\n'), ((3880, 3903), 'conditional.db.session.add', 'db.session.add', (['account'], {}), '(account)\n', (3894, 3903), False, 'from conditional import db\n'), ((4161, 4175), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4173, 4175), False, 'from datetime import datetime\n'), ((5901, 5918), 'conditional.db.session.add', 'db.session.add', (['f'], {}), '(f)\n', (5915, 5918), False, 'from conditional import db\n'), ((6032, 6049), 'conditional.db.session.add', 'db.session.add', (['m'], {}), '(m)\n', (6046, 6049), False, 'from conditional import db\n'), ((7846, 7866), 'conditional.db.session.add', 'db.session.add', (['fhma'], {}), '(fhma)\n', (7860, 7866), False, 'from conditional import db\n'), ((7921, 7994), 'conditional.models.models.MemberHouseMeetingAttendance', 'models.MemberHouseMeetingAttendance', (["a['uid']", 'meeting_id', 'excuse', 'status'], {}), "(a['uid'], meeting_id, excuse, status)\n", (7956, 7994), False, 'from conditional.models import models, old_models as zoo\n'), ((8072, 8092), 'conditional.db.session.add', 'db.session.add', (['mhma'], {}), '(mhma)\n', (8086, 8092), False, 'from 
conditional import db\n'), ((8947, 8978), 'conditional.util.ldap.ldap_get_onfloor_members', 'ldap.ldap_get_onfloor_members', ([], {}), '()\n', (8976, 8978), True, 'import conditional.util.ldap as ldap\n'), ((9187, 9217), 'conditional.util.ldap.ldap_get_active_members', 'ldap.ldap_get_active_members', ([], {}), '()\n', (9215, 9217), True, 'import conditional.util.ldap as ldap\n'), ((9264, 9284), 'conditional.models.models.SpringEval', 'models.SpringEval', (['m'], {}), '(m)\n', (9281, 9284), False, 'from conditional.models import models, old_models as zoo\n'), ((9810, 9989), 'conditional.models.models.HousingEvalsSubmission', 'models.HousingEvalsSubmission', (["he['username']", "he['social_attended']", "he['social_hosted']", "he['seminars_attended']", "he['seminars_hosted']", "he['projects']", "he['comments']"], {}), "(he['username'], he['social_attended'], he[\n 'social_hosted'], he['seminars_attended'], he['seminars_hosted'], he[\n 'projects'], he['comments'])\n", (9839, 9989), False, 'from conditional.models import models, old_models as zoo\n'), ((1452, 1510), 'conditional.models.models.FreshmanAccount.query.filter', 'FreshmanAccount.query.filter', (['(FreshmanAccount.name == name)'], {}), '(FreshmanAccount.name == name)\n', (1480, 1510), False, 'from conditional.models.models import FreshmanAccount\n'), ((4481, 4498), 'conditional.db.session.add', 'db.session.add', (['a'], {}), '(a)\n', (4495, 4498), False, 'from conditional import db\n'), ((4537, 4578), 'conditional.models.models.MemberSeminarAttendance', 'models.MemberSeminarAttendance', (['m', 'sem.id'], {}), '(m, sem.id)\n', (4567, 4578), False, 'from conditional.models import models, old_models as zoo\n'), ((4595, 4612), 'conditional.db.session.add', 'db.session.add', (['a'], {}), '(a)\n', (4609, 4612), False, 'from conditional import db\n'), ((9057, 9071), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (9069, 9071), False, 'from datetime import datetime\n')] |
# Pro-Football-Reference.com
# TEAMS
import re, os
import uuid
import json
from datetime import datetime, date, time
from bs4 import BeautifulSoup
import bs4
from Constants import *
from PathUtils import *
from PluginSupport import *
from Serialization import *
from StringUtils import *
import ProFootballReferenceFranchiseScraper as FranschiseScraper
# Module-level cache of downloaded franchise data (empty here; presumably
# filled by callers elsewhere in the plugin -- confirm before removing).
pfr_cached_franchises = dict()
# Maps Pro-Football-Reference's team abbreviations to the official NFL ones
# used by the rest of the pipeline (e.g. "CRD" on PFR is Arizona, "ARI").
pfr_abbreviation_corrections = {
    "CRD": "ARI",
    "RAV": "BAL",
    "BBA": "BUF",
    "GNB": "GB",
    "HTX": "HOU",
    "CLT": "IND",
    "KAN": "KC",
    "RAI": "LV",
    "SDG": "LAC",
    "RAM": "LAR",
    "NWE": "NE",
    "NOR": "NO",
    "SFO": "SF",
    "TAM": "TB",
    "OTI": "TEN"
}
def DownloadAllFranchises(league):
    """Download all franchises from Pro-Football-Reference and adapt them
    in place to the plugin's global teams model.

    Mutations performed per franchise: "from"/"to" are renamed to
    "fromYear"/"toYear"; each team gets "aliases", "key", an identity id,
    normalized "years" spans, and logo "assets".

    NOTE(review): the `league` argument is never referenced in this body --
    presumably kept for interface symmetry with other scrapers; confirm.
    """
    pfrFranchises = dict(FranschiseScraper.GetFranchises())
    # Adapt franchises to global teams model
    for franchise in pfrFranchises.values():
        franchiseName = deunicode(franchise.get("fullName")) or deunicode(franchise.get("name"))
        if not franchise.get("fullName"): franchise["fullName"] = franchiseName
        franchiseName = franchise["fullName"]
        # Rename the year-span keys to the global model's names.
        franchise["fromYear"] = franchise["from"]
        del(franchise["from"])
        franchise["toYear"] = franchise["to"]
        del(franchise["to"])
        for team in franchise["teams"].values():
            teamName = deunicode(team.get("fullName")) or deunicode(team.get("name"))
            # abbrev - NFL official abbreviation
            # id - identifier for the team, used by espn, relative to pfr
            # NOTE(review): `id` shadows the builtin of the same name.
            abbrev = id = deunicode(franchise["abbrev"])
            active = team.get("active") == True
            aliases = team.get("aliases") or []
            if active:
                # Fold every historical (inactive) team name into the active
                # team's alias list so old names still resolve.
                for inactiveTeam in franchise["teams"].values():
                    if inactiveTeam.get("active") == True: continue
                    inactiveName = deunicode(inactiveTeam.get("fullName")) or deunicode(inactiveTeam.get("name")) or ""
                    if inactiveName:
                        aliases.append(inactiveName)
                        if team.get("city"):
                            if inactiveName[:len(team["city"])] == team["city"]:
                                # Get any deadname cities
                                # NOTE(review): this slice equals team["city"]
                                # when the condition holds, so the appended
                                # alias is the active city, not an old one --
                                # confirm intended behavior.
                                aliases.append(deunicode(inactiveName[:len(team["city"])].strip()))
            if abbrev in pfr_abbreviation_corrections.keys():
                # Keep the PFR abbreviation as an alias, then switch to the
                # official NFL abbreviation.
                if active: aliases.append(abbrev)
                abbrev = pfr_abbreviation_corrections[abbrev]
            team["aliases"] = list(set(aliases))  # de-duplicate
            team["key"] = uuid.uuid4()
            if active:
                team["abbreviation"] = abbrev
            else:
                # Inactive teams get a composite id of abbreviation plus the
                # capitalized franchise/team name parts so ids stay unique.
                team["fullName"] = teamName
                del(team["name"])
                prefix = abbrev
                franchisePrefix = strip_to_capitals(franchiseName)
                if prefix != franchisePrefix: prefix = "%s.%s" % (prefix, franchisePrefix)
                id = prefix
                suffix = strip_to_capitals(teamName)
                if suffix != franchisePrefix:
                    id = "%s.%s" % (prefix, suffix)
                if team.get("abbreviation"): del(team["abbreviation"])
            team["ProFootballReferenceID"] = id
            team["identity"] = {"ProFootballReferenceID": id}
            # Normalize year spans into {"fromYear": ..., "toYear": ...} dicts.
            yrs = list(team["years"])
            team["years"] = []
            for span in yrs:
                team["years"].append({"fromYear":span["from"], "toYear":span["to"]})
            # Collect franchise-level and per-season logos into assets.
            assets = dict()
            if franchise.get("logo"):
                assets.setdefault("logo", [])
                assets["logo"].append({"source": "profootballreference", "url": deunicode(franchise["logo"])})
            if team.get("years"):
                for span in team["years"]:
                    for year in range(int(span["fromYear"]), int(span["toYear"])+1):
                        season = str(year)
                        if team.get(season) and team[season].get("logo"):
                            assets.setdefault("logo", [])
                            assets["logo"].append({"source": "profootballreference", "season": season, "url": deunicode(team[season]["logo"])})
            if assets:
                team["assets"] = assets
    return pfrFranchises
| [
"ProFootballReferenceFranchiseScraper.GetFranchises",
"uuid.uuid4"
] | [((741, 774), 'ProFootballReferenceFranchiseScraper.GetFranchises', 'FranschiseScraper.GetFranchises', ([], {}), '()\n', (772, 774), True, 'import ProFootballReferenceFranchiseScraper as FranschiseScraper\n'), ((2277, 2289), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (2287, 2289), False, 'import uuid\n')] |
import torch
import numpy as np
import time
import datetime
import random
from Kfold import KFold
from split_data import DataManager
from transformers import BertTokenizer
from transformers import BertTokenizer
from torch.utils.data import TensorDataset, random_split
from torch.utils.data import DataLoader, RandomSampler, SequentialSampler
from transformers import BertForSequenceClassification, AdamW, BertConfig
from transformers import get_linear_schedule_with_warmup
class KfoldBERTData(DataManager):
    """K-fold data manager that tokenizes raw sentences into BERT tensors."""

    def __init__(self, data, labels, num_folds):
        super().__init__(data, labels, num_folds)
        # Lower-cased tokenizer matching the 'bert-base-uncased' checkpoint
        # used by the trainer below.
        self.tokenizer = BertTokenizer.from_pretrained('bert-base-uncased', do_lower_case=True)

    def pre_process(self, sentences, labels):
        """Encode `sentences` and `labels` into a shuffled TensorDataset.

        Every sentence is padded/truncated to a fixed 350 word-piece
        tokens, so longer sequences are cut off.

        Fix: removed a dead pre-pass that encoded every sentence a second
        time just to compute a `max_len` that was never used.
        """
        input_ids = []
        attention_masks = []
        for sent in sentences:
            encoded_dict = self.tokenizer.encode_plus(
                sent,
                add_special_tokens = True,   # prepend [CLS] / append [SEP]
                max_length = 350,
                pad_to_max_length = True,
                return_attention_mask = True,
                return_tensors = 'pt',
                truncation=True
            )
            input_ids.append(encoded_dict['input_ids'])
            attention_masks.append(encoded_dict['attention_mask'])
        # Convert the lists into tensors.
        input_ids = torch.cat(input_ids, dim=0)
        attention_masks = torch.cat(attention_masks, dim=0)
        labels = torch.tensor(labels)
        dataset = TensorDataset(input_ids, attention_masks, labels)
        # random_split with a zero-sized remainder is used purely to shuffle
        # the dataset before returning it.
        d, _ = random_split(dataset, [len(dataset), 0])
        return d
class KfoldBERT(KFold):
    """K-fold trainer for a 4-label BERT sequence classifier.

    NOTE(review): `train` calls `model.cuda()` unconditionally, so a CUDA
    device is effectively required; `self.device` is only assigned when
    CUDA is available -- on CPU-only hosts `train` would raise
    AttributeError. Confirm whether CPU support is intended.
    """
    def __init__(self, data, labels, num_folds):
        super().__init__(data, labels, num_folds)
        self.batch_size = 8
        self.epochs = 10
        self.data = KfoldBERTData(data, labels, num_folds)
        if torch.cuda.is_available():
            self.device = torch.device("cuda")
    def flat_accuracy(self, preds, labels):
        # Fraction of argmax predictions matching the flattened labels.
        pred_flat = np.argmax(preds, axis=1).flatten()
        labels_flat = labels.flatten()
        return np.sum(pred_flat == labels_flat) / len(labels_flat)
    def format_time(self, time):
        '''
        Takes a time in seconds and returns a string hh:mm:ss

        Note: the parameter name shadows the imported `time` module
        (harmless here, since the module is not used in this method).
        '''
        elapsed_rounded = int(round((time)))
        return str(datetime.timedelta(seconds=elapsed_rounded))
    def train(self, train_dataset, val_dataset):
        """Fine-tune BERT on `train_dataset`, validating on `val_dataset`
        every epoch; returns (last epoch's validation accuracy, loss).
        Also saves a checkpoint per epoch and accumulates `training_stats`.
        """
        train_dataloader = DataLoader(
            train_dataset, # The training samples.
            sampler = RandomSampler(train_dataset), # Select batches randomly
            batch_size = self.batch_size # Trains with this batch size.
        )
        validation_dataloader = DataLoader(
            val_dataset, # The validation samples.
            sampler = SequentialSampler(val_dataset), # Pull out batches sequentially.
            batch_size = self.batch_size # Evaluate with this batch size.
        )
        model = BertForSequenceClassification.from_pretrained(
            "bert-base-uncased", # Use the 12-layer BERT model, with an uncased vocab.
            num_labels = 4, # The number of output labels--2 for binary classification.
                            # You can increase this for multi-class tasks.
            output_attentions = False, # Whether the model returns attentions weights.
            output_hidden_states = False, # Whether the model returns all hidden-states.
        )
        model.cuda()
        optimizer = AdamW(model.parameters(),
                        lr = 2e-5, # args.learning_rate - default is 5e-5, our notebook had 2e-5
                        eps = 1e-8 # args.adam_epsilon - default is 1e-8.
                        )
        total_steps = len(train_dataloader) * self.epochs
        # Linear decay from the initial LR to 0 over all training steps.
        scheduler = get_linear_schedule_with_warmup(optimizer,
                                                    num_warmup_steps = 0, # Default value in run_glue.py
                                                    num_training_steps = total_steps)
        # Fix all RNG seeds for reproducibility.
        seed_val = 42
        random.seed(seed_val)
        np.random.seed(seed_val)
        torch.manual_seed(seed_val)
        torch.cuda.manual_seed_all(seed_val)
        training_stats = []
        # Measure the total training time for the whole run.
        total_t0 = time.time()
        # For each epoch...
        for epoch_i in range(0, self.epochs):
            print("")
            print('======== Epoch {:} / {:} ========'.format(epoch_i + 1, self.epochs))
            print('Training...')
            t0 = time.time()
            total_train_loss = 0
            model.train()
            # For each batch of training data...
            for step, batch in enumerate(train_dataloader):
                # Progress update every 40 batches.
                if step % 40 == 0 and not step == 0:
                    # Calculate elapsed time in minutes.
                    elapsed = self.format_time(time.time() - t0)
                    # Report progress.
                    print(' Batch {:>5,} of {:>5,}. Elapsed: {:}.'.format(step, len(train_dataloader), elapsed))
                # Batch layout: (input ids, attention mask, labels).
                b_input_ids = batch[0].to(self.device)
                b_input_mask = batch[1].to(self.device)
                b_labels = batch[2].to(self.device)
                model.zero_grad()
                # Passing labels makes the model return the loss as well.
                loss, logits = model(b_input_ids,
                                     token_type_ids=None,
                                     attention_mask=b_input_mask,
                                     labels=b_labels)
                total_train_loss += loss.item()
                loss.backward()
                # Clip gradients to avoid the exploding-gradient problem.
                torch.nn.utils.clip_grad_norm_(model.parameters(), 1.0)
                optimizer.step()
                scheduler.step()
            # Calculate the average loss over all of the batches.
            avg_train_loss = total_train_loss / len(train_dataloader)
            # Measure how long this epoch took.
            training_time = self.format_time(time.time() - t0)
            print("")
            print(" Average training loss: {0:.2f}".format(avg_train_loss))
            print(" Training epcoh took: {:}".format(training_time))
            # ========================================
            # Validation
            # ========================================
            # After the completion of each training epoch, measure our performance on
            # our validation set.
            print("")
            print("Running Validation...")
            t0 = time.time()
            model.eval()
            # Tracking variables
            total_eval_accuracy = 0
            total_eval_loss = 0
            nb_eval_steps = 0  # NOTE(review): never incremented or read
            for batch in validation_dataloader:
                b_input_ids = batch[0].to(self.device)
                b_input_mask = batch[1].to(self.device)
                b_labels = batch[2].to(self.device)
                with torch.no_grad():
                    loss, logits = model(b_input_ids,
                                         token_type_ids=None,
                                         attention_mask=b_input_mask,
                                         labels=b_labels)
                # Accumulate the validation loss.
                total_eval_loss += loss.item()
                # Move logits and labels to CPU
                logits = logits.detach().cpu().numpy()
                label_ids = b_labels.to('cpu').numpy()
                # Calculate the accuracy for this batch of test sentences, and
                # accumulate it over all batches.
                total_eval_accuracy += self.flat_accuracy(logits, label_ids)
            # Report the final accuracy for this validation run.
            avg_val_accuracy = total_eval_accuracy / len(validation_dataloader)
            print(" Accuracy: {0:.2f}".format(avg_val_accuracy))
            # Calculate the average loss over all of the batches.
            avg_val_loss = total_eval_loss / len(validation_dataloader)
            # Measure how long the validation run took.
            validation_time = self.format_time(time.time() - t0)
            print(" Validation Loss: {0:.2f}".format(avg_val_loss))
            print(" Validation took: {:}".format(validation_time))
            # Record all statistics from this epoch.
            training_stats.append(
                {
                    'epoch': epoch_i + 1,
                    'Training Loss': avg_train_loss,
                    'Valid. Loss': avg_val_loss,
                    'Valid. Accur.': avg_val_accuracy,
                    'Training Time': training_time,
                    'Validation Time': validation_time
                }
            )
            # Checkpoint the weights after every epoch.
            torch.save(model.state_dict(), "removed_model_epoch_" + str(epoch_i + 1) +".pth")
        print("")
        print("Training complete!")
        print("Total training took {:} (h:mm:ss)".format(self.format_time(time.time()-total_t0)))
        return avg_val_accuracy, avg_val_loss
| [
"torch.cuda.is_available",
"datetime.timedelta",
"numpy.random.seed",
"torch.utils.data.SequentialSampler",
"numpy.argmax",
"torch.utils.data.TensorDataset",
"transformers.BertForSequenceClassification.from_pretrained",
"time.time",
"torch.cat",
"torch.device",
"torch.cuda.manual_seed_all",
"t... | [((638, 708), 'transformers.BertTokenizer.from_pretrained', 'BertTokenizer.from_pretrained', (['"""bert-base-uncased"""'], {'do_lower_case': '(True)'}), "('bert-base-uncased', do_lower_case=True)\n", (667, 708), False, 'from transformers import BertTokenizer\n'), ((1706, 1733), 'torch.cat', 'torch.cat', (['input_ids'], {'dim': '(0)'}), '(input_ids, dim=0)\n', (1715, 1733), False, 'import torch\n'), ((1760, 1793), 'torch.cat', 'torch.cat', (['attention_masks'], {'dim': '(0)'}), '(attention_masks, dim=0)\n', (1769, 1793), False, 'import torch\n'), ((1811, 1831), 'torch.tensor', 'torch.tensor', (['labels'], {}), '(labels)\n', (1823, 1831), False, 'import torch\n'), ((1851, 1900), 'torch.utils.data.TensorDataset', 'TensorDataset', (['input_ids', 'attention_masks', 'labels'], {}), '(input_ids, attention_masks, labels)\n', (1864, 1900), False, 'from torch.utils.data import TensorDataset, random_split\n'), ((2222, 2247), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (2245, 2247), False, 'import torch\n'), ((3360, 3497), 'transformers.BertForSequenceClassification.from_pretrained', 'BertForSequenceClassification.from_pretrained', (['"""bert-base-uncased"""'], {'num_labels': '(4)', 'output_attentions': '(False)', 'output_hidden_states': '(False)'}), "('bert-base-uncased',\n num_labels=4, output_attentions=False, output_hidden_states=False)\n", (3405, 3497), False, 'from transformers import BertForSequenceClassification, AdamW, BertConfig\n'), ((4199, 4297), 'transformers.get_linear_schedule_with_warmup', 'get_linear_schedule_with_warmup', (['optimizer'], {'num_warmup_steps': '(0)', 'num_training_steps': 'total_steps'}), '(optimizer, num_warmup_steps=0,\n num_training_steps=total_steps)\n', (4230, 4297), False, 'from transformers import get_linear_schedule_with_warmup\n'), ((4450, 4471), 'random.seed', 'random.seed', (['seed_val'], {}), '(seed_val)\n', (4461, 4471), False, 'import random\n'), ((4480, 4504), 'numpy.random.seed', 
'np.random.seed', (['seed_val'], {}), '(seed_val)\n', (4494, 4504), True, 'import numpy as np\n'), ((4513, 4540), 'torch.manual_seed', 'torch.manual_seed', (['seed_val'], {}), '(seed_val)\n', (4530, 4540), False, 'import torch\n'), ((4549, 4585), 'torch.cuda.manual_seed_all', 'torch.cuda.manual_seed_all', (['seed_val'], {}), '(seed_val)\n', (4575, 4585), False, 'import torch\n'), ((4696, 4707), 'time.time', 'time.time', ([], {}), '()\n', (4705, 4707), False, 'import time\n'), ((2279, 2299), 'torch.device', 'torch.device', (['"""cuda"""'], {}), "('cuda')\n", (2291, 2299), False, 'import torch\n'), ((2455, 2487), 'numpy.sum', 'np.sum', (['(pred_flat == labels_flat)'], {}), '(pred_flat == labels_flat)\n', (2461, 2487), True, 'import numpy as np\n'), ((2691, 2734), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': 'elapsed_rounded'}), '(seconds=elapsed_rounded)\n', (2709, 2734), False, 'import datetime\n'), ((4961, 4972), 'time.time', 'time.time', ([], {}), '()\n', (4970, 4972), False, 'import time\n'), ((7023, 7034), 'time.time', 'time.time', ([], {}), '()\n', (7032, 7034), False, 'import time\n'), ((2366, 2390), 'numpy.argmax', 'np.argmax', (['preds'], {'axis': '(1)'}), '(preds, axis=1)\n', (2375, 2390), True, 'import numpy as np\n'), ((2923, 2951), 'torch.utils.data.RandomSampler', 'RandomSampler', (['train_dataset'], {}), '(train_dataset)\n', (2936, 2951), False, 'from torch.utils.data import DataLoader, RandomSampler, SequentialSampler\n'), ((3194, 3224), 'torch.utils.data.SequentialSampler', 'SequentialSampler', (['val_dataset'], {}), '(val_dataset)\n', (3211, 3224), False, 'from torch.utils.data import DataLoader, RandomSampler, SequentialSampler\n'), ((6465, 6476), 'time.time', 'time.time', ([], {}), '()\n', (6474, 6476), False, 'import time\n'), ((7442, 7457), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (7455, 7457), False, 'import torch\n'), ((8657, 8668), 'time.time', 'time.time', ([], {}), '()\n', (8666, 8668), False, 'import time\n'), 
((9493, 9504), 'time.time', 'time.time', ([], {}), '()\n', (9502, 9504), False, 'import time\n'), ((5352, 5363), 'time.time', 'time.time', ([], {}), '()\n', (5361, 5363), False, 'import time\n')] |
#!/usr/bin/env python
# pylint: disable=invalid-name
""" Web-based proxy to a Kerberos KDC for Webathena. """
import base64
import json
import os
import select
import socket
import dns.resolver
from pyasn1.codec.der import decoder as der_decoder
from pyasn1.codec.der import encoder as der_encoder
from pyasn1.error import PyAsn1Error
from werkzeug.exceptions import HTTPException
from werkzeug.routing import Map, Rule
from werkzeug.wrappers import Request, Response
import krb_asn1
import settings
# This is the same limit used internally in MIT Kerberos it seems.
MAX_PACKET_SIZE = 4096
# How many bytes of randomness to return (128 bits) from /v1/urandom.
URANDOM_BYTES = 1024 // 8
def wait_on_sockets(socks, timeout):
    """
    Block until one of the UDP sockets in *socks* becomes readable or
    *timeout* seconds elapse. Returns the first non-empty packet read
    from a ready socket, otherwise None.
    """
    readable, _, _ = select.select(socks, [], [], timeout)
    for ready_sock in readable:
        packet = ready_sock.recv(MAX_PACKET_SIZE)
        if packet:
            return packet
    return None
# Algorithm borrowed from MIT kerberos code. This probably works or
# something.
def send_request(socks, data):
    """
    Try to deliver one request over several UDP sockets, retrying with
    exponential backoff. Returns the first reply seen, or None if every
    attempt times out.
    """
    backoff = 2
    for _attempt in range(3):
        # One pass over every server socket: fire the packet, then give
        # that round a second to produce an answer.
        for sock in socks:
            if sock.send(data) != len(data):
                continue
            reply = wait_on_sockets(socks, 1)
            if reply is not None:
                return reply
        # Nobody answered quickly; wait longer for any of them.
        reply = wait_on_sockets(socks, backoff)
        if reply is not None:
            return reply
        backoff *= 2
    return None
class WebKDC:
    """WSGI application that proxies base64-encoded Kerberos requests
    (AS_REQ / TGS_REQ / AP_REQ) from Webathena clients to the realm's KDCs,
    plus a /v1/urandom endpoint serving client-side randomness."""
    def __init__(self, realm=settings.REALM):
        self.realm = realm
        # KDC endpoints map to (request name, ASN.1 spec) tuples; the
        # urandom endpoint maps directly to its handler callable.
        self.url_map = Map([
            Rule('/v1/AS_REQ', endpoint=('AS_REQ', krb_asn1.AS_REQ), methods=['POST']),
            Rule('/v1/TGS_REQ', endpoint=('TGS_REQ', krb_asn1.TGS_REQ), methods=['POST']),
            Rule('/v1/AP_REQ', endpoint=('AP_REQ', krb_asn1.AP_REQ), methods=['POST']),
            Rule('/v1/urandom', endpoint=self.handle_urandom, methods=['POST']),
        ])
    @staticmethod
    def validate_AS_REQ(req_asn1):
        """Reject decoded requests whose msg-type is not AS_REQ."""
        msg_type = int(req_asn1.getComponentByName('msg-type'))
        if msg_type != krb_asn1.KDC_REQ.msg_type_as:
            raise ValueError('Bad msg-type')
    @staticmethod
    def validate_TGS_REQ(req_asn1):
        """Reject decoded requests whose msg-type is not TGS_REQ."""
        msg_type = int(req_asn1.getComponentByName('msg-type'))
        if msg_type != krb_asn1.KDC_REQ.msg_type_tgs:
            raise ValueError('Bad msg-type')
    @staticmethod
    def validate_AP_REQ(req_asn1):
        """AP_REQ needs no extra checks beyond ASN.1 decoding."""
        pass
    @staticmethod
    def _error_response(e):
        """ Returns a Response corresponding to some exception e. """
        data = {'status': 'ERROR', 'msg': str(e)}
        return Response(json.dumps(data), mimetype='application/json')
    @staticmethod
    def handle_urandom():
        """Return URANDOM_BYTES of OS randomness, base64-encoded."""
        random = os.urandom(URANDOM_BYTES)
        # FIXME: We probably should be using a constant-time encoding
        # scheme here...
        return Response(
            base64.b64encode(random),
            mimetype='application/base64',
            headers=[('Content-Disposition',
                      'attachment; filename="b64_response.txt"')])
    def proxy_kdc_request(self, request, endpoint):
        """
        Common code for all proxied KDC requests. endpoint is a
        (req_name, asn1Type) tuple and comes from the URL map. req_b64
        is base64-encoded request. Calls self.validate_${req_name} to
        perform additional checks before sending it along.
        """
        req_name, asn1Type = endpoint
        # Werkzeug docs make a big deal about memory problems if the
        # client sends you MB of data. So, fine, we'll limit it.
        length = request.headers.get('Content-Length', type=int)
        if length is None or length > MAX_PACKET_SIZE * 2:
            return self._error_response('Payload too large')
        req_b64 = request.data
        try:
            req_der = base64.b64decode(req_b64)
        except TypeError as e:
            return self._error_response(e)
        # Make sure we don't send garbage to the KDC. Otherwise it
        # doesn't reply and we time out, which is kinda awkward.
        try:
            req_asn1, rest = der_decoder.decode(req_der,
                                               asn1Spec=asn1Type())
            if rest:
                raise ValueError('Garbage after request')
            getattr(self, 'validate_' + req_name)(req_asn1)
        except (PyAsn1Error, ValueError) as e:
            return self._error_response(e)
        # Okay, it seems good. Go on and send it, reencoded.
        krb_rep = self.send_krb_request(
            der_encoder.encode(req_asn1),
            use_master='use_master' in request.args)
        if krb_rep is None:
            data = {'status': 'TIMEOUT'}
        else:
            # TODO: The JSON wrapping here is really kinda
            # pointless. Just make this base64 and report errors with
            # HTTP status codes + JSON or whatever.
            data = {'status': 'OK', 'reply': base64.b64encode(krb_rep).decode('ascii')}
        # Per Tangled Web, add a defensive Content-Disposition to
        # prevent an extremely confused browser from interpreting this
        # as HTML. Though even navigating to this would be pretty
        # difficult as we require a random header be sent.
        return Response(
            json.dumps(data),
            mimetype='application/json',
            headers=[('Content-Disposition',
                      'attachment; filename="json_response.txt"')])
    def send_krb_request(self, krb_req, use_master):
        """
        Sends Kerberos request krb_req, returns the response or None
        if we time out. If use_master is true, we only talk to the
        master KDC.
        """
        svctype = '_kerberos-master' if use_master else '_kerberos'
        # TODO: Support TCP as well as UDP. I think MIT's KDC only
        # supports UDP though.
        socktype = '_udp'
        # Locate KDCs via DNS SRV records, e.g. _kerberos._udp.REALM.
        srv_query = '%s.%s.%s' % (svctype, socktype, self.realm)
        # dnspython renamed query() to resolve(); support both versions.
        srv_records = list(getattr(dns.resolver, 'resolve', dns.resolver.query)(srv_query, 'SRV'))
        srv_records.sort(key=lambda r: r.priority)
        socks = []
        try:
            for r in srv_records:
                host = str(r.target)
                port = int(r.port)
                s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
                s.setblocking(0)
                s.connect((host, port))
                socks.append(s)
            return send_request(socks, krb_req)
        finally:
            # Always close every socket we opened, even on failure.
            for s in socks:
                s.close()
    def dispatch_request(self, request):
        """Route a request: callable endpoints (urandom) run directly,
        tuple endpoints go through the KDC proxy path."""
        adapter = self.url_map.bind_to_environ(request.environ)
        try:
            endpoint, values = adapter.match()
            if callable(endpoint):
                return endpoint()
            return self.proxy_kdc_request(request, endpoint, **values)
        except HTTPException as e:
            # Werkzeug HTTP errors are themselves valid WSGI responses.
            return e
    def wsgi_app(self, environ, start_response):
        """Standard Werkzeug WSGI plumbing."""
        request = Request(environ)
        response = self.dispatch_request(request)
        return response(environ, start_response)
    def __call__(self, environ, start_response):
        return self.wsgi_app(environ, start_response)
def create_app():
    """Build and return the WSGI application instance."""
    app = WebKDC()
    return app
def main():
    """Run the development server; optionally takes ``ip:port`` in argv[1]."""
    # pylint: disable=import-outside-toplevel
    import sys
    from werkzeug.serving import run_simple
    host, port = '127.0.0.1', 5000
    if len(sys.argv) > 1:
        host, port_str = sys.argv[1].rsplit(':', 1)
        port = int(port_str)
    run_simple(host, port, create_app(), use_debugger=True, use_reloader=True)
# Allow running this module directly as a development server.
if __name__ == '__main__':
    main()
| [
"select.select",
"socket.socket",
"os.urandom",
"base64.b64encode",
"json.dumps",
"base64.b64decode",
"werkzeug.routing.Rule",
"werkzeug.serving.run_simple",
"pyasn1.codec.der.encoder.encode",
"werkzeug.wrappers.Request"
] | [((884, 921), 'select.select', 'select.select', (['socks', '[]', '[]', 'timeout'], {}), '(socks, [], [], timeout)\n', (897, 921), False, 'import select\n'), ((7853, 7916), 'werkzeug.serving.run_simple', 'run_simple', (['ip', 'port', 'app'], {'use_debugger': '(True)', 'use_reloader': '(True)'}), '(ip, port, app, use_debugger=True, use_reloader=True)\n', (7863, 7916), False, 'from werkzeug.serving import run_simple\n'), ((3080, 3105), 'os.urandom', 'os.urandom', (['URANDOM_BYTES'], {}), '(URANDOM_BYTES)\n', (3090, 3105), False, 'import os\n'), ((7312, 7328), 'werkzeug.wrappers.Request', 'Request', (['environ'], {}), '(environ)\n', (7319, 7328), False, 'from werkzeug.wrappers import Request, Response\n'), ((2971, 2987), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (2981, 2987), False, 'import json\n'), ((3238, 3262), 'base64.b64encode', 'base64.b64encode', (['random'], {}), '(random)\n', (3254, 3262), False, 'import base64\n'), ((4185, 4210), 'base64.b64decode', 'base64.b64decode', (['req_b64'], {}), '(req_b64)\n', (4201, 4210), False, 'import base64\n'), ((4901, 4929), 'pyasn1.codec.der.encoder.encode', 'der_encoder.encode', (['req_asn1'], {}), '(req_asn1)\n', (4919, 4929), True, 'from pyasn1.codec.der import encoder as der_encoder\n'), ((5636, 5652), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (5646, 5652), False, 'import json\n'), ((1932, 2006), 'werkzeug.routing.Rule', 'Rule', (['"""/v1/AS_REQ"""'], {'endpoint': "('AS_REQ', krb_asn1.AS_REQ)", 'methods': "['POST']"}), "('/v1/AS_REQ', endpoint=('AS_REQ', krb_asn1.AS_REQ), methods=['POST'])\n", (1936, 2006), False, 'from werkzeug.routing import Map, Rule\n'), ((2020, 2097), 'werkzeug.routing.Rule', 'Rule', (['"""/v1/TGS_REQ"""'], {'endpoint': "('TGS_REQ', krb_asn1.TGS_REQ)", 'methods': "['POST']"}), "('/v1/TGS_REQ', endpoint=('TGS_REQ', krb_asn1.TGS_REQ), methods=['POST'])\n", (2024, 2097), False, 'from werkzeug.routing import Map, Rule\n'), ((2111, 2185), 'werkzeug.routing.Rule', 'Rule', 
(['"""/v1/AP_REQ"""'], {'endpoint': "('AP_REQ', krb_asn1.AP_REQ)", 'methods': "['POST']"}), "('/v1/AP_REQ', endpoint=('AP_REQ', krb_asn1.AP_REQ), methods=['POST'])\n", (2115, 2185), False, 'from werkzeug.routing import Map, Rule\n'), ((2199, 2266), 'werkzeug.routing.Rule', 'Rule', (['"""/v1/urandom"""'], {'endpoint': 'self.handle_urandom', 'methods': "['POST']"}), "('/v1/urandom', endpoint=self.handle_urandom, methods=['POST'])\n", (2203, 2266), False, 'from werkzeug.routing import Map, Rule\n'), ((6608, 6656), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_DGRAM'], {}), '(socket.AF_INET, socket.SOCK_DGRAM)\n', (6621, 6656), False, 'import socket\n'), ((5294, 5319), 'base64.b64encode', 'base64.b64encode', (['krb_rep'], {}), '(krb_rep)\n', (5310, 5319), False, 'import base64\n')] |
# -*- coding: utf8 -*-
#
# DB migration 001 by 2017-11-03
#
# New statistics for subvolume - root diff in blocks / bytes
#
__author__ = 'sergey'
__NUMBER__ = 20171103001
def run(manager):
    """
    Migration 20171103001: add `root_diff` / `root_diff_at` columns to the
    `subvolume` table and record this migration number in the options table.

    :param manager: Database manager
    :type manager: dedupsqlfs.db.sqlite.manager.DbManager|dedupsqlfs.db.mysql.manager.DbManager
    :return: bool -- False if the schema change failed, True otherwise
    """
    try:
        table_sv = manager.getTable("subvolume")
        """
        :type table_sv: dedupsqlfs.db.sqlite.table.subvolume.TableSubvolume |
                        dedupsqlfs.db.mysql.table.subvolume.TableSubvolume
        """
        # NOTE(review): ROOT_SUBVOLUME_NAME is imported but never used here.
        from dedupsqlfs.lib.constants import ROOT_SUBVOLUME_NAME
        cur = table_sv.getCursor()
        manager.getLogger().info("Migration #%s" % (__NUMBER__,))
        # Add the columns only when missing so the migration is idempotent.
        if not table_sv.hasField('root_diff'):
            if manager.TYPE == "sqlite":
                cur.execute("ALTER TABLE `subvolume` ADD COLUMN `root_diff` TEXT;")
            if manager.TYPE == "mysql":
                cur.execute("ALTER TABLE `subvolume` ADD COLUMN `root_diff` TEXT;")
        if not table_sv.hasField('root_diff_at'):
            if manager.TYPE == "sqlite":
                cur.execute("ALTER TABLE `subvolume` ADD COLUMN `root_diff_at` INTEGER;")
            if manager.TYPE == "mysql":
                # MySQL gets an unsigned int rather than SQLite's INTEGER.
                cur.execute("ALTER TABLE `subvolume` ADD COLUMN `root_diff_at` INT UNSIGNED;")
        table_sv.commit()
        table_sv.close()
    except Exception as e:
        # Log the failure with a traceback and report it to the caller.
        import traceback
        manager.getLogger().error("Migration #%s error: %s" % (__NUMBER__, e,))
        manager.getLogger().error("Migration #%s trace:\n%s" % (__NUMBER__, traceback.format_exc(),))
        return False
    # Persist the migration number (insert on first run, update thereafter).
    table_opts = manager.getTable("option")
    table_opts.getCursor()
    mignumber = table_opts.get("migration")
    if not mignumber:
        table_opts.insert("migration", __NUMBER__)
    else:
        table_opts.update("migration", __NUMBER__)
    table_opts.commit()
    return True
| [
"traceback.format_exc"
] | [((1639, 1661), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (1659, 1661), False, 'import traceback\n')] |
import pytest
@pytest.mark.parametrize("cli_options", [
    ('-k', 'notestdeselect',),
])
def test_autoexecute_yml_keywords_skipped(testdir, cli_options):
    """A -k expression matching nothing deselects the auto-collected yml
    test, so no outcome (pass/fail/error/skip) is reported at all."""
    yml_file = testdir.makefile(".yml", """
    ---
    markers:
      - marker1
      - marker2
    ---
    - provider: python
      type: assert
      expression: "1"
    """)
    # makefile() names the file after the test module, so it is collectable.
    assert yml_file.basename.startswith('test_')
    assert yml_file.basename.endswith('.yml')
    result = testdir.runpytest(*cli_options)
    result.assert_outcomes(passed=0, failed=0, error=0)
    # Deselected, not skipped. See #3427
    # result.assert_outcomes(skipped=1)
| [
"pytest.mark.parametrize"
] | [((17, 83), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""cli_options"""', "[('-k', 'notestdeselect')]"], {}), "('cli_options', [('-k', 'notestdeselect')])\n", (40, 83), False, 'import pytest\n')] |
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import ipdb
import time
# Clustering penalties
class ClusterLoss(torch.nn.Module):
    """
    Cluster loss comes from the SuBiC paper and consists of two losses. First is the Mean Entropy
    Loss which makes the output to be close to one-hot encoded vectors.
    Second is the Negative Batch Entropy Loss which ensures a uniform distribution of activations
    over the output (Uniform block support).
    """
    def __init__(self):
        super(ClusterLoss, self).__init__()

    def entropy(self, logits):
        """Shannon entropy of softmax(logits) taken along dim 0."""
        probs = F.softmax(logits, dim=0)
        log_probs = F.log_softmax(logits, dim=0)
        return -(probs * log_probs).sum()

    def forward(self, logits):
        """
        Input: logits -> T x K # Where K is the number of classes and T is the batch size
        Output: L = MEL, BEL
        """
        batch_size = logits.shape[0]
        # Mean Entropy Loss - For one-hotness:
        # average of the per-sample entropies over the batch.
        per_sample = torch.zeros([batch_size, 1])
        for idx in range(batch_size):
            per_sample[idx] = self.entropy(logits[idx, :])
        mel = torch.mean(per_sample)
        # Batch Entropy Loss - For uniform support:
        # negative entropy of the batch-averaged logits.
        bel = -1.0 * self.entropy(torch.mean(logits, dim=0))
        return mel.cuda(), bel.cuda()
# Stochastic Transformation Stability Loss. Introduced in:
# "Regularization With Stochastic Transformations and Perturbations for Deep Semi-Supervised
# Learning"
class StochasticTransformationLoss(torch.nn.Module):
    """
    The idea behind this is that stochastic transformations of an image (flips and translations)
    should lead to very close features.

    Fix: `forward` used true division (`batch_size/num_transformations`)
    when building index groups, which yields a float and makes `range()`
    raise a TypeError on Python 3; integer division (`//`) is used now.
    """
    def __init__(self):
        super(StochasticTransformationLoss, self).__init__()

    def entropy(self, logits):
        """
        Input: logits -> N x 1 x D # Where D is the feature dimension
        Output: entropy -> N x 1
        """
        # TODO
        # Check is this is correct
        return -1.0*(F.softmax(logits,dim=-1)*F.log_softmax(logits,dim=-1)).sum(-1)

    def cross_entropy(self, logits1, logits2):
        """
        Input: logits1 -> N x 1 x D # Where D is the feature dimension
               logits2 -> 1 x N x D # Where D is the feature dimension
        Output: Pairwise Cross-entropy -> N x N
        """
        # TODO
        # Check is this is correct
        return -1.0*(F.softmax(logits1,dim=-1)*F.log_softmax(logits2,dim=-1)).sum(-1)

    def distances(self, A, distance_type='Euclidean', eps=1e-6):
        """
        Input: A -> num_transformations x D # Where D is the feature dimension
               distance_type -> 'Euclidean'/'cosine'/'KL'
        Output: distances -> num_transformations x num_transformations pair wise distances
        """
        assert A.dim() == 2
        if distance_type == 'Euclidean':
            # 1. Numerically stable but too much memory?
            # Broadcast to all pairs; result is SQUARED Euclidean distance.
            B = A.unsqueeze(1)
            C = A.unsqueeze(0)
            differences = B - C
            distances = torch.sum(differences*differences,-1) # N x N
            # Do we need sqrt? - Paper doesn't do sqrt
            # 2. Less memory but numerically unstable due to rounding errors
            #A_norm_1 = (A**2).sum(1).view(-1,1)
            #A_norm_2 = A_norm_1.view(1,-1)
            #distances = A_norm_1 + A_norm_2 - 2.0*torch.matmul(A, torch.transpose(A,0,1))
        elif distance_type == 'cosine':
            B = F.normalize(A, p=2, dim=1)
            distances = 1.0 - torch.matmul(B,B.t()) # N x N
        elif distance_type == 'KL':
            # Make sure that A contains logits
            B = A.unsqueeze(1)
            C = A.unsqueeze(0)
            # TODO
            # Might have to use a symmetric KL div
            # Check - Still probably incorrect. Probably due to incorrect cross_entropy
            # implementation
            distances = -1.0*self.entropy(B) + self.cross_entropy(B,C) # N x N
        return distances

    def forward(self, features, num_transformations, distance_type='Euclidean'):
        """
        Input: features -> T x D # Where D is the feature dimension and T is the batch size
               num_transformations -> Number of transformations applied to the data
                                      (Make sure that T is a multiple of num_transformations)
        Output: ST Loss
        """
        batch_size = features.shape[0]
        # Group consecutive rows: each group holds the num_transformations
        # variants of one underlying image. Integer division keeps range()
        # valid on Python 3 (the old `/` produced a float and raised).
        all_index_groups = [[(i*num_transformations)+j for j in range(num_transformations)] for i in range(batch_size // num_transformations)]
        total_loss = 0.0
        for i in range(len(all_index_groups)):
            split_features = torch.index_select(features, 0, torch.cuda.LongTensor(all_index_groups[i]))
            distances = self.distances(split_features,distance_type=distance_type)
            # Each unordered pair appears twice in the symmetric matrix.
            total_loss += 0.5*torch.sum(distances)
        total_loss = total_loss / (1.0*batch_size)
        # Don't know how exactly should we average. Per pair? Per image?
        return total_loss
def get_loss(loss_name = 'CE'):
    """Factory returning a CUDA-resident criterion for `loss_name`.

    Supported names: 'CE', 'ClusterLoss', 'LocalityLoss', 'CAMLocalityLoss',
    'LEL', 'STLoss'. (The Locality* / LEL classes are presumably defined
    elsewhere in the project -- they are not in this module.)

    Fix: an unknown name previously fell through every branch and raised
    UnboundLocalError on the final return; it now raises ValueError with a
    clear message.
    """
    if loss_name == 'CE':
        # ignore_index ignores the samples which have label -1000. We specify the unsupervised images by
        # label 1000
        criterion = nn.CrossEntropyLoss(ignore_index = -1000).cuda()
    elif loss_name == 'ClusterLoss':
        criterion = ClusterLoss().cuda()
    elif loss_name == 'LocalityLoss':
        criterion = LocalityLoss().cuda()
    elif loss_name == 'CAMLocalityLoss':
        criterion = CAMLocalityLoss().cuda()
    elif loss_name == 'LEL':
        criterion = LocalityEntropyLoss().cuda()
    elif loss_name == 'STLoss':
        criterion = StochasticTransformationLoss().cuda()
    else:
        raise ValueError('Unknown loss name: %r' % (loss_name,))
    return criterion
| [
"torch.cuda.LongTensor",
"torch.nn.functional.softmax",
"torch.nn.CrossEntropyLoss",
"torch.mean",
"torch.nn.functional.normalize",
"torch.sum",
"torch.nn.functional.log_softmax",
"torch.zeros"
] | [((981, 1014), 'torch.zeros', 'torch.zeros', (['[logits.shape[0], 1]'], {}), '([logits.shape[0], 1])\n', (992, 1014), False, 'import torch\n'), ((1116, 1132), 'torch.mean', 'torch.mean', (['sum1'], {}), '(sum1)\n', (1126, 1132), False, 'import torch\n'), ((1274, 1299), 'torch.mean', 'torch.mean', (['logits'], {'dim': '(0)'}), '(logits, dim=0)\n', (1284, 1299), False, 'import torch\n'), ((3087, 3127), 'torch.sum', 'torch.sum', (['(differences * differences)', '(-1)'], {}), '(differences * differences, -1)\n', (3096, 3127), False, 'import torch\n'), ((3505, 3531), 'torch.nn.functional.normalize', 'F.normalize', (['A'], {'p': '(2)', 'dim': '(1)'}), '(A, p=2, dim=1)\n', (3516, 3531), True, 'import torch.nn.functional as F\n'), ((4831, 4873), 'torch.cuda.LongTensor', 'torch.cuda.LongTensor', (['all_index_groups[i]'], {}), '(all_index_groups[i])\n', (4852, 4873), False, 'import torch\n'), ((4988, 5008), 'torch.sum', 'torch.sum', (['distances'], {}), '(distances)\n', (4997, 5008), False, 'import torch\n'), ((5367, 5406), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {'ignore_index': '(-1000)'}), '(ignore_index=-1000)\n', (5386, 5406), True, 'import torch.nn as nn\n'), ((622, 646), 'torch.nn.functional.softmax', 'F.softmax', (['logits'], {'dim': '(0)'}), '(logits, dim=0)\n', (631, 646), True, 'import torch.nn.functional as F\n'), ((646, 674), 'torch.nn.functional.log_softmax', 'F.log_softmax', (['logits'], {'dim': '(0)'}), '(logits, dim=0)\n', (659, 674), True, 'import torch.nn.functional as F\n'), ((2066, 2091), 'torch.nn.functional.softmax', 'F.softmax', (['logits'], {'dim': '(-1)'}), '(logits, dim=-1)\n', (2075, 2091), True, 'import torch.nn.functional as F\n'), ((2091, 2120), 'torch.nn.functional.log_softmax', 'F.log_softmax', (['logits'], {'dim': '(-1)'}), '(logits, dim=-1)\n', (2104, 2120), True, 'import torch.nn.functional as F\n'), ((2461, 2487), 'torch.nn.functional.softmax', 'F.softmax', (['logits1'], {'dim': '(-1)'}), '(logits1, dim=-1)\n', (2470, 
2487), True, 'import torch.nn.functional as F\n'), ((2487, 2517), 'torch.nn.functional.log_softmax', 'F.log_softmax', (['logits2'], {'dim': '(-1)'}), '(logits2, dim=-1)\n', (2500, 2517), True, 'import torch.nn.functional as F\n')] |
import solana_rpc as rpc
def get_apr_from_rewards(rewards_data):
    """Extract per-epoch reward records from a rewards payload.

    Each record in the payload's 'epochRewards' list is mapped to a dict
    with keys 'percent_change' and 'apr'. Returns an empty list when the
    payload is None or has no 'epochRewards' section.
    """
    if rewards_data is None or 'epochRewards' not in rewards_data:
        return []
    return [
        {'percent_change': entry['percentChange'], 'apr': entry['apr']}
        for entry in rewards_data['epochRewards']
    ]
def calc_single_apy(apr, percent_change):
    """Convert a single epoch's reward percentage into an annualized APY.

    ``apr / percent_change`` estimates how many epochs occur per year;
    the per-epoch yield is then compounded over that count.

    Fix: returns 0.0 when ``percent_change`` is 0 (no reward in the
    epoch), which previously raised ZeroDivisionError.
    """
    if percent_change == 0:
        return 0.0
    epoch_count = apr / percent_change
    return ((1 + percent_change / 100) ** epoch_count - 1) * 100
def calc_apy_list_from_apr(apr_per_epoch):
    """Map each {'apr', 'percent_change'} record to its APY value."""
    return [
        calc_single_apy(entry['apr'], entry['percent_change'])
        for entry in apr_per_epoch
    ]
def process(validators):
    """Collect the per-epoch APY series for every validator.

    For each validator dict, loads the rewards of its 'stake_account'
    via the RPC helper and converts them into a list of APY values.
    Returns one APY list per validator, in input order.
    """
    results = []
    for entry in validators:
        rewards = rpc.load_stake_account_rewards(entry['stake_account'])
        per_epoch = get_apr_from_rewards(rewards)
        results.append(calc_apy_list_from_apr(per_epoch))
    return results
| [
"solana_rpc.load_stake_account_rewards"
] | [((902, 960), 'solana_rpc.load_stake_account_rewards', 'rpc.load_stake_account_rewards', (["validator['stake_account']"], {}), "(validator['stake_account'])\n", (932, 960), True, 'import solana_rpc as rpc\n')] |
import re
from typing import List, Set
class Solution:
    """LeetCode 1096 "Brace Expansion II": expand a brace grammar into the
    sorted list of distinct words it represents.

    Fixes: the original annotated with ``List`` without importing it
    (NameError at class-definition time) and used non-raw regex strings
    (invalid escape sequences); ``helper`` actually returns a set, so it
    is now annotated as ``Set[str]``.
    """

    def helper(self, expression: str) -> Set[str]:
        """Recursively expand the leftmost innermost brace group.

        Finds the first ``{...}`` that contains no nested braces,
        substitutes each comma-separated alternative in turn, and unions
        the expansions of the resulting expressions.
        """
        innermost = re.search(r"\{([^}{]+)\}", expression)
        if not innermost:
            # No braces left: the expression is a single literal word.
            return {expression}
        group = innermost.group(1)
        expanded = set()
        for option in group.split(','):
            # Replace only the first occurrence so identical groups
            # elsewhere are handled by deeper recursion.
            expanded |= self.helper(expression.replace('{' + group + '}', option, 1))
        return expanded

    def braceExpansionII(self, expression: str) -> List[str]:
        """Return the sorted distinct words denoted by *expression*."""
        return sorted(self.helper(expression))
| [
"re.search"
] | [((92, 131), 're.search', 're.search', (['"""\\\\{([^}{]+)\\\\}"""', 'expression'], {}), "('\\\\{([^}{]+)\\\\}', expression)\n", (101, 131), False, 'import re\n')] |
from tensorflow.keras.callbacks import Callback
from poem_generator.word_generator import generate_poem
class PoemCallback(Callback):
    """Keras callback that prints sample poems at the end of each epoch."""

    def __init__(self, poems, seed_length, dictionary, single=True):
        """Store generation settings and precompute the reverse mapping.

        poems: how many poems to generate per epoch.
        dictionary: word -> index mapping used by the model; the inverse
        index -> word mapping is built here for decoding output.
        """
        super(PoemCallback, self).__init__()
        self.poems = poems
        self.dictionary = dictionary
        self.reverse_dictionary = {index: word for word, index in dictionary.items()}
        self.seed_length = seed_length
        self.single = single

    def on_epoch_end(self, epoch, logs=None):
        """Generate and print ``self.poems`` sample poems from the model."""
        for i in range(self.poems):
            print(f"Poem {i+1}/{self.poems}")
            generate_poem(self.model, self.reverse_dictionary, self.dictionary,
                          self.seed_length, single=self.single)
"poem_generator.word_generator.generate_poem"
] | [((610, 720), 'poem_generator.word_generator.generate_poem', 'generate_poem', (['self.model', 'self.reverse_dictionary', 'self.dictionary', 'self.seed_length'], {'single': 'self.single'}), '(self.model, self.reverse_dictionary, self.dictionary, self.\n seed_length, single=self.single)\n', (623, 720), False, 'from poem_generator.word_generator import generate_poem\n')] |
# encoding: utf-8
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Author: <NAME> (<EMAIL>)
#
from __future__ import absolute_import, division, unicode_literals
from mo_future import is_text, is_binary
import gzip
from io import BytesIO
import struct
from tempfile import TemporaryFile
import time
import zipfile
import zlib
from mo_future import PY3, long, text
from mo_logs import Log
from mo_logs.exceptions import suppress_exception
import mo_math
# LIBRARY TO DEAL WITH BIG DATA ARRAYS AS ITERATORS OVER (IR)REGULAR SIZED
# BLOCKS, OR AS ITERATORS OVER LINES
DEBUG = False
MIN_READ_SIZE = 8 * 1024
MAX_STRING_SIZE = 1 * 1024 * 1024
class FileString(text):
    """
    A string-like facade over a (temporary) file so that very large text
    can be handled without holding it all in memory.

    NOTE(review): this class carries Python 2 remnants — `__getslice__`
    is never called by Python 3 slicing, and `self.encoding` is only set
    by `decode()`, so slicing/str() before `decode()` raises
    AttributeError. Confirm the intended interpreter before relying on
    those paths.
    """
    def __init__(self, file):
        # The backing file object; most operations delegate to it.
        self.file = file
    def decode(self, encoding):
        # Record the encoding used when reading back; only utf8 is supported.
        if encoding != "utf8":
            Log.error("can not handle {{encoding}}", encoding= encoding)
        self.encoding = encoding
        return self
    def split(self, sep):
        # Only line-splitting is supported; returns a lazy line iterator.
        if sep != "\n":
            Log.error("Can only split by lines")
        self.file.seek(0)
        return LazyLines(self.file)
    def __len__(self):
        # Seek to the end to measure length, then restore the position.
        temp = self.file.tell()
        self.file.seek(0, 2)
        file_length = self.file.tell()
        self.file.seek(temp)
        return file_length
    def __getslice__(self, i, j):
        # Python 2 slice protocol: read and decode the byte range [i, j).
        j = mo_math.min(j, len(self))
        if j - 1 > 2 ** 28:
            # Refuse to materialize slices larger than ~256MB.
            Log.error("Slice of {{num}} bytes is too big", num=j - i)
        try:
            self.file.seek(i)
            output = self.file.read(j - i).decode(self.encoding)
            return output
        except Exception as e:
            Log.error(
                "Can not read file slice at {{index}}, with encoding {{encoding}}",
                index=i,
                encoding=self.encoding,
                cause=e
            )
    def __add__(self, other):
        # NOTE(review): appends in place and returns None, unlike
        # str.__add__ — confirm callers ignore the return value.
        self.file.seek(0, 2)
        self.file.write(other)
    def __radd__(self, other):
        # `other + self`: build a new temporary file with `other` first,
        # then this file's contents appended.
        new_file = TemporaryFile()
        new_file.write(other)
        self.file.seek(0)
        for l in self.file:
            new_file.write(l)
        new_file.seek(0)
        return FileString(new_file)
    def __getattr__(self, attr):
        # Everything else (read, seek, tell, write, ...) delegates to the file.
        return getattr(self.file, attr)
    def __del__(self):
        # Close the backing file when the wrapper is garbage-collected;
        # clearing self.file first makes a re-entrant __del__ harmless.
        self.file, temp = None, self.file
        if temp:
            temp.close()
    def __iter__(self):
        # Iterate the file's lines from the beginning.
        self.file.seek(0)
        return self.file
    if PY3:
        def __str__(self):
            # Read the whole file back as text, restoring the position after.
            if self.encoding == "utf8":
                temp = self.file.tell()
                self.file.seek(0, 2)
                file_length = self.file.tell()
                self.file.seek(0)
                output = self.file.read(file_length).decode(self.encoding)
                self.file.seek(temp)
                return output
    else:
        def __unicode__(self):
            # Python 2 equivalent of __str__ above.
            if self.encoding == "utf8":
                temp = self.file.tell()
                self.file.seek(0, 2)
                file_length = self.file.tell()
                self.file.seek(0)
                output = self.file.read(file_length).decode(self.encoding)
                self.file.seek(temp)
                return output
def safe_size(source):
    """
    READ THE source UP TO SOME LIMIT, THEN COPY TO A FILE IF TOO BIG
    RETURN A str() OR A FileString()

    Reads `source` in MIN_READ_SIZE chunks. If the total ever exceeds
    MAX_STRING_SIZE, the accumulated chunks (and the rest of the stream)
    are spilled to a temporary file and a FileString is returned;
    otherwise the chunks are joined into a plain bytes object.
    """
    if source is None:
        return None
    total_bytes = 0
    bytes = []  # NOTE(review): shadows the builtin `bytes`
    b = source.read(MIN_READ_SIZE)
    while b:
        total_bytes += len(b)
        bytes.append(b)
        if total_bytes > MAX_STRING_SIZE:
            # Too big for memory: spill everything read so far to disk,
            # then stream the remainder of `source` straight to the file.
            try:
                data = FileString(TemporaryFile())
                for bb in bytes:
                    data.write(bb)
                # Free the in-memory chunks as early as possible.
                del bytes
                del bb
                b = source.read(MIN_READ_SIZE)
                while b:
                    total_bytes += len(b)
                    data.write(b)
                    b = source.read(MIN_READ_SIZE)
                data.seek(0)
                Log.note("Using file of size {{length}} instead of str()", length= total_bytes)
                return data
            except Exception as e:
                Log.error("Could not write file > {{num}} bytes", num= total_bytes, cause=e)
        b = source.read(MIN_READ_SIZE)
    # Small enough: return the content as one bytes object.
    data = b"".join(bytes)
    del bytes
    return data
class LazyLines(object):
    """
    SIMPLE LINE ITERATOR, BUT WITH A BIT OF CACHING TO LOOK LIKE AN ARRAY

    Only forward access is supported: the current item, the item just
    yielded, and slices resuming at the current position.

    Fix: ``self._iter.next()`` was Python-2-only syntax; replaced with
    the version-agnostic builtin ``next()``.
    """
    def __init__(self, source, encoding="utf8"):
        """
        ASSUME source IS A LINE ITERATOR OVER utf8 ENCODED BYTE STREAM
        """
        self.source = source
        self.encoding = encoding
        self._iter = self.__iter__()
        self._last = None   # most recently yielded line
        self._next = 0      # index of the next line to be yielded
    def __getslice__(self, i, j):
        if i == self._next - 1:
            # Slice starts at the line just yielded: replay it, then
            # continue with the underlying iterator.
            def output():
                yield self._last
                for v in self._iter:
                    self._next += 1
                    yield v
            return output()
        if i == self._next:
            return self._iter
        Log.error("Do not know how to slice this generator")
    def __iter__(self):
        def output():
            for v in self.source:
                self._last = v
                yield self._last
        return output()
    def __getitem__(self, item):
        try:
            if item == self._next:
                self._next += 1
                # was `self._iter.next()` (Python 2 only)
                return next(self._iter)
            elif item == self._next - 1:
                return self._last
            else:
                Log.error("can not index out-of-order too much")
        except Exception as e:
            Log.error("Problem indexing", e)
class CompressedLines(LazyLines):
    """
    KEEP COMPRESSED HTTP (content-type: gzip) IN BYTES ARRAY
    WHILE PULLING OUT ONE LINE AT A TIME FOR PROCESSING

    Fix: ``self._iter.next()`` was Python-2-only syntax; replaced with
    the version-agnostic builtin ``next()``.
    """
    def __init__(self, compressed, encoding="utf8"):
        """
        USED compressed BYTES TO DELIVER LINES OF TEXT
        LIKE LazyLines, BUT HAS POTENTIAL TO seek()
        """
        self.compressed = compressed
        LazyLines.__init__(self, None, encoding=encoding)
        self._iter = self.__iter__()
    def __iter__(self):
        # Decompress in fixed-size blocks and split the byte stream into lines.
        return LazyLines(ibytes2ilines(compressed_bytes2ibytes(self.compressed, MIN_READ_SIZE), encoding=self.encoding)).__iter__()
    def __getslice__(self, i, j):
        if i == self._next:
            return self._iter
        if i == 0:
            # Restart from the beginning by decompressing again.
            return self.__iter__()
        if i == self._next - 1:
            def output():
                yield self._last
                for v in self._iter:
                    yield v
            return output()
        Log.error("Do not know how to slice this generator")
    def __getitem__(self, item):
        try:
            if item == self._next:
                # was `self._iter.next()` (Python 2 only)
                self._last = next(self._iter)
                self._next += 1
                return self._last
            elif item == self._next - 1:
                return self._last
            else:
                Log.error("can not index out-of-order too much")
        except Exception as e:
            Log.error("Problem indexing", e)
    def __radd__(self, other):
        # NOTE(review): BROKEN — `self.file` is never defined on this class
        # (body copied from FileString.__radd__); any call raises
        # AttributeError. Left as-is pending a decision on intended semantics.
        new_file = TemporaryFile()
        new_file.write(other)
        self.file.seek(0)
        for l in self.file:
            new_file.write(l)
        new_file.seek(0)
        return FileString(new_file)
def compressed_bytes2ibytes(compressed, size):
    """
    CONVERT AN ARRAY OF BYTES TO A BYTE-BLOCK GENERATOR
    USEFUL IN THE CASE WHEN WE WANT TO LIMIT HOW MUCH WE FEED ANOTHER
    GENERATOR (LIKE A DECOMPRESSOR)
    """
    # gzip-format stream (wbits = 16 + MAX_WBITS)
    decompressor = zlib.decompressobj(16 + zlib.MAX_WBITS)
    upper = mo_math.ceiling(len(compressed), size)
    for offset in range(0, upper, size):
        try:
            yield decompressor.decompress(compressed[offset: offset + size])
        except Exception as e:
            Log.error("Not expected", e)
def ibytes2ilines(generator, encoding="utf8", flexible=False, closer=None):
    """
    CONVERT A GENERATOR OF (ARBITRARY-SIZED) byte BLOCKS
    TO A LINE (CR-DELIMITED) GENERATOR

    :param generator:
    :param encoding: None TO DO NO DECODING
    :param closer: OPTIONAL FUNCTION TO RUN WHEN DONE ITERATING
    :return:

    Fixes: ``generator.next()`` was Python-2-only syntax; also, an
    uncaught StopIteration inside a generator is a RuntimeError on
    Python 3.7+ (PEP 479), so the initial read is now guarded — an empty
    input generator simply yields nothing.
    """
    decode = get_decoder(encoding=encoding, flexible=flexible)
    try:
        _buffer = next(generator)
    except StopIteration:
        if closer:
            closer()
        return
    s = 0
    e = _buffer.find(b"\n")
    while True:
        while e == -1:
            try:
                next_block = next(generator)
                _buffer = _buffer[s:] + next_block
                s = 0
                e = _buffer.find(b"\n")
            except StopIteration:
                # Source exhausted: flush the remaining partial line.
                _buffer = _buffer[s:]
                del generator
                if closer:
                    closer()
                if _buffer:
                    yield decode(_buffer)
                return
        yield decode(_buffer[s:e])
        s = e + 1
        e = _buffer.find(b"\n", s)
def ibytes2icompressed(source):
    """
    Compress a generator of byte blocks into a generator of gzip-format
    byte blocks: a hand-rolled gzip header, a raw deflate body, then the
    CRC32 + length trailer.

    Fix: uses the builtin ``int`` instead of the ``mo_future.long`` shim
    (identical result on both Python 2 and 3).
    """
    yield (
        b'\037\213\010\000' +  # Gzip file, deflate, no filename
        struct.pack('<L', int(time.time())) +  # compression start time
        b'\002\377'  # maximum compression, no OS specified
    )
    crc = zlib.crc32(b"")
    length = 0
    # Negative wbits => raw deflate stream; header/trailer are emitted manually.
    compressor = zlib.compressobj(9, zlib.DEFLATED, -zlib.MAX_WBITS, zlib.DEF_MEM_LEVEL, 0)
    for d in source:
        crc = zlib.crc32(d, crc) & 0xffffffff
        length += len(d)
        chunk = compressor.compress(d)
        if chunk:
            yield chunk
    yield compressor.flush()
    yield struct.pack("<2L", crc, length & 0xffffffff)
class GzipLines(CompressedLines):
    """
    SAME AS CompressedLines, BUT USING THE GzipFile FORMAT FOR COMPRESSED BYTES
    """
    def __init__(self, compressed, encoding="utf8"):
        CompressedLines.__init__(self, compressed, encoding=encoding)
    def __iter__(self):
        # Wrap the in-memory bytes in a GzipFile and iterate its lines lazily.
        stream = BytesIO(self.compressed)
        return iter(LazyLines(gzip.GzipFile(fileobj=stream, mode='r'), encoding=self.encoding))
class ZipfileLines(CompressedLines):
    """
    SAME AS CompressedLines, BUT USING THE ZipFile FORMAT FOR COMPRESSED BYTES
    """
    def __init__(self, compressed, encoding="utf8"):
        CompressedLines.__init__(self, compressed, encoding=encoding)
    def __iter__(self):
        archive = zipfile.ZipFile(BytesIO(self.compressed), mode='r')
        names = archive.namelist()
        if len(names) != 1:
            Log.error("*.zip file has {{num}} files, expecting only one.", num=len(names))
        # Stream the single archive member and split it into lines.
        member = archive.open(names[0], "r")
        return iter(LazyLines(sbytes2ilines(member), encoding=self.encoding))
def icompressed2ibytes(source):
    """
    :param source: GENERATOR OF COMPRESSED BYTES
    :return: GENERATOR OF BYTES
    """
    # gzip-format stream (wbits = 16 + MAX_WBITS)
    decompressor = zlib.decompressobj(16 + zlib.MAX_WBITS)
    last_reported = 0  # last total we logged, so debug output is throttled
    total = 0
    for block in source:
        try:
            data = decompressor.decompress(block)
        except Exception as e:
            Log.error("problem", cause=e)
        total += len(data)
        # Emit a debug line at most once per megabyte of output.
        if mo_math.floor(last_reported, 1000000) != mo_math.floor(total, 1000000):
            last_reported = total
            DEBUG and Log.note("bytes={{bytes}}", bytes=total)
        yield data
def scompressed2ibytes(stream):
    """
    :param stream: SOMETHING WITH read() METHOD TO GET MORE BYTES
    :return: GENERATOR OF UNCOMPRESSED BYTES

    Wraps the stream in a chunked generator and feeds it to
    icompressed2ibytes() for gzip decompression. The stream is closed
    (errors suppressed) once it is exhausted or iteration fails.
    """
    def more():
        # Pull fixed-size chunks until the stream reports EOF.
        try:
            while True:
                bytes_ = stream.read(4096)
                if not bytes_:
                    return
                yield bytes_
        except Exception as e:
            Log.error("Problem iterating through stream", cause=e)
        finally:
            # Always close; any close() error is deliberately swallowed.
            with suppress_exception:
                stream.close()
    return icompressed2ibytes(more())
def sbytes2ilines(stream, encoding="utf8", closer=None):
    """
    CONVERT A STREAM (with read() method) OF (ARBITRARY-SIZED) byte BLOCKS
    TO A LINE (CR-DELIMITED) GENERATOR

    :param stream: object with a read() method returning bytes
    :param encoding: passed through to the line decoder (None = no decode)
    :param closer: optional callback invoked after the stream is closed
    """
    def read():
        # Pull fixed-size chunks until the stream reports EOF.
        try:
            while True:
                bytes_ = stream.read(4096)
                if not bytes_:
                    return
                yield bytes_
        except Exception as e:
            Log.error("Problem iterating through stream", cause=e)
        finally:
            # Close the stream first, then run the caller's closer;
            # failures in either are deliberately ignored.
            try:
                stream.close()
            except Exception:
                pass
            if closer:
                try:
                    closer()
                except Exception:
                    pass
    return ibytes2ilines(read(), encoding=encoding)
def get_decoder(encoding, flexible=False):
    """
    RETURN FUNCTION TO PERFORM DECODE
    :param encoding: STRING OF THE ENCODING
    :param flexible: True IF YOU WISH TO TRY OUR BEST, AND KEEP GOING
    :return: FUNCTION
    """
    if encoding == None:
        # No decoding requested: hand the bytes back untouched.
        return lambda v: v
    elif flexible:
        # Best-effort decode: silently drop undecodable byte sequences.
        return lambda v: v.decode(encoding, 'ignore')
    else:
        # Strict decode: raise on invalid input.
        return lambda v: v.decode(encoding)
| [
"mo_math.floor",
"zlib.decompressobj",
"mo_logs.Log.error",
"zipfile.ZipFile",
"io.BytesIO",
"struct.pack",
"gzip.GzipFile",
"zlib.crc32",
"tempfile.TemporaryFile",
"zlib.compressobj",
"time.time",
"mo_logs.Log.note"
] | [((7801, 7840), 'zlib.decompressobj', 'zlib.decompressobj', (['(16 + zlib.MAX_WBITS)'], {}), '(16 + zlib.MAX_WBITS)\n', (7819, 7840), False, 'import zlib\n'), ((9371, 9386), 'zlib.crc32', 'zlib.crc32', (["b''"], {}), "(b'')\n", (9381, 9386), False, 'import zlib\n'), ((9419, 9493), 'zlib.compressobj', 'zlib.compressobj', (['(9)', 'zlib.DEFLATED', '(-zlib.MAX_WBITS)', 'zlib.DEF_MEM_LEVEL', '(0)'], {}), '(9, zlib.DEFLATED, -zlib.MAX_WBITS, zlib.DEF_MEM_LEVEL, 0)\n', (9435, 9493), False, 'import zlib\n'), ((10978, 11017), 'zlib.decompressobj', 'zlib.decompressobj', (['(16 + zlib.MAX_WBITS)'], {}), '(16 + zlib.MAX_WBITS)\n', (10996, 11017), False, 'import zlib\n'), ((2154, 2169), 'tempfile.TemporaryFile', 'TemporaryFile', ([], {}), '()\n', (2167, 2169), False, 'from tempfile import TemporaryFile\n'), ((5232, 5284), 'mo_logs.Log.error', 'Log.error', (['"""Do not know how to slice this generator"""'], {}), "('Do not know how to slice this generator')\n", (5241, 5284), False, 'from mo_logs import Log\n'), ((6829, 6881), 'mo_logs.Log.error', 'Log.error', (['"""Do not know how to slice this generator"""'], {}), "('Do not know how to slice this generator')\n", (6838, 6881), False, 'from mo_logs import Log\n'), ((7363, 7378), 'tempfile.TemporaryFile', 'TemporaryFile', ([], {}), '()\n', (7376, 7378), False, 'from tempfile import TemporaryFile\n'), ((9706, 9750), 'struct.pack', 'struct.pack', (['"""<2L"""', 'crc', '(length & 4294967295)'], {}), "('<2L', crc, length & 4294967295)\n", (9717, 9750), False, 'import struct\n'), ((10047, 10071), 'io.BytesIO', 'BytesIO', (['self.compressed'], {}), '(self.compressed)\n', (10054, 10071), False, 'from io import BytesIO\n'), ((10469, 10493), 'io.BytesIO', 'BytesIO', (['self.compressed'], {}), '(self.compressed)\n', (10476, 10493), False, 'from io import BytesIO\n'), ((10512, 10543), 'zipfile.ZipFile', 'zipfile.ZipFile', (['buff'], {'mode': '"""r"""'}), "(buff, mode='r')\n", (10527, 10543), False, 'import zipfile\n'), ((1009, 1068), 
'mo_logs.Log.error', 'Log.error', (['"""can not handle {{encoding}}"""'], {'encoding': 'encoding'}), "('can not handle {{encoding}}', encoding=encoding)\n", (1018, 1068), False, 'from mo_logs import Log\n'), ((1187, 1223), 'mo_logs.Log.error', 'Log.error', (['"""Can only split by lines"""'], {}), "('Can only split by lines')\n", (1196, 1223), False, 'from mo_logs import Log\n'), ((1579, 1636), 'mo_logs.Log.error', 'Log.error', (['"""Slice of {{num}} bytes is too big"""'], {'num': '(j - i)'}), "('Slice of {{num}} bytes is too big', num=j - i)\n", (1588, 1636), False, 'from mo_logs import Log\n'), ((9529, 9547), 'zlib.crc32', 'zlib.crc32', (['d', 'crc'], {}), '(d, crc)\n', (9539, 9547), False, 'import zlib\n'), ((11339, 11379), 'mo_math.floor', 'mo_math.floor', (['last_bytes_count', '(1000000)'], {}), '(last_bytes_count, 1000000)\n', (11352, 11379), False, 'import mo_math\n'), ((11383, 11418), 'mo_math.floor', 'mo_math.floor', (['bytes_count', '(1000000)'], {}), '(bytes_count, 1000000)\n', (11396, 11418), False, 'import mo_math\n'), ((1814, 1937), 'mo_logs.Log.error', 'Log.error', (['"""Can not read file slice at {{index}}, with encoding {{encoding}}"""'], {'index': 'i', 'encoding': 'self.encoding', 'cause': 'e'}), "('Can not read file slice at {{index}}, with encoding {{encoding}}',\n index=i, encoding=self.encoding, cause=e)\n", (1823, 1937), False, 'from mo_logs import Log\n'), ((4164, 4242), 'mo_logs.Log.note', 'Log.note', (['"""Using file of size {{length}} instead of str()"""'], {'length': 'total_bytes'}), "('Using file of size {{length}} instead of str()', length=total_bytes)\n", (4172, 4242), False, 'from mo_logs import Log\n'), ((5811, 5843), 'mo_logs.Log.error', 'Log.error', (['"""Problem indexing"""', 'e'], {}), "('Problem indexing', e)\n", (5820, 5843), False, 'from mo_logs import Log\n'), ((7278, 7310), 'mo_logs.Log.error', 'Log.error', (['"""Problem indexing"""', 'e'], {}), "('Problem indexing', e)\n", (7287, 7310), False, 'from mo_logs import Log\n'), 
((8060, 8088), 'mo_logs.Log.error', 'Log.error', (['"""Not expected"""', 'e'], {}), "('Not expected', e)\n", (8069, 8088), False, 'from mo_logs import Log\n'), ((11265, 11294), 'mo_logs.Log.error', 'Log.error', (['"""problem"""'], {'cause': 'e'}), "('problem', cause=e)\n", (11274, 11294), False, 'from mo_logs import Log\n'), ((11485, 11531), 'mo_logs.Log.note', 'Log.note', (['"""bytes={{bytes}}"""'], {'bytes': 'bytes_count'}), "('bytes={{bytes}}', bytes=bytes_count)\n", (11493, 11531), False, 'from mo_logs import Log\n'), ((11939, 11993), 'mo_logs.Log.error', 'Log.error', (['"""Problem iterating through stream"""'], {'cause': 'e'}), "('Problem iterating through stream', cause=e)\n", (11948, 11993), False, 'from mo_logs import Log\n'), ((12533, 12587), 'mo_logs.Log.error', 'Log.error', (['"""Problem iterating through stream"""'], {'cause': 'e'}), "('Problem iterating through stream', cause=e)\n", (12542, 12587), False, 'from mo_logs import Log\n'), ((3786, 3801), 'tempfile.TemporaryFile', 'TemporaryFile', ([], {}), '()\n', (3799, 3801), False, 'from tempfile import TemporaryFile\n'), ((4325, 4400), 'mo_logs.Log.error', 'Log.error', (['"""Could not write file > {{num}} bytes"""'], {'num': 'total_bytes', 'cause': 'e'}), "('Could not write file > {{num}} bytes', num=total_bytes, cause=e)\n", (4334, 4400), False, 'from mo_logs import Log\n'), ((5719, 5767), 'mo_logs.Log.error', 'Log.error', (['"""can not index out-of-order too much"""'], {}), "('can not index out-of-order too much')\n", (5728, 5767), False, 'from mo_logs import Log\n'), ((7186, 7234), 'mo_logs.Log.error', 'Log.error', (['"""can not index out-of-order too much"""'], {}), "('can not index out-of-order too much')\n", (7195, 7234), False, 'from mo_logs import Log\n'), ((10097, 10134), 'gzip.GzipFile', 'gzip.GzipFile', ([], {'fileobj': 'buff', 'mode': '"""r"""'}), "(fileobj=buff, mode='r')\n", (10110, 10134), False, 'import gzip\n'), ((9252, 9263), 'time.time', 'time.time', ([], {}), '()\n', (9261, 9263), 
False, 'import time\n')] |
from unittest.mock import patch
from dependent import parameter_dependent
@patch('math.sqrt')
def test_negative(mock_sqrt):
    # Negative input is rejected before math.sqrt is ever consulted.
    result = parameter_dependent(-1)
    assert result == 0
    mock_sqrt.assert_not_called()
@patch('math.sqrt')
def test_zero(mock_sqrt):
    # Zero is delegated to math.sqrt exactly once.
    mock_sqrt.return_value = 0
    result = parameter_dependent(0)
    assert result == 0
    mock_sqrt.assert_called_once_with(0)
@patch('math.sqrt')
def test_twenty_five(mock_sqrt):
    # In-range input returns whatever math.sqrt produced.
    mock_sqrt.return_value = 5
    result = parameter_dependent(25)
    assert result == 5
    mock_sqrt.assert_called_with(25)
@patch('math.sqrt')
def test_hundred(mock_sqrt):
    # Upper boundary (100) still goes through math.sqrt.
    mock_sqrt.return_value = 10
    result = parameter_dependent(100)
    assert result == 10
    mock_sqrt.assert_called_with(100)
@patch('math.sqrt')
def test_hundred_and_one(mock_sqrt):
    # Above the upper boundary the result is capped without calling sqrt.
    result = parameter_dependent(101)
    assert result == 10
    mock_sqrt.assert_not_called()
| [
"dependent.parameter_dependent",
"unittest.mock.patch"
] | [((77, 95), 'unittest.mock.patch', 'patch', (['"""math.sqrt"""'], {}), "('math.sqrt')\n", (82, 95), False, 'from unittest.mock import patch\n'), ((203, 221), 'unittest.mock.patch', 'patch', (['"""math.sqrt"""'], {}), "('math.sqrt')\n", (208, 221), False, 'from unittest.mock import patch\n'), ((362, 380), 'unittest.mock.patch', 'patch', (['"""math.sqrt"""'], {}), "('math.sqrt')\n", (367, 380), False, 'from unittest.mock import patch\n'), ((525, 543), 'unittest.mock.patch', 'patch', (['"""math.sqrt"""'], {}), "('math.sqrt')\n", (530, 543), False, 'from unittest.mock import patch\n'), ((688, 706), 'unittest.mock.patch', 'patch', (['"""math.sqrt"""'], {}), "('math.sqrt')\n", (693, 706), False, 'from unittest.mock import patch\n'), ((137, 160), 'dependent.parameter_dependent', 'parameter_dependent', (['(-1)'], {}), '(-1)\n', (156, 160), False, 'from dependent import parameter_dependent\n'), ((290, 312), 'dependent.parameter_dependent', 'parameter_dependent', (['(0)'], {}), '(0)\n', (309, 312), False, 'from dependent import parameter_dependent\n'), ((456, 479), 'dependent.parameter_dependent', 'parameter_dependent', (['(25)'], {}), '(25)\n', (475, 479), False, 'from dependent import parameter_dependent\n'), ((616, 640), 'dependent.parameter_dependent', 'parameter_dependent', (['(100)'], {}), '(100)\n', (635, 640), False, 'from dependent import parameter_dependent\n'), ((755, 779), 'dependent.parameter_dependent', 'parameter_dependent', (['(101)'], {}), '(101)\n', (774, 779), False, 'from dependent import parameter_dependent\n')] |
"""Tests a package installation on a user OS."""
import pathlib
import subprocess
import unittest
class TestPlErrPackage(unittest.TestCase):
    """End-to-end tests that invoke `python3 -m plerr` as a subprocess
    and check its stdout/stderr and exit code."""
    def test_plerr_error_getter(self):
        # Given: a command to get a description of a pylint error by an
        # error code.
        command = ['python3', '-m', 'plerr', 'R1710']
        # When: the command invokes.
        proc = subprocess.Popen(
            command,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE
        )
        stdout, stderr = proc.communicate()
        # Expected output is stored as a fixture file next to this test.
        expected_stdout = (
            (
                pathlib.Path(__file__).resolve().parent /
                'command_output_fixture.txt'
            )
            .read_bytes()
        )
        # Then: it produces a highlighted output to stdout of the given error
        # with the exit code 0.
        assert stdout == expected_stdout
        assert not stderr
        assert proc.returncode == 0
    def test_plerr_non_existent_error(self):
        # Given: a command to get a description of a pylint error with an
        # existent error code.
        command = ['python3', '-m', 'plerr', 'R0000']
        # When: the command invokes.
        proc = subprocess.Popen(
            command,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE
        )
        stdout, stderr = proc.communicate()
        expected_stdout = (
            b'Cannot find R0000 pylint error by such error code.\n'
        )
        # Then: it produces an error message to stderr with the exit code 1.
        assert stderr == expected_stdout
        assert not stdout
        assert proc.returncode == 1
| [
"subprocess.Popen",
"pathlib.Path"
] | [((383, 456), 'subprocess.Popen', 'subprocess.Popen', (['command'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), '(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n', (399, 456), False, 'import subprocess\n'), ((1214, 1287), 'subprocess.Popen', 'subprocess.Popen', (['command'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), '(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n', (1230, 1287), False, 'import subprocess\n'), ((605, 627), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (617, 627), False, 'import pathlib\n')] |
import os
_lab_components = """from api2db.ingest import *
CACHE=True # Caches API data so that only a single API call is made if True
def import_target():
return None
def pre_process():
return None
def data_features():
return None
def post_process():
return None
if __name__ == "__main__":
api_form = ApiForm(name="lab",
pre_process=pre_process(),
data_features=data_features(),
post_process=post_process()
)
api_form.experiment(CACHE, import_target)
"""
def mlab():
    """Shell command: scaffold a lab for designing an ApiForm.

    Creates a ``laboratory/`` directory inside the current working
    directory (the project root) and writes the ``_lab_components``
    template into ``laboratory/lab.py`` for the user to edit. If the
    laboratory already exists, nothing is written.

    Returns:
        None
    """
    lab_dir_path = os.path.join(os.getcwd(), "laboratory")
    if os.path.isdir(lab_dir_path):
        print("Lab already exists!")
        return
    os.makedirs(lab_dir_path)
    with open(os.path.join(lab_dir_path, "lab.py"), "w") as handle:
        handle.write(_lab_components)
    print("Lab has been created. Edit the file found in laboratory/lab.py")
| [
"os.path.isdir",
"os.path.join",
"os.makedirs",
"os.getcwd"
] | [((2460, 2471), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2469, 2471), False, 'import os\n'), ((2498, 2525), 'os.path.isdir', 'os.path.isdir', (['lab_dir_path'], {}), '(lab_dir_path)\n', (2511, 2525), False, 'import os\n'), ((2535, 2560), 'os.makedirs', 'os.makedirs', (['lab_dir_path'], {}), '(lab_dir_path)\n', (2546, 2560), False, 'import os\n'), ((2579, 2615), 'os.path.join', 'os.path.join', (['lab_dir_path', '"""lab.py"""'], {}), "(lab_dir_path, 'lab.py')\n", (2591, 2615), False, 'import os\n')] |
#-*- coding: utf-8 -*-
"""Forms for the django-shop app."""
from django import forms
from django.conf import settings
from django.forms.models import modelformset_factory
from django.utils.translation import ugettext_lazy as _
from shop.backends_pool import backends_pool
from shop.models.cartmodel import CartItem
from shop.util.loader import load_class
def get_shipping_backends_choices():
    """Build (url_namespace, verbose name) choice pairs for every
    configured shipping backend."""
    choices = []
    for backend in backends_pool.get_shipping_backends_list():
        label = getattr(backend, 'backend_verbose_name', backend.backend_name)
        choices.append((backend.url_namespace, label))
    return tuple(choices)
def get_billing_backends_choices():
    """Build (url_namespace, verbose name) choice pairs for every
    configured payment backend."""
    choices = []
    for backend in backends_pool.get_payment_backends_list():
        label = getattr(backend, 'backend_verbose_name', backend.backend_name)
        choices.append((backend.url_namespace, label))
    return tuple(choices)
class BillingShippingForm(forms.Form):
    """
    A form displaying all available payment and shipping methods (the ones
    defined in settings.SHOP_SHIPPING_BACKENDS and
    settings.SHOP_PAYMENT_BACKENDS)

    NOTE(review): the choices are evaluated once at class-definition
    (import) time, not per request — confirm all backends are registered
    before this module is imported.
    """
    shipping_method = forms.ChoiceField(choices=get_shipping_backends_choices(), label=_('Shipping method'))
    payment_method = forms.ChoiceField(choices=get_billing_backends_choices(), label=_('Payment method'))
class CartItemModelForm(forms.ModelForm):
    """A form for the CartItem model. To be used in the CartDetails view."""
    # Quantity 0 is allowed: it signals removal of the item (see save()).
    quantity = forms.IntegerField(min_value=0, max_value=9999)
    class Meta:
        model = CartItem
        fields = ('quantity', )
    def save(self, *args, **kwargs):
        """
        We don't save the model using the regular way here because the
        Cart's ``update_quantity()`` method already takes care of deleting
        items from the cart when the quantity is set to 0.
        """
        quantity = self.cleaned_data['quantity']
        instance = self.instance.cart.update_quantity(self.instance.pk,
                quantity)
        return instance
def get_cart_item_modelform_class():
    """
    Return the class of the CartItem ModelForm.

    The default `shop.forms.CartItemModelForm` can be overridden settings
    ``SHOP_CART_ITEM_FORM`` parameter in settings
    """
    dotted_path = getattr(settings, 'SHOP_CART_ITEM_FORM', 'shop.forms.CartItemModelForm')
    return load_class(dotted_path)
def get_cart_item_formset(cart_items=None, data=None):
    """
    Returns a CartItemFormSet which can be used in the CartDetails view.
    :param cart_items: The queryset to be used for this formset. This should
    be the list of updated cart items of the current cart.
    :param data: Optional POST data to be bound to this formset.
    """
    assert(cart_items is not None)
    # extra=0: only render forms for existing cart items, no blank rows.
    CartItemFormSet = modelformset_factory(CartItem, form=get_cart_item_modelform_class(),
        extra=0)
    kwargs = {'queryset': cart_items, }
    form_set = CartItemFormSet(data, **kwargs)
    # The Django ModelFormSet pulls the item out of the database again and we
    # would lose the updated line_subtotals
    for form in form_set:
        for cart_item in cart_items:
            if form.instance.pk == cart_item.pk:
                form.instance = cart_item
    return form_set
| [
"django.utils.translation.ugettext_lazy",
"shop.backends_pool.backends_pool.get_shipping_backends_list",
"shop.backends_pool.backends_pool.get_payment_backends_list",
"shop.util.loader.load_class",
"django.forms.IntegerField"
] | [((419, 461), 'shop.backends_pool.backends_pool.get_shipping_backends_list', 'backends_pool.get_shipping_backends_list', ([], {}), '()\n', (459, 461), False, 'from shop.backends_pool import backends_pool\n'), ((640, 681), 'shop.backends_pool.backends_pool.get_payment_backends_list', 'backends_pool.get_payment_backends_list', ([], {}), '()\n', (679, 681), False, 'from shop.backends_pool import backends_pool\n'), ((1369, 1416), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'min_value': '(0)', 'max_value': '(9999)'}), '(min_value=0, max_value=9999)\n', (1387, 1416), False, 'from django import forms\n'), ((2255, 2275), 'shop.util.loader.load_class', 'load_class', (['cls_name'], {}), '(cls_name)\n', (2265, 2275), False, 'from shop.util.loader import load_class\n'), ((1104, 1124), 'django.utils.translation.ugettext_lazy', '_', (['"""Shipping method"""'], {}), "('Shipping method')\n", (1105, 1124), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1211, 1230), 'django.utils.translation.ugettext_lazy', '_', (['"""Payment method"""'], {}), "('Payment method')\n", (1212, 1230), True, 'from django.utils.translation import ugettext_lazy as _\n')] |
from src.access import UserRemoveAccess
from generate_access_data import generate_access_data
def test_remove_user_access():
    """Users may remove themselves and superusers may remove anyone;
    managers and anonymous sessions may not."""
    sessions = generate_access_data()
    target = sessions['user'].users.get('user')
    granted = {
        role: UserRemoveAccess(sessions[role], target).granted()
        for role in ('user', 'manager', 'super', 'none')
    }
    assert granted['user']
    assert not granted['manager']
    assert granted['super']
    assert not granted['none']
| [
"generate_access_data.generate_access_data",
"src.access.UserRemoveAccess"
] | [((142, 164), 'generate_access_data.generate_access_data', 'generate_access_data', ([], {}), '()\n', (162, 164), False, 'from generate_access_data import generate_access_data\n'), ((228, 268), 'src.access.UserRemoveAccess', 'UserRemoveAccess', (["sessions['user']", 'user'], {}), "(sessions['user'], user)\n", (244, 268), False, 'from src.access import UserRemoveAccess\n'), ((289, 332), 'src.access.UserRemoveAccess', 'UserRemoveAccess', (["sessions['manager']", 'user'], {}), "(sessions['manager'], user)\n", (305, 332), False, 'from src.access import UserRemoveAccess\n'), ((351, 392), 'src.access.UserRemoveAccess', 'UserRemoveAccess', (["sessions['super']", 'user'], {}), "(sessions['super'], user)\n", (367, 392), False, 'from src.access import UserRemoveAccess\n'), ((410, 450), 'src.access.UserRemoveAccess', 'UserRemoveAccess', (["sessions['none']", 'user'], {}), "(sessions['none'], user)\n", (426, 450), False, 'from src.access import UserRemoveAccess\n')] |
import re
import unicodedata
# Remove empty brackets (that could happen if the contents have been removed already
# e.g. for citation ( [3] [4] ) -> ( ) -> nothing
def remove_brackets_without_words(text: str) -> str:
    """Replace (), [] and {} pairs containing no word characters with a
    single space, in that order."""
    result = text
    for pattern in (r"\([\W\s]*\)", r"\[[\W\s]*\]", r"\{[\W\s]*\}"):
        result = re.sub(pattern, " ", result)
    return result
# Some older articles have titles like "[A study of ...]."
# This removes the brackets while retaining the full stop
def remove_weird_brackets_from_old_titles(title_text: str) -> str:
    """Strip the brackets from old-style titles like "[A study of ...]."
    while retaining the trailing full stop.

    :param title_text: the article title (may be empty or whitespace-only).
    :return: the title without the enclosing brackets, stripped of
        surrounding whitespace.
    """
    title_text = title_text.strip()
    # startswith/endswith are safe on empty strings -- the original
    # title_text[0] indexing raised IndexError for empty/whitespace input.
    if title_text.startswith("[") and title_text.endswith("]."):
        title_text = title_text[1:-2] + "."
    return title_text
def cleanup_text(text: str) -> str:
    """Normalise a text block: drop control characters, normalise
    separators to plain spaces and tidy stray commas around periods."""
    # Line/paragraph separator characters become plain spaces.
    text = text.replace("\u2028", " ").replace("\u2029", " ")
    # Single pass over the characters: drop Unicode category C* (control),
    # map category Z* (separators) to a space, keep everything else.
    cleaned = []
    for ch in text:
        major = unicodedata.category(ch)[0]
        if major == "C":
            continue
        cleaned.append(" " if major == "Z" else ch)
    text = "".join(cleaned)
    # Collapse runs of commas, and commas directly before a period.
    text = re.sub(r",(\s*,)*", ",", text)
    text = re.sub(r"(,\s*)*\.", ".", text)
    return text.strip()
# XML elements whose contents are skipped entirely during text extraction
# (tables, formulas, references, figures, media, ...). Only the tail text
# following such an element is kept -- see extract_text_from_elem.
ignore_list = [
    "table",
    "table-wrap",
    "xref",
    "disp-formula",
    "inline-formula",
    "ref-list",
    "bio",
    "ack",
    "graphic",
    "media",
    "tex-math",
    "mml:math",
    "object-id",
    "ext-link",
]
# XML elements whose text should form a separate block (a 0 marker is
# inserted before them -- see extract_text_from_elem_list_merge).
separation_list = ["title", "p", "sec", "break", "def-item", "list-item", "caption"]
def extract_text_from_elem(elem):
    """Recursively pull text fragments out of one XML element.

    Returns a flat list of strings; the integer 0 is used as a
    block-separator marker, consumed later by the merge step.
    """
    head = elem.text if elem.text else ""
    tail = elem.tail if elem.tail else ""
    # Recurse into child elements and collect their fragments in order.
    child_fragments = []
    for child in elem:
        child_fragments.extend(extract_text_from_elem(child))
    if elem.tag in ignore_list:
        # Skip this element's own contents entirely; keep only the tail text.
        return [tail.strip()]
    if elem.tag in separation_list:
        # The leading 0 marks a block boundary before this element's text.
        return [0, head] + child_fragments + [tail]
    return [head] + child_fragments + [tail]
def extract_text_from_elem_list_merge(list):
    """Merge extracted text fragments into blocks, splitting on 0 markers.

    NOTE(review): the parameter shadows the builtin ``list``; the name is
    kept so existing keyword callers keep working.
    """
    blocks = []
    buffer = ""
    for token in list:
        if token == 0:
            # Marker: close off the current block if it has any content.
            if buffer:
                blocks.append(buffer)
            buffer = ""
        else:
            # Accumulate fragments with a space separator.
            buffer = buffer + " " + token
    # Flush whatever remains after the final fragment.
    buffer = buffer.strip()
    if buffer:
        blocks.append(buffer)
    return blocks
def extract_text_from_elem_list(elem_list):
    """Extract cleaned text blocks from one XML element or a list of them."""
    fragments = []
    if isinstance(elem_list, list):
        for elem in elem_list:
            # A trailing 0 keeps each element's text in its own block.
            fragments += extract_text_from_elem(elem)
            fragments.append(0)
    else:
        fragments = extract_text_from_elem(elem_list) + [0]
    # Merge fragments into blocks, honouring the 0 delimiters.
    merged = extract_text_from_elem_list_merge(fragments)
    # Newlines inside a block carry no syntactic meaning here; flatten them,
    # then normalise each block (control chars, separators, stray commas).
    merged = [block.replace("\n", " ") for block in merged]
    return [cleanup_text(block) for block in merged]
def trim_sentence_lengths(text: str) -> str:
    """Clip every '.'-separated segment of *text* to at most 90000 characters."""
    max_len = 90000
    segments = (segment[:max_len] for segment in text.split("."))
    return ".".join(segments)
| [
"re.sub",
"unicodedata.category"
] | [((238, 274), 're.sub', 're.sub', (['"""\\\\([\\\\W\\\\s]*\\\\)"""', '""" """', 'text'], {}), "('\\\\([\\\\W\\\\s]*\\\\)', ' ', text)\n", (244, 274), False, 'import re\n'), ((285, 322), 're.sub', 're.sub', (['"""\\\\[[\\\\W\\\\s]*\\\\]"""', '""" """', 'fixed'], {}), "('\\\\[[\\\\W\\\\s]*\\\\]', ' ', fixed)\n", (291, 322), False, 'import re\n'), ((333, 370), 're.sub', 're.sub', (['"""\\\\{[\\\\W\\\\s]*\\\\}"""', '""" """', 'fixed'], {}), "('\\\\{[\\\\W\\\\s]*\\\\}', ' ', fixed)\n", (339, 370), False, 'import re\n'), ((1150, 1180), 're.sub', 're.sub', (['""",(\\\\s*,)*"""', '""","""', 'text'], {}), "(',(\\\\s*,)*', ',', text)\n", (1156, 1180), False, 'import re\n'), ((1193, 1225), 're.sub', 're.sub', (['"""(,\\\\s*)*\\\\."""', '"""."""', 'text'], {}), "('(,\\\\s*)*\\\\.', '.', text)\n", (1199, 1225), False, 'import re\n'), ((955, 979), 'unicodedata.category', 'unicodedata.category', (['ch'], {}), '(ch)\n', (975, 979), False, 'import unicodedata\n'), ((1017, 1041), 'unicodedata.category', 'unicodedata.category', (['ch'], {}), '(ch)\n', (1037, 1041), False, 'import unicodedata\n')] |
#!/usr/bin/env python3
#
# Import built in packages
#
import logging
import platform
import os
import time
import socket
import subprocess
import signal
import psutil
from .util import setup_logger
from .util import PylinxException
import re
# Import 3th party modules:
# - wexpect/pexpect to launch ant interact with subprocesses.
if platform.system() == 'Windows':
import wexpect as expect
print(expect.__version__)
else: # Linux
import pexpect as expect
# The directory of this script file (used to locate bundled TCL helpers).
__here__ = os.path.dirname(os.path.realpath(__file__))
#
# Get the logger (util.py configures the handlers/formatters).
#
setup_logger()
logger = logging.getLogger('pylinx')
# xsct_line_end is the line ending used by the XSCT console. It does not depend
# on the platform: it is always Windows-style '\r\n'.
xsct_line_end = '\r\n'
# Defaults for the xsdbserver connection.
HOST = '127.0.0.1'  # Standard loop-back interface address (localhost)
PORT = 4567
class XsctServer:
    """The controller of the XSCT server application. This is an optional feature. The commands
    will be given to the client (see Xsct).
    """

    def __init__(self, xsct_executable=None, port=PORT, verbose=False):
        """Initialize the Server object and, if possible, start the server.

        :param xsct_executable: The full-path to the XSCT/XSDB executable
        :param port: TCP port where the server should be started
        :param verbose: True: prints the XSCT's stdout to python's stdout.
        """
        self._xsct_server = None  # subprocess.Popen handle, or None when not running
        if (xsct_executable is not None) and (port is not None):
            self.start_server(xsct_executable, port, verbose)

    def start_server(self, xsct_executable=None, port=PORT, verbose=False):
        """Start the xsdbserver.

        :param xsct_executable: The full-path to the XSCT/XSDB executable
        :param port: TCP port where the server should be started
        :param verbose: True: prints the XSCT's stdout to python's stdout.
        :return: None
        :raises ValueError: if either argument is None.
        """
        if (xsct_executable is None) or (port is None):
            raise ValueError("xsct_executable and port must be non None.")
        start_server_command = 'xsdbserver start -port {}'.format(port)
        start_command = '{} -eval "{}" -interactive'.format(xsct_executable, start_server_command)
        # BUGFIX: verbose used to be silently dropped here; forward it.
        self._launch_child(start_command, verbose)

    def _start_dummy_server(self):
        """Start a dummy server, just for test purposes.

        :return: None
        """
        dummy_executable = os.path.abspath(os.path.join(__here__, 'dummy_xsct.tcl'))
        start_command = ['tclsh', dummy_executable]
        self._launch_child(start_command)

    def _launch_child(self, start_command, verbose=False):
        """Spawn the server process; suppress its stdout unless verbose."""
        logger.info('Starting xsct server: %s', start_command)
        # subprocess.DEVNULL instead of open(os.devnull, 'w'): the old file
        # handle was never closed (resource leak).
        stdout = None if verbose else subprocess.DEVNULL
        self._xsct_server = subprocess.Popen(start_command, stdout=stdout)
        logger.info('xsct started with PID: %d', self._xsct_server.pid)

    def stop_server(self, wait=True):
        """Kill the server and all of its child processes.

        :param wait: Wait for complete kill, or just send kill signals.
        :return: None
        """
        if not self._xsct_server:
            logger.debug('The server is not started or it has been killed.')
            return
        poll = self._xsct_server.poll()
        if poll is None:
            logger.debug("The server is alive, let's kill it.")
            # XSCT may spawn children (e.g. inside a terminal); kill the whole
            # tree, deepest children first.
            try:
                current_process = psutil.Process(self._xsct_server.pid)
                children = current_process.children(recursive=True)
                children.append(current_process)
                for child in reversed(children):
                    logger.debug("Killing child with pid: %d", child.pid)
                    os.kill(child.pid, signal.SIGTERM)  # or signal.SIGKILL
            # BUGFIX: use the public psutil.NoSuchProcess; psutil._exceptions
            # is a private module that newer psutil releases removed.
            except psutil.NoSuchProcess:
                logger.debug('psutil.NoSuchProcess process no longer exists.')
            if wait:
                poll = self._xsct_server.poll()
                while poll is None:
                    logger.debug("The server is still alive, wait for it.")
                    time.sleep(.1)
                    poll = self._xsct_server.poll()
            self._xsct_server = None
        else:
            logger.debug("The server is not alive, return...")

    def pid(self):
        """Return the PID of the spawned server process."""
        return self._xsct_server.pid
class Xsct:
    """The XSCT client class. This communicates with the server and sends commands.
    """

    def __init__(self, host=HOST, port=PORT):
        """Initialize the client object and, when a host is given, connect.

        :param host: the address of the machine where the XSDB server is running.
        :param port: the port of the XSDB server.
        """
        self._socket = None
        if host is not None:
            self.connect(host, port)

    def connect(self, host=HOST, port=PORT, timeout=10):
        """Connect to the xsdbserver.

        :param host: Host machine where the xsdbserver is running.
        :param port: Port of the xsdbserver.
        :param timeout: Timeout for blocking socket operations, in seconds
            (a non-negative float); None leaves the socket blocking.
        :return: None
        """
        self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self._socket.connect((host, port))
        if timeout is not None:
            self._socket.settimeout(timeout)
        logger.info('Connected to: %s...', repr((host, port)))

    def close(self):
        """Close the connection.

        :return: None
        """
        self._socket.close()

    def send(self, msg):
        """Send a raw message to the xsdbserver through the socket. Note that this
        method does not append line-endings. Use `do` instead.

        :param msg: The message to be sent (str or bytes).
        :return: Nothing
        """
        if isinstance(msg, str):
            msg = msg.encode()
        logger.debug('Sending message: %s ...', repr(msg))
        self._socket.sendall(msg)

    def recv(self, bufsize=1024, timeout=None):
        """Receive one full line from the server. Not recommended natively; use `do`.

        :param bufsize: The maximum amount of data to be received at once.
        :param timeout: Optional socket-timeout override in seconds.
        :return: the first complete line, without the line ending.
        """
        if timeout is not None:
            self._socket.settimeout(timeout)
        ans = ''
        while True:
            data = self._socket.recv(bufsize)
            logger.debug('Data received: %s ...', repr(data))
            ans += data.decode("utf-8")
            # BUGFIX: the original rebound `ans` to the result of split(), so a
            # message arriving in several chunks made the next `ans += ...`
            # operate on a list and crash. Keep `ans` a string; split a copy.
            parts = ans.split(xsct_line_end)
            if len(parts) > 1:
                return parts[0]

    def do(self, command):
        """The main function of the client: send a command, return its result.

        :param command: the XSCT command to run.
        :return: the command's result (text after the 'okay ' prefix).
        :raises PylinxException: if the server reports an error, or the answer
            violates the okay/error protocol.
        """
        command += xsct_line_end
        logger.info('Sending command: %s ...', repr(command))
        self.send(command)
        ans = self.recv()
        if ans.startswith('okay'):
            return ans[5:]
        if ans.startswith('error'):
            raise PylinxException(ans[6:])
        raise PylinxException('Illegal start-string in protocol. Answer is: ' + ans)
default_vivado_prompt = 'Vivado% '
class Vivado:
    """Vivado is a native interface towards the Vivado TCL console. You can run TCL
    commands in it using the do() method. This is a quasi state-less class.
    """

    def __init__(self, executable, args=None, name='Vivado_01',
                 prompt=default_vivado_prompt, timeout=10, encoding="utf-8", wait_startup=True):
        """Spawn a Vivado process in TCL mode and optionally wait for its first prompt.

        :param executable: path to the Vivado executable; None skips spawning (fake run).
        :param args: command-line arguments; defaults to ['-mode', 'tcl'].
        :param name: human-readable name used in log/interaction messages.
        :param prompt: the console prompt string to expect.
        :param timeout: default expect-timeout in seconds.
        :param encoding: encoding used to decode console output (pexpect/Linux path).
        :param wait_startup: if True, block until the first prompt appears.
        """
        self.child_proc = None
        self.name = name
        self.prompt = prompt
        self.timeout = timeout
        self.encoding = encoding
        # Histories of every command sent and every answer/prompt received.
        self.last_cmds = []
        self.last_befores = []
        self.last_prompts = []
        if args is None:
            args = ['-mode', 'tcl']
        if executable is not None:  # None is fake run
            logger.info('Spawning Vivado: ' + executable + str(args))
            self.child_proc = expect.spawn(executable, args)
            if wait_startup:
                self.wait_startup()

    def wait_startup(self, **kwargs):
        """Wait for the initial Vivado prompt (sends no command)."""
        self.do(cmd=None, **kwargs)

    def do(self, cmd, prompt=None, timeout=None, wait_prompt=True, errmsgs=None, encoding="utf-8",
           native_answer=False):
        """Run a single command in the Vivado console.

        :param cmd: the TCL command; None only waits for the prompt.
        :param prompt: prompt override; defaults to self.prompt.
        :param timeout: expect-timeout override; defaults to self.timeout.
        :param wait_prompt: if False, send the command and return immediately.
        :param errmsgs: strings/compiled patterns that mark an error in the output.
        :param encoding: decoding of console output; None falls back to self.encoding.
        :param native_answer: return the raw 'before' buffer instead of the
            trimmed answer.
        :rtype: str
        :raises PylinxException: if the process is dead or an errmsg matches.
        """
        # BUGFIX: errmsgs used to default to a shared mutable list ([]).
        if errmsgs is None:
            errmsgs = []
        if self.child_proc.terminated:
            logger.error('The process has been terminated. Sending command is not possible.')
            raise PylinxException('The process has been terminated. Sending command is not possible.')
        if cmd is not None:
            logger.debug('Sending command: ' + str(cmd))
            # wexpect takes str; pexpect (spawned without encoding) takes bytes.
            if platform.system() == 'Windows':
                self.child_proc.sendline(cmd)
            else:
                self.child_proc.sendline(cmd.encode())
        if prompt is None:
            prompt = self.prompt
        if timeout is None:
            timeout = self.timeout
        if encoding is None:
            encoding = self.encoding
        if wait_prompt:
            self.child_proc.expect(prompt, timeout=timeout)
            logger.debug("before: " + repr(self.child_proc.before))
            self.last_cmds.append(cmd)
            if platform.system() == 'Windows':
                before = self.child_proc.before
                prompt = self.child_proc.after
            else:
                before = self.child_proc.before.decode(encoding)
                prompt = self.child_proc.after.decode(encoding)
            self.last_befores.append(before)
            self.last_prompts.append(prompt)
            # Fail loudly if any caller-supplied error pattern appears.
            for em in errmsgs:
                if isinstance(em, str):
                    em = re.compile(em)
                if em.search(before):
                    logger.error('during running command: {}, before: {}'.format(cmd, before))
                    raise PylinxException('during running command: {}, before: {}'.format(cmd, before))
            if native_answer:
                return before
            # Remove the first line (the echoed command, always empty here)
            # and the trailing fragment before the prompt.
            ret = os.linesep.join(before.split(xsct_line_end)[1:-1])
            return ret.rstrip()
        return None

    def interact(self, cmd=None, **kwargs):
        """Run `cmd` (if given) and echo the last console answer and prompt to stdout."""
        if cmd is not None:
            self.do(cmd, **kwargs)
        before_to_print = os.linesep.join(self.last_befores[-1].split(xsct_line_end)[1:])
        print(before_to_print, end='')
        print(self.last_prompts[-1], end='')

    def get_var(self, varname, **kwargs):
        """Return the value of a TCL variable.

        :raises PylinxException: if the variable does not exist.
        """
        no_var_msg = 'can\'t read "{}": no such variable'.format(varname)
        errmsgs = [re.compile(no_var_msg)]
        command = 'puts ${}'.format(varname)
        ans = self.do(command, errmsgs=errmsgs, **kwargs)
        return ans

    def set_var(self, varname, value, **kwargs):
        """Set a TCL variable and return the console answer."""
        command = 'set {} {}'.format(varname, value)
        ans = self.do(command, **kwargs)
        return ans

    def get_property(self, propName, objectName, **kwargs):
        """Do a get_property command in the Vivado terminal.

        It fetches the given property and returns it (stripped).
        """
        cmd = 'get_property {} {}'.format(propName, objectName)
        return self.do(cmd, **kwargs).strip()

    def set_property(self, propName, value, objectName, **kwargs):
        """Set a property on a Vivado object."""
        cmd = 'set_property {} {} {}'.format(propName, value, objectName)
        self.do(cmd, **kwargs)

    def pid(self):
        """Return the PID of the actual Vivado process.

        Vivado may run as a child of the spawned shell, so search the
        process tree for a name matching 'vivado'.
        """
        parent = psutil.Process(self.child_proc.pid)
        children = parent.children(recursive=True)
        if len(children) == 0:
            return self.child_proc.pid
        for child in children:
            if re.match(".*vivado.*", child.name(), re.I):
                return child.pid
        raise PylinxException('Unknown pid')

    def exit(self, force=False, **kwargs):
        """Terminate the Vivado process.

        :param force: if True, kill the process instead of sending 'exit'.
        :return: the child's exit status, or None if there is nothing to stop.
        """
        logger.debug('start')
        if self.child_proc is None:
            return None
        if self.child_proc.terminated:
            logger.warning('This process has been terminated.')
            return None
        if force:
            return self.child_proc.terminate()
        self.do('exit', wait_prompt=False, **kwargs)
        return self.child_proc.wait()
class VivadoHWServer(Vivado):
    """VivadoHWServer adds hw_server dependent handlers to the Vivado class.
    """

    # allDevices is a class-level cache shared by all instances. Keys are
    # hardware-server URLs (the default is "localhost:3121"), values are lists
    # of the available hardware devices on that server.
    # See get_devices() and fetch_devices() for details.
    allDevices = {}  # type: dict[str, list]

    def __init__(self, executable, hw_server_url='localhost:3121', wait_startup=True, full_init=True, **kwargs):
        """Spawn Vivado and optionally source the hw_server helper script and connect.

        :param hw_server_url: URL of the Xilinx hardware server to talk to.
        :param full_init: if True, source hw_server.tcl and run its init proc
            (requires wait_startup).
        """
        # NOTE: the original assigned hw_server_url twice; once is enough.
        self.hw_server_url = hw_server_url
        self.sio = None
        self.sioLink = None
        super(VivadoHWServer, self).__init__(executable, wait_startup=wait_startup, **kwargs)
        if full_init:
            assert wait_startup
            hw_server_tcl = os.path.join(__here__, 'hw_server.tcl')
            hw_server_tcl = hw_server_tcl.replace(os.sep, '/')
            self.do('source ' + hw_server_tcl, errmsgs=['no such file or directory'])
            self.do('init ' + hw_server_url)

    def fetch_devices(self, force=True):
        """Go through the blasters, fetch all hw devices and store them into the
        allDevices dict.

        :param force: re-fetch even if devices are already cached.
        :raises PylinxException: if no target device is found.
        """
        if force or self.get_devices(auto_fetch=False) is None:
            logger.info('Exploring target devices (fetch_devices: this can take a while)')
            self.do('set devices [fetch_devices]', errmsgs=["Labtoolstcl 44-133", "No target blaster found"])
            try:
                devices = self.get_var('devices')
            except PylinxException:
                raise PylinxException('No target device found. Please connect and power up your device(s)')
            # fetch_devices returns TCL lists; strip the surrounding braces.
            logger.debug("devices: " + str(devices))
            devices = re.findall(r'\{(.+?)\}', devices)
            VivadoHWServer.allDevices[self.hw_server_url] = devices
            logger.debug("allDevices: " + str(VivadoHWServer.allDevices))
        return self.get_devices(auto_fetch=False)

    def get_devices(self, auto_fetch=True, hw_server_url=None):
        """Return the cached hardware devices for a server.

        :param auto_fetch: fetch automatically if the devices have not been
            fetched yet (only for this instance's own server URL).
        """
        if hw_server_url is None:
            hw_server_url = self.hw_server_url
        try:
            return VivadoHWServer.allDevices[hw_server_url]
        except KeyError:
            if auto_fetch and hw_server_url == self.hw_server_url:
                return self.fetch_devices(force=True)
            raise PylinxException('KeyError: No devices has fetched yet. Use fetchDevices() first!')

    def choose_device(self, **kwargs):
        """Interactively select the hw target (blaster) and device (FPGA)."""
        # Print the devices so the user can choose one.
        devices = self.get_devices()
        if len(devices) < 1:
            raise PylinxException("There is no devices! Please use fetch_devices() first!")
        for i, dev in enumerate(devices):
            print(str(i) + ' ' + dev)
        device_id = input('Choose device for {} (Give a number): '.format(self.name))
        device_id = int(device_id)
        device = devices[device_id]
        errmsgs = ['DONE status = 0', 'The debug hub core was not detected.']
        self.do('set_device ' + device, errmsgs=errmsgs, **kwargs)

    def choose_sio(self, createLink=True, **kwargs):
        """Interactively select the transceiver (GT) channel; optionally create a link."""
        self.do('', **kwargs)
        errmsgs = ['No matching hw_sio_gts were found.']
        sios = self.do('get_hw_sio_gts', errmsgs=errmsgs, **kwargs).strip()
        sios = sios.split(' ')
        for i, sio in enumerate(sios):
            print(str(i) + ' ' + sio)
        print('Print choose a SIO for {} side (Give a number): '.format(self.name), end='')
        sio_id = int(input())
        self.sio = sios[sio_id]
        if createLink:
            self.do('create_link ' + self.sio, **kwargs)

    def reset_gt(self):
        """Pulse the GT<name>RESET port of the selected transceiver."""
        resetName = 'PORT.GT{}RESET'.format(self.name)
        # BUGFIX: '{{}}' formats to a literal '{}' and silently dropped
        # self.sio; '{{{}}}' yields '{<sio>}' as intended.
        target = '[get_hw_sio_gts {{{}}}]'.format(self.sio)
        self.set_property(resetName, '1', target)
        self.commit_hw_sio()
        self.set_property(resetName, '0', target)
        self.commit_hw_sio()

    def commit_hw_sio(self):
        """Push the staged SIO property changes down to the hardware."""
        # BUGFIX: the original concatenated three adjacent string literals into
        # a single set_property() argument (missing commas), raising TypeError.
        # commit_hw_sio is a TCL command, so issue it through do().
        self.do('commit_hw_sio [get_hw_sio_gts {{{}}}]'.format(self.sio))
| [
"logging.getLogger",
"os.kill",
"socket.socket",
"re.compile",
"subprocess.Popen",
"pexpect.spawn",
"psutil.Process",
"os.path.join",
"time.sleep",
"os.path.realpath",
"platform.system",
"re.findall"
] | [((668, 695), 'logging.getLogger', 'logging.getLogger', (['"""pylinx"""'], {}), "('pylinx')\n", (685, 695), False, 'import logging\n'), ((357, 374), 'platform.system', 'platform.system', ([], {}), '()\n', (372, 374), False, 'import platform\n'), ((566, 592), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (582, 592), False, 'import os\n'), ((2955, 3001), 'subprocess.Popen', 'subprocess.Popen', (['start_command'], {'stdout': 'stdout'}), '(start_command, stdout=stdout)\n', (2971, 3001), False, 'import subprocess\n'), ((5527, 5576), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (5540, 5576), False, 'import socket\n'), ((12153, 12188), 'psutil.Process', 'psutil.Process', (['self.child_proc.pid'], {}), '(self.child_proc.pid)\n', (12167, 12188), False, 'import psutil\n'), ((2555, 2595), 'os.path.join', 'os.path.join', (['__here__', '"""dummy_xsct.tcl"""'], {}), "(__here__, 'dummy_xsct.tcl')\n", (2567, 2595), False, 'import os\n'), ((8466, 8496), 'pexpect.spawn', 'expect.spawn', (['executable', 'args'], {}), '(executable, args)\n', (8478, 8496), True, 'import pexpect as expect\n'), ((11264, 11286), 're.compile', 're.compile', (['no_var_msg'], {}), '(no_var_msg)\n', (11274, 11286), False, 'import re\n'), ((14034, 14073), 'os.path.join', 'os.path.join', (['__here__', '"""hw_server.tcl"""'], {}), "(__here__, 'hw_server.tcl')\n", (14046, 14073), False, 'import os\n'), ((15220, 15254), 're.findall', 're.findall', (['"""\\\\{(.+?)\\\\}"""', 'devices'], {}), "('\\\\{(.+?)\\\\}', devices)\n", (15230, 15254), False, 'import re\n'), ((3684, 3721), 'psutil.Process', 'psutil.Process', (['self._xsct_server.pid'], {}), '(self._xsct_server.pid)\n', (3698, 3721), False, 'import psutil\n'), ((9199, 9216), 'platform.system', 'platform.system', ([], {}), '()\n', (9214, 9216), False, 'import platform\n'), ((9773, 9790), 'platform.system', 'platform.system', ([], {}), '()\n', (9788, 
9790), False, 'import platform\n'), ((3987, 4021), 'os.kill', 'os.kill', (['child.pid', 'signal.SIGTERM'], {}), '(child.pid, signal.SIGTERM)\n', (3994, 4021), False, 'import os\n'), ((4388, 4403), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (4398, 4403), False, 'import time\n'), ((10243, 10257), 're.compile', 're.compile', (['em'], {}), '(em)\n', (10253, 10257), False, 'import re\n')] |
from behave import *
import requests
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
use_step_matcher("re")
@given("that I am a registered host of privilege walk events and exists events on my username")
def step_impl(context):
context.username = "12thMan"
context.password = "<PASSWORD>"
context.first_name = "12th"
context.last_name = "Man"
context.email = "<EMAIL>"
usr = User.objects.create_user(
context.username,
context.email,
context.password
)
usr.first_name = context.first_name
usr.last_name = context.last_name
usr.save()
registered_user = User.objects.filter(username="12thMan")
assert len(registered_user) == 1
user_auth_token, _ = Token.objects.get_or_create(user=usr)
context.key = user_auth_token.key
data = {
"name": "New year event",
"x_label_min": "Some text to be displayed on the graph",
"x_label_max": "Something else you want to be displayed on the graph",
}
headers = {
'Authorization':'Token '+ context.key
}
resp = requests.post(context.test.live_server_url + "/host/events/create/", data, headers=headers)
@when("I make an API call to the get events API with my correct username")
def step_impl(context):
headers = {
'Authorization':'Token '+ context.key
}
resp = requests.get(context.test.live_server_url + "/host/events/all/", headers=headers)
assert resp.status_code >= 200 and resp.status_code < 300
context.api_response_data = resp.json()
@then("I expect the response that gives the list of events on my username as host")
def step_impl(context):
assert context.api_response_data["events"][0]["name"] == "New year event"
@given("that I am a registered host of privilege walk events and there exists no events on my username")
def step_impl(context):
context.username = "12thMan"
context.password = "<PASSWORD>"
context.first_name = "12th"
context.last_name = "Man"
context.email = "<EMAIL>"
usr = User.objects.create_user(
context.username,
context.email,
context.password
)
usr.first_name = context.first_name
usr.last_name = context.last_name
usr.save()
registered_user = User.objects.filter(username="12thMan")
assert len(registered_user) == 1
user_auth_token, _ = Token.objects.get_or_create(user=usr)
context.key = user_auth_token.key
@when("I make an API call to the get events API with my username")
def step_impl(context):
headers = {
'Authorization':'Token '+ context.key
}
resp = requests.get(context.test.live_server_url + "/host/events/all/", headers=headers)
assert resp.status_code >= 200 and resp.status_code < 300
context.api_response_data = resp.json()
@then("I expect the response that gives the empty list as response")
def step_impl(context):
assert context.api_response_data["events"] == []
@given("that I am a registered host of privilege walk events and forgot my username")
def step_impl(context):
pass
@when("I make an API call to the get events API with wrong username")
def step_impl(context):
resp = requests.get(context.test.live_server_url + "/host/events/all/")
assert resp.status_code >= 400 and resp.status_code < 500
context.api_response_data = resp.json()
@then("I expect the response that says username doesn't exists")
def step_impl(context):
assert context.api_response_data["detail"] == "Authentication credentials were not provided." | [
"requests.post",
"requests.get",
"django.contrib.auth.models.User.objects.filter",
"django.contrib.auth.models.User.objects.create_user",
"rest_framework.authtoken.models.Token.objects.get_or_create"
] | [((450, 525), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', (['context.username', 'context.email', 'context.password'], {}), '(context.username, context.email, context.password)\n', (474, 525), False, 'from django.contrib.auth.models import User\n'), ((672, 711), 'django.contrib.auth.models.User.objects.filter', 'User.objects.filter', ([], {'username': '"""12thMan"""'}), "(username='12thMan')\n", (691, 711), False, 'from django.contrib.auth.models import User\n'), ((776, 813), 'rest_framework.authtoken.models.Token.objects.get_or_create', 'Token.objects.get_or_create', ([], {'user': 'usr'}), '(user=usr)\n', (803, 813), False, 'from rest_framework.authtoken.models import Token\n'), ((1130, 1225), 'requests.post', 'requests.post', (["(context.test.live_server_url + '/host/events/create/')", 'data'], {'headers': 'headers'}), "(context.test.live_server_url + '/host/events/create/', data,\n headers=headers)\n", (1143, 1225), False, 'import requests\n'), ((1403, 1489), 'requests.get', 'requests.get', (["(context.test.live_server_url + '/host/events/all/')"], {'headers': 'headers'}), "(context.test.live_server_url + '/host/events/all/', headers=\n headers)\n", (1415, 1489), False, 'import requests\n'), ((2084, 2159), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', (['context.username', 'context.email', 'context.password'], {}), '(context.username, context.email, context.password)\n', (2108, 2159), False, 'from django.contrib.auth.models import User\n'), ((2306, 2345), 'django.contrib.auth.models.User.objects.filter', 'User.objects.filter', ([], {'username': '"""12thMan"""'}), "(username='12thMan')\n", (2325, 2345), False, 'from django.contrib.auth.models import User\n'), ((2410, 2447), 'rest_framework.authtoken.models.Token.objects.get_or_create', 'Token.objects.get_or_create', ([], {'user': 'usr'}), '(user=usr)\n', (2437, 2447), False, 'from rest_framework.authtoken.models import Token\n'), 
((2659, 2745), 'requests.get', 'requests.get', (["(context.test.live_server_url + '/host/events/all/')"], {'headers': 'headers'}), "(context.test.live_server_url + '/host/events/all/', headers=\n headers)\n", (2671, 2745), False, 'import requests\n'), ((3224, 3288), 'requests.get', 'requests.get', (["(context.test.live_server_url + '/host/events/all/')"], {}), "(context.test.live_server_url + '/host/events/all/')\n", (3236, 3288), False, 'import requests\n')] |
"""soft_encoder.py: Encoding sentence with LSTM.
It encodes sentence with Bi-LSTM.
After encoding, it uses all tokens for sentence, and extract some parts for trigger.
Written in 2020 by <NAME>.
"""
from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence
import torch.nn as nn
import torch
from ..utilities.config import ContextEmb
from .charbilstm import CharBiLSTM
class SoftEncoder(nn.Module):
    """Bi-LSTM sentence encoder that also extracts trigger-token encodings.

    Embeds words (optionally concatenating char-BiLSTM features and contextual
    embeddings), runs a single-layer bidirectional LSTM over each sentence and,
    when trigger positions are given, gathers the LSTM outputs at those
    positions into a padded per-sentence tensor.
    """

    def __init__(self, config, encoder=None):
        # config: project configuration object (embedding sizes, dropout,
        #         device, hidden_dim, ...).
        # encoder: optional existing SoftEncoder whose sub-modules are reused
        #          instead of creating new ones (weight sharing).
        super(SoftEncoder, self).__init__()
        self.config = config
        self.device = config.device
        self.use_char = config.use_char_rnn
        self.context_emb = config.context_emb
        # Input width of the LSTM grows with each optional feature source.
        self.input_size = config.embedding_dim
        if self.context_emb != ContextEmb.none:
            self.input_size += config.context_emb_size
        if self.use_char:
            self.char_feature = CharBiLSTM(config)
            self.input_size += config.charlstm_hidden_dim
        # Pretrained word vectors, fine-tuned during training (freeze=False).
        self.word_embedding = nn.Embedding.from_pretrained(torch.FloatTensor(config.word_embedding), freeze=False).to(
            self.device)
        self.word_drop = nn.Dropout(config.dropout).to(self.device)
        # hidden_dim is split across the two LSTM directions.
        self.lstm = nn.LSTM(self.input_size, config.hidden_dim // 2, num_layers=1, batch_first=True,
                            bidirectional=True).to(self.device)
        # Share sub-modules with an existing encoder when one is supplied.
        if encoder is not None:
            if self.use_char:
                self.char_feature = encoder.char_feature
            self.word_embedding = encoder.word_embedding
            self.word_drop = encoder.word_drop
            self.lstm = encoder.lstm

    def forward(self, word_seq_tensor: torch.Tensor,
                word_seq_lens: torch.Tensor,
                batch_context_emb: torch.Tensor,
                char_inputs: torch.Tensor,
                char_seq_lens: torch.Tensor,
                trigger_position):
        """
        Get sentence and trigger encodings by Bi-LSTM.
        :param word_seq_tensor: word ids -- assumed (batch, seq_len) with 0 as
            padding (the sentence mask compares against 0); TODO confirm
        :param word_seq_lens: true lengths per sentence, used for pack/pad
        :param batch_context_emb: contextual embeddings, concatenated when enabled
        :param char_inputs: character ids fed to the char-BiLSTM
        :param char_seq_lens: per-word character lengths
        :param trigger_position: trigger positions per sentence (e.g. [1,4,5]),
            or None to skip trigger extraction
        :return: (output, sentence_mask, trigger_vec, trigger_mask); the last
            two are None when trigger_position is None
        """
        # lstm_encoding: build the word representation, then sort by length so
        # pack_padded_sequence gets descending lengths, and restore order after.
        word_emb = self.word_embedding(word_seq_tensor)
        if self.context_emb != ContextEmb.none:
            word_emb = torch.cat([word_emb, batch_context_emb.to(self.device)], 2)
        if self.use_char:
            char_features = self.char_feature(char_inputs, char_seq_lens)
            word_emb = torch.cat([word_emb, char_features], 2)
        word_rep = self.word_drop(word_emb)
        sorted_seq_len, permIdx = word_seq_lens.sort(0, descending=True)
        _, recover_idx = permIdx.sort(0, descending=False)
        sorted_seq_tensor = word_rep[permIdx]
        packed_words = pack_padded_sequence(sorted_seq_tensor, sorted_seq_len, True)
        output, _ = self.lstm(packed_words, None)
        output, _ = pad_packed_sequence(output, batch_first=True)
        output = output[recover_idx]
        # 1.0 where a real token sits, 0.0 at padding (id 0).
        sentence_mask = (word_seq_tensor != torch.tensor(0)).float()
        # trigger part extraction: gather LSTM outputs at the trigger indices
        # of each sentence and right-pad them to the longest trigger span.
        if trigger_position is not None:
            max_length = 0
            output_e_list = []
            output_list = [output[i, :, :] for i in range(0, word_rep.size(0))]
            for output_l, trigger_p in zip(output_list, trigger_position):
                output_e = torch.stack([output_l[p, :] for p in trigger_p])
                output_e_list.append(output_e)
                if max_length < output_e.size(0):
                    max_length = output_e.size(0)
            trigger_vec = []
            trigger_mask = []
            for output_e in output_e_list:
                # Zero-pad each trigger encoding up to max_length positions.
                trigger_vec.append(
                    torch.cat([output_e, output_e.new_zeros(max_length - output_e.size(0), self.config.hidden_dim)], 0))
                # True for real trigger positions, False for the padding.
                t_ms = []
                for i in range(output_e.size(0)):
                    t_ms.append(True)
                for i in range(output_e.size(0), max_length):
                    t_ms.append(False)
                t_ms = torch.tensor(t_ms)
                trigger_mask.append(t_ms)
            trigger_vec = torch.stack(trigger_vec)
            trigger_mask = torch.stack(trigger_mask).float()
        else:
            trigger_vec = None
            trigger_mask = None
        return output, sentence_mask, trigger_vec, trigger_mask
| [
"torch.nn.Dropout",
"torch.nn.LSTM",
"torch.stack",
"torch.tensor",
"torch.nn.utils.rnn.pack_padded_sequence",
"torch.nn.utils.rnn.pad_packed_sequence",
"torch.FloatTensor",
"torch.cat"
] | [((2808, 2869), 'torch.nn.utils.rnn.pack_padded_sequence', 'pack_padded_sequence', (['sorted_seq_tensor', 'sorted_seq_len', '(True)'], {}), '(sorted_seq_tensor, sorted_seq_len, True)\n', (2828, 2869), False, 'from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence\n'), ((2940, 2985), 'torch.nn.utils.rnn.pad_packed_sequence', 'pad_packed_sequence', (['output'], {'batch_first': '(True)'}), '(output, batch_first=True)\n', (2959, 2985), False, 'from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence\n'), ((2523, 2562), 'torch.cat', 'torch.cat', (['[word_emb, char_features]', '(2)'], {}), '([word_emb, char_features], 2)\n', (2532, 2562), False, 'import torch\n'), ((4189, 4213), 'torch.stack', 'torch.stack', (['trigger_vec'], {}), '(trigger_vec)\n', (4200, 4213), False, 'import torch\n'), ((1120, 1146), 'torch.nn.Dropout', 'nn.Dropout', (['config.dropout'], {}), '(config.dropout)\n', (1130, 1146), True, 'import torch.nn as nn\n'), ((1183, 1288), 'torch.nn.LSTM', 'nn.LSTM', (['self.input_size', '(config.hidden_dim // 2)'], {'num_layers': '(1)', 'batch_first': '(True)', 'bidirectional': '(True)'}), '(self.input_size, config.hidden_dim // 2, num_layers=1, batch_first=\n True, bidirectional=True)\n', (1190, 1288), True, 'import torch.nn as nn\n'), ((3408, 3456), 'torch.stack', 'torch.stack', (['[output_l[p, :] for p in trigger_p]'], {}), '([output_l[p, :] for p in trigger_p])\n', (3419, 3456), False, 'import torch\n'), ((4102, 4120), 'torch.tensor', 'torch.tensor', (['t_ms'], {}), '(t_ms)\n', (4114, 4120), False, 'import torch\n'), ((1010, 1050), 'torch.FloatTensor', 'torch.FloatTensor', (['config.word_embedding'], {}), '(config.word_embedding)\n', (1027, 1050), False, 'import torch\n'), ((3067, 3082), 'torch.tensor', 'torch.tensor', (['(0)'], {}), '(0)\n', (3079, 3082), False, 'import torch\n'), ((4241, 4266), 'torch.stack', 'torch.stack', (['trigger_mask'], {}), '(trigger_mask)\n', (4252, 4266), False, 'import torch\n')] |
"""Forms for ox_herd commands.
"""
from wtforms import StringField
from ox_herd.core.plugins import base
class BackupForm(base.GenericOxForm):
    """Use this form to enter parameters for a new backup job.
    """

    bucket_name = StringField(
        'bucket_name', [], description=(
            'Name of AWS bucket to put backup into.'))

    # Fixed: this field was previously assigned to ``bucket_name`` as well,
    # silently clobbering the bucket-name field above so the form never
    # exposed it. The field labelled 'prefix' now lives on ``prefix``.
    prefix = StringField(
        'prefix', [], default='misc', description=(
            'Prefix to use in creating remote backup name.'))
| [
"wtforms.StringField"
] | [((236, 325), 'wtforms.StringField', 'StringField', (['"""bucket_name"""', '[]'], {'description': '"""Name of AWS bucket to put backup into."""'}), "('bucket_name', [], description=\n 'Name of AWS bucket to put backup into.')\n", (247, 325), False, 'from wtforms import StringField\n'), ((364, 471), 'wtforms.StringField', 'StringField', (['"""prefix"""', '[]'], {'default': '"""misc"""', 'description': '"""Prefix to use in creating remote backup name."""'}), "('prefix', [], default='misc', description=\n 'Prefix to use in creating remote backup name.')\n", (375, 471), False, 'from wtforms import StringField\n')] |
import numpy as np
import operator
# TODO: Make Mutation Operator.
class TerminationCriteria:
    """Collects termination conditions for a GA run and evaluates them together.

    Register any combination of convergence, fitness-level and generation
    limits, then call :meth:`check` each generation; it returns True as soon
    as any registered criterion fires.
    """

    @staticmethod
    def _convergence_check(convergence_ratio, population_fitness):
        """True when (max - mean)/mean fitness is within half the ratio."""
        spread = abs((np.max(population_fitness) - np.mean(population_fitness))
                     / np.mean(population_fitness))
        return spread <= convergence_ratio / 2

    @staticmethod
    def _fitness_level_check(fitness_level, population_fitness, _operator):
        """Compare |max fitness| with *fitness_level* using the operator symbol."""
        ops = {'>': operator.gt,
               '<': operator.lt,
               '>=': operator.ge,
               '<=': operator.le,
               '=': operator.eq}
        return ops[_operator](abs(np.max(population_fitness)), fitness_level)

    @staticmethod
    def _generations_check(generations, generation_limit):
        """True once the generation counter reaches the configured limit."""
        return generations >= generation_limit

    def __init__(self):
        self._checks = []               # registered checker callables
        self._convergence_limit = None  # ratio for the convergence criterion
        self._fitness_limit = None      # threshold for the fitness criterion
        self._generation_limit = None   # max number of generations
        self._operator = None           # comparison symbol for the fitness criterion

    def _checker_of_convergence(self):
        def _checker(population_fitness, generation_number):
            return self._convergence_check(self._convergence_limit, population_fitness)
        return _checker

    def _checker_of_fitness(self):
        def _checker(population_fitness, generation_number):
            # Fixed: previously read self._convergence_limit, so the threshold
            # passed to add_fitness_limit() was never used.
            return self._fitness_level_check(self._fitness_limit, population_fitness, self._operator)
        return _checker

    def _checker_of_generations(self):
        def _checker(population_fitness, generation_number):
            return self._generations_check(generation_number, self._generation_limit)
        return _checker

    def add_convergence_limit(self, convergence_ratio):
        """Terminate when the population fitness has converged to the ratio."""
        self._checks.append(self._checker_of_convergence())
        self._convergence_limit = convergence_ratio

    def add_fitness_limit(self, operator, fitness_level):
        """Terminate when |max fitness| compares true against *fitness_level*.

        *operator* is one of '>', '<', '>=', '<=', '='.
        """
        self._checks.append(self._checker_of_fitness())
        # Fixed: the threshold was stored in self._generation_limit, clobbering
        # any generation limit and leaving self._fitness_limit forever None.
        self._fitness_limit = fitness_level
        self._operator = operator

    def add_generation_limit(self, generation_limit):
        """Terminate once *generation_limit* generations have elapsed."""
        self._checks.append(self._checker_of_generations())
        self._generation_limit = generation_limit

    def check(self, population_fitness, generation_number):
        """Return True if any registered termination criterion fires."""
        return any(chk(population_fitness, generation_number) for chk in self._checks)
# def convergence_or_100(population_fitness, convergence_ratio):
# if abs((np.max(population_fitness) - np.mean(population_fitness)) / np.mean(
# population_fitness)) <= convergence_ratio / 2:
# return True
# elif abs(np.max(population_fitness)) == 100:
# return True
# else:
# return False
class SelectionOperator:
    """Parent-selection strategies over (contestants, fitness) pairs."""

    @staticmethod
    def supremacy(m, contestants, fitness):
        """Return (indices, values) of the *m* fittest contestants.

        The index order within the top-m set is unspecified (argpartition).
        """
        # Fixed: the top-m index set was computed twice; compute it once.
        top_idx = np.argpartition(np.array(fitness), -m)[-m:]
        return top_idx, np.array(contestants)[top_idx]

    @staticmethod
    def random(m, contestants, fitness):
        """Return *m* contestants drawn uniformly at random; *fitness* is unused."""
        # TODO: Update for idx return. (BROKEN) -- kept as-is so callers that
        # expect a plain list keep working.
        return list(np.random.choice(contestants, m))
class CrossoverOperator:
    """Crossover strategies producing children from parent genomes.

    Genomes are space-separated gene strings, e.g. ``"g0 g1 g2"``.
    """

    @staticmethod
    def random_polygamous(parents, n_children):
        """Create *n_children* by sampling each gene column independently
        from the pool of all parents."""
        # Split all parents once instead of re-splitting per gene column.
        parent_genes = np.array([parent.split(' ') for parent in parents])
        gene_lst = []
        for gene_idx in range(parent_genes.shape[1]):
            gene_lst.append(np.random.choice(parent_genes[:, gene_idx], n_children))
        gene_arr = np.array(gene_lst).T
        return [' '.join(gene_arr[child_idx, :]) for child_idx in range(gene_arr.shape[0])]

    @staticmethod
    def supremecy_polygamous(parents, n_children, fitness):
        # Fixed: ``raise NotImplemented(...)`` raises a TypeError because
        # NotImplemented is a non-callable sentinel; the exception class is
        # NotImplementedError.
        raise NotImplementedError("Supremacy not implemented yet")
def fitness_function_himmelblau(x, y):
    """Return 100 minus Himmelblau's function, so higher is fitter (max 100)."""
    himmelblau = (x ** 2. + y - 11.) ** 2. + (x + y ** 2. - 7.) ** 2.
    return 100 - himmelblau
| [
"numpy.random.choice",
"numpy.array",
"numpy.mean",
"numpy.max"
] | [((674, 700), 'numpy.max', 'np.max', (['population_fitness'], {}), '(population_fitness)\n', (680, 700), True, 'import numpy as np\n'), ((3339, 3371), 'numpy.random.choice', 'np.random.choice', (['contestants', 'm'], {}), '(contestants, m)\n', (3355, 3371), True, 'import numpy as np\n'), ((3744, 3762), 'numpy.array', 'np.array', (['gene_lst'], {}), '(gene_lst)\n', (3752, 3762), True, 'import numpy as np\n'), ((3072, 3093), 'numpy.array', 'np.array', (['contestants'], {}), '(contestants)\n', (3080, 3093), True, 'import numpy as np\n'), ((259, 286), 'numpy.mean', 'np.mean', (['population_fitness'], {}), '(population_fitness)\n', (266, 286), True, 'import numpy as np\n'), ((3043, 3060), 'numpy.array', 'np.array', (['fitness'], {}), '(fitness)\n', (3051, 3060), True, 'import numpy as np\n'), ((199, 225), 'numpy.max', 'np.max', (['population_fitness'], {}), '(population_fitness)\n', (205, 225), True, 'import numpy as np\n'), ((228, 255), 'numpy.mean', 'np.mean', (['population_fitness'], {}), '(population_fitness)\n', (235, 255), True, 'import numpy as np\n'), ((3123, 3140), 'numpy.array', 'np.array', (['fitness'], {}), '(fitness)\n', (3131, 3140), True, 'import numpy as np\n')] |
from distutils.core import setup
# NOTE(review): distutils is deprecated (removed in Python 3.12); consider
# migrating to setuptools.setup with the same arguments.
setup(
  name = 'niu',
  packages = ['niu'],
  version = '0.2',
  description = 'A grouping and pairing library',
  author = '<NAME>',
  author_email = '<EMAIL>',
  url = 'https://github.com/gabeabrams/niu',
  # Fixed: download_url pointed at the 0.1 tarball while version is 0.2.
  download_url = 'https://github.com/gabeabrams/niu/archive/0.2.tar.gz',
  keywords = ['grouping', 'pairing', 'matching'],
  install_requires=[
    'pulp'
  ],
  classifiers = []
)
) | [
"distutils.core.setup"
] | [((34, 394), 'distutils.core.setup', 'setup', ([], {'name': '"""niu"""', 'packages': "['niu']", 'version': '"""0.2"""', 'description': '"""A grouping and pairing library"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'url': '"""https://github.com/gabeabrams/niu"""', 'download_url': '"""https://github.com/gabeabrams/niu/archive/0.1.tar.gz"""', 'keywords': "['grouping', 'pairing', 'matching']", 'install_requires': "['pulp']", 'classifiers': '[]'}), "(name='niu', packages=['niu'], version='0.2', description=\n 'A grouping and pairing library', author='<NAME>', author_email=\n '<EMAIL>', url='https://github.com/gabeabrams/niu', download_url=\n 'https://github.com/gabeabrams/niu/archive/0.1.tar.gz', keywords=[\n 'grouping', 'pairing', 'matching'], install_requires=['pulp'],\n classifiers=[])\n", (39, 394), False, 'from distutils.core import setup\n')] |
import logging
from datetime import timedelta
from flask import Flask, render_template, redirect, request, url_for, flash
from flask_login import LoginManager, login_user, logout_user, current_user
from preston.crest import Preston as CREST
from preston.xmlapi import Preston as XMLAPI
from auth.shared import db, eveapi
from auth.models import User
from auth.hr.app import app as hr_blueprint
# from auth.wiki.app import app as wiki_blueprint
# Create and configure the Flask app; config.cfg must sit next to this module.
app = Flask(__name__)
# "Remember me" sessions stay valid for two weeks.
app.permanent_session_lifetime = timedelta(days=14)
app.config.from_pyfile('config.cfg')
# EVE XML API connection (shared via the eveapi dict from auth.shared)
user_agent = 'GETIN HR app ({})'.format(app.config['CONTACT_EMAIL'])
eveapi['user_agent'] = user_agent
eveapi['xml'] = XMLAPI(user_agent=user_agent)
# EVE CREST API connection, used for the SSO login flow below.
eveapi['crest'] = CREST(
    user_agent=user_agent,
    client_id=app.config['EVE_OAUTH_CLIENT_ID'],
    client_secret=app.config['EVE_OAUTH_SECRET'],
    callback_url=app.config['EVE_OAUTH_CALLBACK']
)
# Database connection (Flask-SQLAlchemy instance shared via auth.shared)
db.app = app
db.init_app(app)
# User management (Flask-Login); empty login_message suppresses the default flash.
login_manager = LoginManager(app)
login_manager.login_message = ''
login_manager.login_view = 'login'
# Application logging: append to log.txt using "{"-style formatting.
app.logger.setLevel(app.config['LOGGING_LEVEL'])
handler = logging.FileHandler('log.txt')
handler.setFormatter(logging.Formatter(style='{', fmt='{asctime} [{levelname}] {message}', datefmt='%Y-%m-%d %H:%M:%S'))
handler.setLevel(app.config['LOGGING_LEVEL'])
app.logger.addHandler(handler)
# Blueprints (the wiki blueprint is intentionally disabled for now)
app.register_blueprint(hr_blueprint, url_prefix='/hr')
# app.register_blueprint(wiki_blueprint, url_prefix='/wiki')
app.logger.info('Initialization complete')
@login_manager.user_loader
def load_user(user_id):
    """Flask-Login user loader.

    Args:
        user_id (str): the user's primary key as stored in the session

    Returns:
        auth.models.User: the user with that id, or None if no such user
    """
    pk = int(user_id)
    return User.query.filter_by(id=pk).first()
@app.route('/')
def landing():
    """Render the public landing page.

    Returns:
        str: rendered template 'landing.html'
    """
    return render_template('landing.html')
@app.route('/login')
def login():
    """Show the login page containing the EVE SSO authorization link.

    Returns:
        str: rendered template 'login.html'
    """
    sso_url = eveapi['crest'].get_authorize_url()
    return render_template('login.html', url=sso_url)
@app.route('/eve/callback')
def eve_oauth_callback():
    """Completes the EVE SSO login. Here, hr.models.User models
    and hr.models.Member models are created for the user if they don't
    exist and the user is redirected to the page appropriate for their
    access level.

    Args:
        None

    Returns:
        str: redirect to the login endpoint if something failed, join endpoint if
        the user is a new user, or the index endpoint if they're already a member.
    """
    if 'error' in request.path:
        app.logger.error('Error in EVE SSO callback: ' + request.url)
        flash('There was an error in EVE\'s response', 'error')
        # Fixed: previously returned the bare URL string from url_for();
        # a redirect response is required here, as on the other error path.
        return redirect(url_for('login'))
    try:
        auth = eveapi['crest'].authenticate(request.args['code'])
    except Exception as e:
        app.logger.error('CREST signing error: ' + str(e))
        flash('There was an authentication error signing you in.', 'error')
        return redirect(url_for('login'))
    character_info = auth.whoami()
    character_name = character_info['CharacterName']
    user = User.query.filter_by(name=character_name).first()
    if user:
        # Known character: log them straight in.
        login_user(user)
        app.logger.debug('{} logged in with EVE SSO'.format(current_user.name))
        flash('Logged in', 'success')
        return redirect(url_for('landing'))
    # First visit: create an account tagged with the character's corporation.
    corporation = get_corp_for_name(character_name)
    user = User(character_name, corporation)
    db.session.add(user)
    db.session.commit()
    login_user(user)
    app.logger.info('{} created an account'.format(current_user.name))
    return redirect(url_for('landing'))
@app.route('/logout')
def logout():
    """Log the current user out of the site.

    Returns:
        str: redirect to the login endpoint
    """
    who = current_user.name if not current_user.is_anonymous else 'unknown user'
    app.logger.debug('{} logged out'.format(who))
    logout_user()
    return redirect(url_for('login'))
@app.errorhandler(404)
def error_404(e):
    """Log a 404 error and show the user the not-found page.

    Args:
        e (Exception): the exception from the server

    Returns:
        str: rendered template 'error_404.html'
    """
    who = current_user.name if not current_user.is_anonymous else 'unknown user'
    app.logger.error('404 error at "{}" by {}: {}'.format(request.url, who, str(e)))
    return render_template('error_404.html')
@app.errorhandler(500)
def error_500(e):
    """Log a 500 error and show the user the server-error page.

    Args:
        e (Exception): the exception from the server

    Returns:
        str: rendered template 'error_500.html'
    """
    who = current_user.name if not current_user.is_anonymous else 'unknown user'
    app.logger.error('500 error at "{}" by {}: {}'.format(request.url, who, str(e)))
    return render_template('error_500.html')
def get_corp_for_name(name):
    """Takes a character's name and returns their corporation's name.

    Resolves the name to a character ID via the XML API, then delegates to
    get_corp_for_id. (The previous docstring claimed this returned the
    character ID, which is not what the code does.)

    Args:
        name (str): full character name

    Returns:
        str: the character's corporation name
    """
    return get_corp_for_id(eveapi['xml'].eve.CharacterId(names=name)['rowset']['row']['@characterID'])
def get_corp_for_id(id):
    """Takes a character's id and returns their corporation name.

    Args:
        id (str): the EVE character ID

    Returns:
        str: value of their corporation's name
    """
    return eveapi['xml'].eve.CharacterAffiliation(ids=id)['rowset']['row']['@corporationName']
| [
"flask.render_template",
"flask_login.LoginManager",
"auth.models.User",
"flask.flash",
"preston.crest.Preston",
"flask.Flask",
"auth.shared.db.session.add",
"logging.Formatter",
"flask_login.login_user",
"flask_login.logout_user",
"auth.models.User.query.filter_by",
"flask.url_for",
"loggin... | [((481, 496), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (486, 496), False, 'from flask import Flask, render_template, redirect, request, url_for, flash\n'), ((530, 548), 'datetime.timedelta', 'timedelta', ([], {'days': '(14)'}), '(days=14)\n', (539, 548), False, 'from datetime import timedelta\n'), ((730, 759), 'preston.xmlapi.Preston', 'XMLAPI', ([], {'user_agent': 'user_agent'}), '(user_agent=user_agent)\n', (736, 759), True, 'from preston.xmlapi import Preston as XMLAPI\n'), ((805, 980), 'preston.crest.Preston', 'CREST', ([], {'user_agent': 'user_agent', 'client_id': "app.config['EVE_OAUTH_CLIENT_ID']", 'client_secret': "app.config['EVE_OAUTH_SECRET']", 'callback_url': "app.config['EVE_OAUTH_CALLBACK']"}), "(user_agent=user_agent, client_id=app.config['EVE_OAUTH_CLIENT_ID'],\n client_secret=app.config['EVE_OAUTH_SECRET'], callback_url=app.config[\n 'EVE_OAUTH_CALLBACK'])\n", (810, 980), True, 'from preston.crest import Preston as CREST\n'), ((1025, 1041), 'auth.shared.db.init_app', 'db.init_app', (['app'], {}), '(app)\n', (1036, 1041), False, 'from auth.shared import db, eveapi\n'), ((1076, 1093), 'flask_login.LoginManager', 'LoginManager', (['app'], {}), '(app)\n', (1088, 1093), False, 'from flask_login import LoginManager, login_user, logout_user, current_user\n'), ((1243, 1273), 'logging.FileHandler', 'logging.FileHandler', (['"""log.txt"""'], {}), "('log.txt')\n", (1262, 1273), False, 'import logging\n'), ((1295, 1397), 'logging.Formatter', 'logging.Formatter', ([], {'style': '"""{"""', 'fmt': '"""{asctime} [{levelname}] {message}"""', 'datefmt': '"""%Y-%m-%d %H:%M:%S"""'}), "(style='{', fmt='{asctime} [{levelname}] {message}',\n datefmt='%Y-%m-%d %H:%M:%S')\n", (1312, 1397), False, 'import logging\n'), ((1995, 2026), 'flask.render_template', 'render_template', (['"""landing.html"""'], {}), "('landing.html')\n", (2010, 2026), False, 'from flask import Flask, render_template, redirect, request, url_for, flash\n'), ((3673, 3706), 
'auth.models.User', 'User', (['character_name', 'corporation'], {}), '(character_name, corporation)\n', (3677, 3706), False, 'from auth.models import User\n'), ((3711, 3731), 'auth.shared.db.session.add', 'db.session.add', (['user'], {}), '(user)\n', (3725, 3731), False, 'from auth.shared import db, eveapi\n'), ((3736, 3755), 'auth.shared.db.session.commit', 'db.session.commit', ([], {}), '()\n', (3753, 3755), False, 'from auth.shared import db, eveapi\n'), ((3760, 3776), 'flask_login.login_user', 'login_user', (['user'], {}), '(user)\n', (3770, 3776), False, 'from flask_login import LoginManager, login_user, logout_user, current_user\n'), ((4175, 4188), 'flask_login.logout_user', 'logout_user', ([], {}), '()\n', (4186, 4188), False, 'from flask_login import LoginManager, login_user, logout_user, current_user\n'), ((4652, 4685), 'flask.render_template', 'render_template', (['"""error_404.html"""'], {}), "('error_404.html')\n", (4667, 4685), False, 'from flask import Flask, render_template, redirect, request, url_for, flash\n'), ((5111, 5144), 'flask.render_template', 'render_template', (['"""error_500.html"""'], {}), "('error_500.html')\n", (5126, 5144), False, 'from flask import Flask, render_template, redirect, request, url_for, flash\n'), ((2894, 2948), 'flask.flash', 'flash', (['"""There was an error in EVE\'s response"""', '"""error"""'], {}), '("There was an error in EVE\'s response", \'error\')\n', (2899, 2948), False, 'from flask import Flask, render_template, redirect, request, url_for, flash\n'), ((2965, 2981), 'flask.url_for', 'url_for', (['"""login"""'], {}), "('login')\n", (2972, 2981), False, 'from flask import Flask, render_template, redirect, request, url_for, flash\n'), ((3431, 3447), 'flask_login.login_user', 'login_user', (['user'], {}), '(user)\n', (3441, 3447), False, 'from flask_login import LoginManager, login_user, logout_user, current_user\n'), ((3536, 3565), 'flask.flash', 'flash', (['"""Logged in"""', '"""success"""'], {}), "('Logged in', 
'success')\n", (3541, 3565), False, 'from flask import Flask, render_template, redirect, request, url_for, flash\n'), ((3868, 3886), 'flask.url_for', 'url_for', (['"""landing"""'], {}), "('landing')\n", (3875, 3886), False, 'from flask import Flask, render_template, redirect, request, url_for, flash\n'), ((4209, 4225), 'flask.url_for', 'url_for', (['"""login"""'], {}), "('login')\n", (4216, 4225), False, 'from flask import Flask, render_template, redirect, request, url_for, flash\n'), ((3151, 3218), 'flask.flash', 'flash', (['"""There was an authentication error signing you in."""', '"""error"""'], {}), "('There was an authentication error signing you in.', 'error')\n", (3156, 3218), False, 'from flask import Flask, render_template, redirect, request, url_for, flash\n'), ((3360, 3401), 'auth.models.User.query.filter_by', 'User.query.filter_by', ([], {'name': 'character_name'}), '(name=character_name)\n', (3380, 3401), False, 'from auth.models import User\n'), ((3590, 3608), 'flask.url_for', 'url_for', (['"""landing"""'], {}), "('landing')\n", (3597, 3608), False, 'from flask import Flask, render_template, redirect, request, url_for, flash\n'), ((3243, 3259), 'flask.url_for', 'url_for', (['"""login"""'], {}), "('login')\n", (3250, 3259), False, 'from flask import Flask, render_template, redirect, request, url_for, flash\n')] |
# NOTE(review): raw_input below means this script targets Python 2; the
# imports (MiniNero, PaperWallet, bitmonerod, ...) are project-local modules.
import MiniNero
import ed25519
import binascii
import PaperWallet
import cherrypy
import os
import time
import bitmonerod
import SimpleXMR2
import SimpleServer
message = "send0d000114545737471em2WCg9QKxRxbo6S3xKF2K4UDvdu6hMc"
# NOTE(review): the assignment above is dead -- immediately overwritten below;
# presumably an older test payload kept for reference. Confirm before removing.
message = "send0d0114545747771em2WCg9QKxRxbo6S3xKF2K4UDvdu6hMc"
# Prompt for the secret key, then print the signature over the message.
sec = raw_input("sec?")
print(SimpleServer.Signature(message, sec))
| [
"SimpleServer.Signature"
] | [((321, 357), 'SimpleServer.Signature', 'SimpleServer.Signature', (['message', 'sec'], {}), '(message, sec)\n', (343, 357), False, 'import SimpleServer\n')] |
from vedacore.misc import registry, build_from_cfg
def build_loss(cfg):
    """Build and return a loss object from *cfg* via the vedacore registry."""
    return build_from_cfg(cfg, registry, 'loss')
| [
"vedacore.misc.build_from_cfg"
] | [((85, 122), 'vedacore.misc.build_from_cfg', 'build_from_cfg', (['cfg', 'registry', '"""loss"""'], {}), "(cfg, registry, 'loss')\n", (99, 122), False, 'from vedacore.misc import registry, build_from_cfg\n')] |
# -*- coding: UTF-8 -*-
"""
Created by <NAME> <<EMAIL>> on 19/06/2016.
"""
import os
from datetime import datetime
from django.core.management.base import BaseCommand, CommandError
from django_rest_scaffold.settings import DJANGO_REST_SCAFFOLD_SETTINGS as SETTINGS
class Command(BaseCommand):
    """Scaffolds a model plus its DRF serializer, views, urls and admin entry."""
    help = 'Creates a model, serializer and views'

    def add_arguments(self, parser):
        parser.add_argument('model-name', type=str)
        parser.add_argument('resource-name', type=str)
        parser.add_argument('file-mode', nargs='?', type=str)

    @staticmethod
    def gen_header() -> list:
        """Return the standard file-header lines stamped with today's date."""
        today = datetime.now().strftime('%d/%m/%Y')
        return [
            '# -*- coding: UTF-8 -*-\n',
            '"""\n',
            ' Created by <NAME> <<EMAIL>> on %s.\n' % today,
            '"""\n'
        ]

    @staticmethod
    def build_comment(model_name: str, text: str) -> str:
        """Return a banner comment block (``# MODEL TEXT``) framed by rule lines."""
        line = '# {0}\n'.format('-' * 118)
        return '{0}# {1} {2}\n{0}'.format(line, model_name.upper(), text.upper())

    @staticmethod
    def _write_section(path: str, file_mode: str, header_extra: list, body_lines: list):
        """Append *body_lines* to *path*.

        When the file does not exist yet (or ``'w'`` in *file_mode* truncates
        it), the standard header plus *header_extra* is prepended first.
        """
        if not os.path.exists(path) or 'w' in file_mode:
            lines = Command.gen_header() + header_extra
        else:
            lines = []
        lines += body_lines
        with open(path, file_mode, encoding='utf-8') as out_file:
            out_file.writelines(lines)

    def handle(self, *args, **options):
        model_name = options.get('model-name')
        resource_name = options.get('resource-name')
        file_mode = options.get('file-mode', None) or 'a+'
        # CamelCase -> slug, e.g. "MyModel" -> "my-model".
        model_name_slug = ''.join([v if p == 0 or v.islower() else '-' + v for p, v in enumerate(model_name)]).lower()
        serializer_name = '%sSerializer' % model_name
        view_resource_name = '%sResourceAPIView' % model_name
        view_detail_name = '%sDetailAPIView' % model_name
        resource_path = os.path.join(SETTINGS['APPS_FOLDER'], resource_name)
        apps_folder_name = SETTINGS['APPS_FOLDER_NAME']
        apps_folder_package = '' if apps_folder_name is None else '{0}.'.format(apps_folder_name)
        if not os.path.exists(resource_path):
            # Fixed: the error was instantiated but never raised, so the
            # command kept running (and crashed later) on a missing app.
            raise CommandError('Resource %s not found.' % resource_name)

        # CREATE THE MODELS FILE
        self._write_section(
            os.path.join(resource_path, 'models.py'), file_mode,
            ['from django.db import models\n'],
            [
                '\n\n',
                'class {0}(models.Model):\n'.format(model_name),
                '    pass\n'
            ])

        # CREATE THE SERIALIZERS FILE
        # Fixed: this header import (and the views/admin ones below)
        # previously lacked a trailing newline.
        self._write_section(
            os.path.join(resource_path, 'serializers.py'), file_mode,
            ['from rest_framework import serializers\n'],
            [
                '\n\n',
                Command.build_comment(model_name_slug, 'serializers'),
                'from {0}{1}.models import {2}\n'.format(apps_folder_package, resource_name, model_name),
                '\n\n',
                'class {0}(serializers.ModelSerializer):\n'.format(serializer_name),
                '    class Meta:\n',
                '        model = {0}\n'.format(model_name)
            ])

        # CREATE THE VIEWS FILE
        self._write_section(
            os.path.join(resource_path, 'views.py'), file_mode,
            ['from rest_framework import generics\n'],
            [
                '\n\n',
                Command.build_comment(model_name_slug, 'views'),
                'from {0}{1}.models import {2}\n'.format(apps_folder_package, resource_name, model_name),
                'from {0}{1}.serializers import {2}\n'.format(apps_folder_package, resource_name, serializer_name),
                '\n\n',
                'class {0}(generics.ListCreateAPIView):\n'.format(view_resource_name),
                '    serializer_class = {0}\n'.format(serializer_name),
                '    queryset = {0}.objects\n'.format(model_name),
                '\n\n',
                'class {0}(generics.RetrieveUpdateDestroyAPIView):\n'.format(view_detail_name),
                '    serializer_class = {0}\n'.format(serializer_name),
                '    queryset = {0}.objects\n'.format(model_name),
            ])

        # CREATE THE URLS FILE
        self._write_section(
            os.path.join(resource_path, 'urls.py'), file_mode,
            [
                'from django.conf.urls import url\n',
                '\n',
                'urlpatterns = []\n'
            ],
            [
                '\n',
                Command.build_comment(model_name_slug, 'endpoints'),
                'from {0}{1}.views import {2}, {3}\n'.format(apps_folder_package, resource_name, view_resource_name,
                                                             view_detail_name),
                '\n',
                'urlpatterns += [\n',
                "    url(r'^{0}s/$', {1}.as_view(), name='{0}-resource'),\n".format(model_name_slug, view_resource_name),
                "    url(r'^{0}s/(?P<pk>\\d+)[/]?$', {1}.as_view(), name='{0}-detail')\n".format(model_name_slug,
                                                                                               view_detail_name),
                ']\n'
            ])

        # CREATE THE ADMIN FILE
        self._write_section(
            os.path.join(resource_path, 'admin.py'), file_mode,
            ['from django.contrib import admin\n'],
            [
                '\n\n',
                Command.build_comment(model_name_slug, 'admin register'),
                'from {0}{1}.models import {2}\n'.format(apps_folder_package, resource_name, model_name),
                '\n',
                'admin.site.register({0})\n'.format(model_name),
            ])
| [
"os.path.exists",
"os.path.join",
"datetime.datetime.now",
"django.core.management.base.CommandError"
] | [((1556, 1608), 'os.path.join', 'os.path.join', (["SETTINGS['APPS_FOLDER']", 'resource_name'], {}), "(SETTINGS['APPS_FOLDER'], resource_name)\n", (1568, 1608), False, 'import os\n'), ((1932, 1972), 'os.path.join', 'os.path.join', (['resource_path', '"""models.py"""'], {}), "(resource_path, 'models.py')\n", (1944, 1972), False, 'import os\n'), ((2528, 2573), 'os.path.join', 'os.path.join', (['resource_path', '"""serializers.py"""'], {}), "(resource_path, 'serializers.py')\n", (2540, 2573), False, 'import os\n'), ((3442, 3481), 'os.path.join', 'os.path.join', (['resource_path', '"""views.py"""'], {}), "(resource_path, 'views.py')\n", (3454, 3481), False, 'import os\n'), ((4685, 4723), 'os.path.join', 'os.path.join', (['resource_path', '"""urls.py"""'], {}), "(resource_path, 'urls.py')\n", (4697, 4723), False, 'import os\n'), ((5914, 5953), 'os.path.join', 'os.path.join', (['resource_path', '"""admin.py"""'], {}), "(resource_path, 'admin.py')\n", (5926, 5953), False, 'import os\n'), ((1778, 1807), 'os.path.exists', 'os.path.exists', (['resource_path'], {}), '(resource_path)\n', (1792, 1807), False, 'import os\n'), ((1821, 1875), 'django.core.management.base.CommandError', 'CommandError', (["('Resource %s not found.' % resource_name)"], {}), "('Resource %s not found.' 
% resource_name)\n", (1833, 1875), False, 'from django.core.management.base import BaseCommand, CommandError\n'), ((623, 637), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (635, 637), False, 'from datetime import datetime\n'), ((1988, 2015), 'os.path.exists', 'os.path.exists', (['models_path'], {}), '(models_path)\n', (2002, 2015), False, 'import os\n'), ((2589, 2621), 'os.path.exists', 'os.path.exists', (['serializers_path'], {}), '(serializers_path)\n', (2603, 2621), False, 'import os\n'), ((3497, 3523), 'os.path.exists', 'os.path.exists', (['views_path'], {}), '(views_path)\n', (3511, 3523), False, 'import os\n'), ((4739, 4764), 'os.path.exists', 'os.path.exists', (['urls_path'], {}), '(urls_path)\n', (4753, 4764), False, 'import os\n'), ((5969, 5995), 'os.path.exists', 'os.path.exists', (['admin_path'], {}), '(admin_path)\n', (5983, 5995), False, 'import os\n')] |
#
# derl: CLI Utility for searching for dead URLs <https://github.com/tpiekarski/derl>
# ---
# Copyright 2020 <NAME> <<EMAIL>>
#
from time import perf_counter
from derl.model.stats import Stats
class Singleton(type):
    """Metaclass that hands out a single shared instance per class."""

    _instances = {}

    def __call__(cls: "Singleton", *args: tuple, **kwargs: dict) -> "Tracker":
        # Create the instance lazily on the first call; afterwards always
        # return the cached one.
        try:
            return cls._instances[cls]
        except KeyError:
            obj = super(Singleton, cls).__call__(*args, **kwargs)
            cls._instances[cls] = obj
            return obj
class Tracker(metaclass=Singleton):
    """Process-wide tracker for run timing and download statistics."""

    # Class-level defaults; start()/stop()/reset() rebind per instance.
    start_time = None
    stop_time = None
    stats = Stats()
    test = False

    def start(self: "Tracker"):
        """Arm the start timestamp (no-op if a run is already in progress).

        Fixed: the guard used to be ``is None``, so after reset() (which sets
        the timestamps to 0) the tracker could never be restarted.
        """
        if not self.start_time:
            self.start_time = perf_counter()

    def stop(self: "Tracker"):
        """Arm the stop timestamp (no-op if already stopped). Same fix as start()."""
        if not self.stop_time:
            self.stop_time = perf_counter()

    def calc_time(self: "Tracker") -> float:
        """Return the rounded elapsed seconds, or -1 in test mode."""
        if self.test:
            return -1
        return round(self.stop_time - self.start_time)

    def reset(self: "Tracker"):
        """Clear timing and statistics so a new measurement can start."""
        self.start_time = 0
        self.stop_time = 0
        self.stats = Stats()

    def set_test(self: "Tracker"):
        """Enter test mode: calc_time() returns -1 so output is deterministic."""
        self.test = True

    def __str__(self: "Tracker") -> str:
        output = ""
        if self.start_time is not None and self.stop_time is not None:
            output += "\nFinished checking URLs after {0:.2f} second(s).\n".format(self.calc_time())
        output += self.stats.__str__()
        return output

    def __repr__(self: "Tracker") -> str:
        return self.__str__()
def get_tracker() -> "Tracker":
    """Return the shared Tracker instance (the Singleton metaclass guarantees one)."""
    return Tracker()
| [
"derl.model.stats.Stats",
"time.perf_counter"
] | [((570, 577), 'derl.model.stats.Stats', 'Stats', ([], {}), '()\n', (575, 577), False, 'from derl.model.stats import Stats\n'), ((1075, 1082), 'derl.model.stats.Stats', 'Stats', ([], {}), '()\n', (1080, 1082), False, 'from derl.model.stats import Stats\n'), ((694, 708), 'time.perf_counter', 'perf_counter', ([], {}), '()\n', (706, 708), False, 'from time import perf_counter\n'), ((805, 819), 'time.perf_counter', 'perf_counter', ([], {}), '()\n', (817, 819), False, 'from time import perf_counter\n')] |
import random
import copy
rr = random.Random ( 22 )
def readNameList(fn):
    """Return the lines of *fn* with trailing whitespace stripped.

    Prints a message and returns None when the file cannot be opened.
    (Fixed: the original compared open()'s result to None, which can never
    happen -- open() raises on failure -- and it never closed the handle.)
    """
    try:
        with open(fn, "r") as f:
            return [line.rstrip() for line in f]
    except OSError:
        print ( f"Invalid file {fn} - failed to open" )
        return None
letters = ['a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t','u','v','w','x','y','z']
def readKey():
    """Generate a substitution-cipher key.

    Shuffles a copy of the alphabet with the module RNG ``rr`` and maps each
    shuffled letter to the plain letter at the same position.
    """
    match = list(letters)
    rr.shuffle(match)
    print ( f"match={match}" )
    return dict(zip(match, letters))
def encrypt(ifn,ofn):
    """Encrypt the lines of *ifn* with the global substitution *key*, writing *ofn*.

    Lines are lowercased first; characters without a key entry (digits,
    punctuation, spaces) pass through unchanged.
    """
    dt = readNameList(ifn)
    out_list = []
    for line in dt:
        lowered = line.lower()
        # str.join is linear; the old per-character += rebuild was quadratic.
        out_list.append(''.join(key.get(c, c) for c in lowered))
    # Context manager guarantees the output handle is closed even if a write
    # fails (the original used bare open()/close()).
    with open ( ofn, "w" ) as f:
        for j in out_list:
            print ( f"{j}", file=f )
def decrypt(ifn,ofn):
    """Decrypt the lines of *ifn* using the inverse of the global *key*, writing *ofn*.

    Lines are lowercased first; characters without a reverse-key entry pass
    through unchanged.
    """
    dt = readNameList(ifn)
    # Invert the cipher key: encrypted letter -> plain letter.
    revkey = {v: k for k, v in key.items()}
    print ( f"revkey = {revkey}" )
    out_list = []
    for line in dt:
        lowered = line.lower()
        # str.join is linear; the old per-character += rebuild was quadratic.
        out_list.append(''.join(revkey.get(c, c) for c in lowered))
    # Context manager guarantees the output handle is closed even if a write
    # fails (the original used bare open()/close()).
    with open ( ofn, "w" ) as f:
        for j in out_list:
            print ( f"{j}", file=f )
# Build the substitution key once at import time; encrypt()/decrypt() read it
# as a module-level global.
key = readKey()
print ( f"key = {key}" )
# Round-trip demo: encrypt the sample file, then decrypt it back.
encrypt("test2.txt", "test2.enc")
decrypt("test2.enc", "test2.orig")
| [
"random.Random",
"copy.deepcopy"
] | [((32, 49), 'random.Random', 'random.Random', (['(22)'], {}), '(22)\n', (45, 49), False, 'import random\n'), ((500, 522), 'copy.deepcopy', 'copy.deepcopy', (['letters'], {}), '(letters)\n', (513, 522), False, 'import copy\n')] |
import numpy as np
import torch
import anndata
from celligner2.othermodels.trvae.trvae import trVAE
from celligner2.trainers.trvae.unsupervised import trVAETrainer
def trvae_operate(
    network: trVAE,
    data: anndata,
    condition_key: str = None,
    size_factor_key: str = None,
    n_epochs: int = 20,
    freeze: bool = True,
    freeze_expression: bool = True,
    remove_dropout: bool = True,
) -> [trVAE, trVAETrainer]:
    """Transfer Learning function for new data. Uses old trained Network and expands it for new conditions.
    Parameters
    ----------
    network: trVAE
        A scNet model object.
    data: Anndata
        Query anndata object.
    condition_key: String
        Key where the conditions in the data can be found.
    size_factor_key: String
        Key where the size_factors in the data can be found.
    n_epochs: Integer
        Number of epochs for training the network on query data.
    freeze: Boolean
        If 'True' freezes every part of the network except the first layers of encoder/decoder.
    freeze_expression: Boolean
        If 'True' freeze every weight in first layers except the condition weights.
    remove_dropout: Boolean
        If 'True' remove Dropout for Transfer Learning.
    Returns
    -------
    new_network: trVAE
        Newly network that got trained on query data.
    new_trainer: trVAETrainer
        Trainer for the newly network.
    """
    conditions = network.conditions
    new_conditions = []
    data_conditions = data.obs[condition_key].unique().tolist()
    # Check if new conditions are already known
    for item in data_conditions:
        if item not in conditions:
            new_conditions.append(item)
    n_new_conditions = len(new_conditions)
    # Add new conditions to overall conditions
    for condition in new_conditions:
        conditions.append(condition)
    # Update DR Rate
    new_dr = network.dr_rate
    if remove_dropout:
        new_dr = 0.0
    print("Surgery to get new Network...")
    # Build a fresh model with the same architecture but the extended
    # condition list (and possibly dropout removed).
    new_network = trVAE(
        network.input_dim,
        conditions=conditions,
        hidden_layer_sizes=network.hidden_layer_sizes,
        latent_dim=network.latent_dim,
        dr_rate=new_dr,
        use_mmd=network.use_mmd,
        mmd_boundary=network.mmd_boundary,
        recon_loss=network.recon_loss,
    )
    # Expand First Layer weights of encoder/decoder of old network by new conditions.
    # New columns are initialised with randn scaled by sqrt(2 / (fan_in + fan_out + 1))
    # -- a Glorot/He-like scheme; presumably chosen to match the original init.
    encoder_input_weights = network.encoder.FC.L0.cond_L.weight
    to_be_added_encoder_input_weights = np.random.randn(encoder_input_weights.size()[0], n_new_conditions) * np.sqrt(
        2 / (encoder_input_weights.size()[0] + 1 + encoder_input_weights.size()[1]))
    to_be_added_encoder_input_weights = torch.from_numpy(to_be_added_encoder_input_weights).float().to(network.device)
    network.encoder.FC.L0.cond_L.weight.data = torch.cat((encoder_input_weights,
                                                          to_be_added_encoder_input_weights), 1)
    # Same expansion for the decoder's first conditional layer.
    decoder_input_weights = network.decoder.FirstL.L0.cond_L.weight
    to_be_added_decoder_input_weights = np.random.randn(decoder_input_weights.size()[0], n_new_conditions) * np.sqrt(
        2 / (decoder_input_weights.size()[0] + 1 + decoder_input_weights.size()[1]))
    to_be_added_decoder_input_weights = torch.from_numpy(to_be_added_decoder_input_weights).float().to(network.device)
    network.decoder.FirstL.L0.cond_L.weight.data = torch.cat((decoder_input_weights,
                                                              to_be_added_decoder_input_weights), 1)
    # Set the weights of new network to old network weights
    new_network.load_state_dict(network.state_dict())
    # Freeze parts of the network: everything is frozen first, then either
    # only the condition weights (freeze_expression) or all first-layer
    # weights/biases ("L0"/"B0") are re-enabled for training.
    if freeze:
        new_network.freeze = True
        for name, p in new_network.named_parameters():
            p.requires_grad = False
            if freeze_expression:
                if 'cond_L.weight' in name:
                    p.requires_grad = True
            else:
                if "L0" in name or "B0" in name:
                    p.requires_grad = True
    new_trainer = trVAETrainer(
        new_network,
        data,
        condition_key=condition_key,
        size_factor_key=size_factor_key,
        batch_size=1024,
        n_samples=4096
    )
    new_trainer.train(
        n_epochs=n_epochs,
        lr=0.001
    )
    return new_network, new_trainer
| [
"celligner2.trainers.trvae.unsupervised.trVAETrainer",
"torch.from_numpy",
"celligner2.othermodels.trvae.trvae.trVAE",
"torch.cat"
] | [((2141, 2387), 'celligner2.othermodels.trvae.trvae.trVAE', 'trVAE', (['network.input_dim'], {'conditions': 'conditions', 'hidden_layer_sizes': 'network.hidden_layer_sizes', 'latent_dim': 'network.latent_dim', 'dr_rate': 'new_dr', 'use_mmd': 'network.use_mmd', 'mmd_boundary': 'network.mmd_boundary', 'recon_loss': 'network.recon_loss'}), '(network.input_dim, conditions=conditions, hidden_layer_sizes=network.\n hidden_layer_sizes, latent_dim=network.latent_dim, dr_rate=new_dr,\n use_mmd=network.use_mmd, mmd_boundary=network.mmd_boundary, recon_loss=\n network.recon_loss)\n', (2146, 2387), False, 'from celligner2.othermodels.trvae.trvae import trVAE\n'), ((2964, 3036), 'torch.cat', 'torch.cat', (['(encoder_input_weights, to_be_added_encoder_input_weights)', '(1)'], {}), '((encoder_input_weights, to_be_added_encoder_input_weights), 1)\n', (2973, 3036), False, 'import torch\n'), ((3537, 3609), 'torch.cat', 'torch.cat', (['(decoder_input_weights, to_be_added_decoder_input_weights)', '(1)'], {}), '((decoder_input_weights, to_be_added_decoder_input_weights), 1)\n', (3546, 3609), False, 'import torch\n'), ((4212, 4342), 'celligner2.trainers.trvae.unsupervised.trVAETrainer', 'trVAETrainer', (['new_network', 'data'], {'condition_key': 'condition_key', 'size_factor_key': 'size_factor_key', 'batch_size': '(1024)', 'n_samples': '(4096)'}), '(new_network, data, condition_key=condition_key,\n size_factor_key=size_factor_key, batch_size=1024, n_samples=4096)\n', (4224, 4342), False, 'from celligner2.trainers.trvae.unsupervised import trVAETrainer\n'), ((2838, 2889), 'torch.from_numpy', 'torch.from_numpy', (['to_be_added_encoder_input_weights'], {}), '(to_be_added_encoder_input_weights)\n', (2854, 2889), False, 'import torch\n'), ((3407, 3458), 'torch.from_numpy', 'torch.from_numpy', (['to_be_added_decoder_input_weights'], {}), '(to_be_added_decoder_input_weights)\n', (3423, 3458), False, 'import torch\n')] |
import datetime
import remi
import core.globals
# Module-level session state shared by every App instance in this process.
connected_clients = {} # Dict with key=session id of App Instance and value=ws_client.client_address of App Instance
connected_clients['number'] = 0 # Special Dict Field for amount of active connections
client_route_url_to_view = {} # Dict to store URL extensions related to session. This is used to switch a view based on url
def handle_connections(AppInst=None):
    """Track the websocket connection state of one REMI App instance.

    On a new websocket: record the client address in ``connected_clients``,
    bump the connection counter and set ``AppInst.connection_established``.
    When the instance's websocket list becomes empty: log the connection
    duration, clear the flag and remove the session from ``connected_clients``.
    NOTE(review): relies on remi's internal ``remi.server.clients`` registry —
    confirm against the remi version in use.
    """
    # Take care of the connection. It is only alive if the websocket still is active.
    # Check, if there is a new websocket connection for this App session (= Instance)
    if AppInst.connection_established == False and len(AppInst.websockets) == 1:
        for session_id, app_inst in remi.server.clients.items():
            if session_id == AppInst.session:
                for ws_client in app_inst.websockets:
                    AppInst.logger.info(f'New Session with ID <{AppInst.session}> from host {ws_client.client_address}') # Host Information for direct connection
                    connected_clients[AppInst.session] = ws_client.client_address
                    AppInst.logger.info(f'Session <{AppInst.session}> host headers: {ws_client.headers}')
                    connected_clients['number'] = connected_clients['number'] + 1
                    AppInst.logger.info(f'Connected clients ({connected_clients["number"]} in total): {connected_clients}')
                    AppInst.connect_time = datetime.datetime.now()
                    AppInst.connection_established = True # Set Flag. This can be used by other threads as end signal.
    # Check, if the websocket connection is still alive. REMI removes the Websocket from the List if dead.
    if len(remi.server.clients[AppInst.session].websockets) == 0 and AppInst.connection_established == True:
        AppInst.disconnect_time = datetime.datetime.now() # Store the disconnect time
        connection_duration = f'{(AppInst.disconnect_time - AppInst.connect_time).seconds} sec'
        AppInst.logger.info(f'Session <{AppInst.session}> from host {connected_clients[AppInst.session]} has disconnected. Connection duration: {connection_duration}')
        AppInst.connection_established = False # Set Flag. This can be used by other threads as end signal.
        del connected_clients[AppInst.session]
        connected_clients['number'] = connected_clients['number'] - 1
        AppInst.logger.info(f'Still connected clients: {connected_clients}')
| [
"datetime.datetime.now",
"remi.server.clients.items"
] | [((778, 805), 'remi.server.clients.items', 'remi.server.clients.items', ([], {}), '()\n', (803, 805), False, 'import remi\n'), ((1498, 1521), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1519, 1521), False, 'import datetime\n'), ((1898, 1921), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1919, 1921), False, 'import datetime\n')] |
import time
from os import system
from django.http import HttpResponse
from django.template import Context, loader
from django.views.decorators.csrf import csrf_exempt # Pour des formulaires POST libres
from jla_utils.utils import Fichier
from .models import ElementDialogue
class Tunnel:
    """Value object describing one tunnel found in the result file."""

    def __init__(self, longueurArg, generationArg):
        # Length of the tunnel and the generation at which it was observed.
        self.longueur, self.generation = longueurArg, generationArg
def alimenteBaseDeDonnees (nomEntree, identifiantSerpicon, descriptifTunnel, serveur):
    """Persist one dialogue entry in the database.

    Stores the entry name plus the Serpicon identifier, the tunnel
    description and the server name in the generic param1..param3 slots.
    """
    valeurs = {
        'nom': nomEntree,
        'param1': identifiantSerpicon,
        'param2': descriptifTunnel,
        'param3': serveur,
    }
    ElementDialogue.objects.create(**valeurs)
def analyseGraine (ligneLue):
    """Extract the seed from a result line: skip the 10-character prefix
    and drop the final character (the trailing newline)."""
    return ligneLue[10:-1]
def analyseNbCell (ligneLue):
    """Extract the cell count from a result line: everything after the
    9-character prefix, parsed as an int."""
    return int(ligneLue[9:])
def analyseTunnel (request):
    """Django view: parse the tunnel result file and record each tunnel.

    Reads ``resultat_longtun2.txt`` (line 1: cell count, line 2: seed, then
    one "Tunnel ..." line per tunnel), writes an ElementDialogue row for
    every tunnel found — or a single "Chou blanc !" row when none was —
    and returns a small status page.
    """
    nomFichTunnel = "resultat_longtun2.txt"
    numLigneLue = 0
    fichALire = Fichier(nomFichTunnel, 0)
    chouBlanc = True # by default: assume no tunnel was found
    nbCell = 0
    graine = ""
    mesTunnels = []
    while fichALire.index < fichALire.longueur:
        ligneLue = fichALire.litUneLigne()
        numLigneLue += 1
        if numLigneLue == 1:
            nbCell = analyseNbCell(ligneLue)
        elif numLigneLue == 2:
            graine = analyseGraine(ligneLue)
        else:
            # Tunnel lines start with "Tunnel"; short lines are ignored.
            if (len(ligneLue) > 10) and (ligneLue[0:6] == "Tunnel"):
                chouBlanc = False
                monTunnelNormalise = analyseTunnelMoteur(ligneLue)
                mesTunnels.append(monTunnelNormalise)
    fichALire.close()
    print("Le nombre de cellules est de %d." % (nbCell))
    print("La graine est [%s]." % (graine))
    nomEntreeDeBase = fabriqueTempsSyntaxeUrl()
    identifiantSerpicon = "%d %s" % (nbCell, graine)
    nomServeur = "alwaysdata"
    if chouBlanc:
        alimenteBaseDeDonnees(nomEntreeDeBase, identifiantSerpicon, "Chou blanc !", nomServeur)
    else:
        for numTunnel in range(len(mesTunnels)):
            monTunnel = mesTunnels[numTunnel]
            maLongueur = monTunnel.longueur
            maGeneration = monTunnel.generation
            print("Tunnel de %s a la generation %s" % \
              (separateurMille(maLongueur, ' '),
              separateurMille(maGeneration, ' ')))
            nomEntreeDeBase = fabriqueTempsSyntaxeUrl()
            nomEntree = nomEntreeDeBase + "__" + separateurMille(maLongueur, '_')
            descriptifTunnel = separateurMille(maLongueur, ' ') + " en " \
              + separateurMille(maGeneration, ' ')
            alimenteBaseDeDonnees(nomEntree, identifiantSerpicon, descriptifTunnel, nomServeur)
            if numTunnel < len(mesTunnels) - 1:
                attend(5.0) # pause between successive DB writes
    # time.sleep(2.0) # just in case, to avoid coming back too soon after the
    # analyse_tunnel.py requests
    # lanceSinodoju () # we let courteline take care of relaunching amarelia
    tableauDeLignes = []
    tableauDeLignes.append("Cette page est la page de l'analyse des tunnels.")
    template = loader.get_template('cestmoilechef/petite_merdasse.html')
    context = Context({ 'tabDeLignes': tableauDeLignes })
    output = template.render(context)
    return HttpResponse(output)
def attend (dureeEnSecondes):
    """Block the current thread for the given number of seconds."""
    time.sleep(dureeEnSecondes)
def analyseTunnelMoteur (ligneLue):
    """Parse one "Tunnel ..." result line into a Tunnel object.

    Presumably the line reads ``Tunnel de <length> en <generation>`` with
    space thousand-separators inside the numbers — TODO confirm against the
    generator of resultat_longtun2.txt. Scanning starts after the 10-char
    prefix and stops at the first 'e' (the "en" separator).
    """
    chaineLongueur = ""
    chaineGeneration = ""
    caracLu = ""
    numSigne = 10
    eTrouve = False
    # Accumulate the length digits until the 'e' of "en" is met.
    while (not eTrouve) and (numSigne < len(ligneLue)):
        signeLu = ligneLue[numSigne]
        if signeLu == "e":
            eTrouve = True
        else:
            chaineLongueur += signeLu
        numSigne += 1
    chaineLongueur = chaineLongueur[0:len(chaineLongueur) - 1] # drop the trailing space
    maLongueur = int(vireSigne(chaineLongueur, ' '))
    # Skip the rest of "en " to reach the generation number.
    numSigne += 2
    chaineGeneration = ligneLue[numSigne:len(ligneLue)]
    maGene = int(vireSigne(chaineGeneration, ' '))
    monTunnel = Tunnel(maLongueur, maGene)
    return monTunnel
def fabriqueTempsSyntaxeGraine ():
    """Build a seed name from the current local time.

    Format: ``jlancey<YYYY><MM><DD>a<HH>h<MM>m<SS>`` (24 characters).
    """
    instantLocal = time.localtime()
    return time.strftime("jlancey%Y%m%da%Hh%Mm%S", instantLocal)
def fabriqueTempsSyntaxeUrl ():
    """Return the current local time formatted for use in a URL.

    Format: ``YYYY-MM-DD_HH-MM-SS`` (19 characters, local time).
    """
    instantLocal = time.localtime()
    return time.strftime("%Y-%m-%d_%H-%M-%S", instantLocal)
def lanceSinodoju ():
    """Trigger the remote sinodoju.php script on www.amarelia.ch.

    Sends a plain GET request, prints the HTTP status/reason and always
    closes the connection, even when the request fails.
    """
    # Local import: 'http' was never imported at module level, so the
    # original body raised NameError on its first line.
    import http.client
    conn = http.client.HTTPConnection("www.amarelia.ch")
    try:
        conn.request("GET", "/sinodoju/sinodoju.php")
        r1 = conn.getresponse()
        print(r1.status, r1.reason)
        r1.read()  # drain the body so the connection closes cleanly
    finally:
        conn.close()
def separateurMille (monEntier, monSeparateur):
    """Format *monEntier* with *monSeparateur* inserted every three
    characters counting from the right, e.g. 1234567 -> '1 234 567'."""
    chiffres = "%d" % (monEntier)
    morceaux = []
    for position, signe in enumerate(reversed(chiffres)):
        if position % 3 == 0 and position > 0:
            morceaux.append(monSeparateur)
        morceaux.append(signe)
    return "".join(reversed(morceaux))
@csrf_exempt # In theory this is a security breach; in practice... it depends
def viewSinodoju (request):
    """Django view: launch the sinodoju.pl daemon with a time-based seed
    and return a status page describing what was started."""
    tableauDeLignes = []
    tableauDeLignes.append("Cette page est la page de Sinodoju.")
    graine = fabriqueTempsSyntaxeGraine()
    # Each seed character contributes 6 bits of entropy to the generator.
    nbBitsFournis = len(graine) * 6
    tableauDeLignes.append("La graine est [%s], soit assez pour %d bits." % (graine, nbBitsFournis))
    nbCellules = 145
    # Launch the Perl daemon in the background, redirecting its output.
    system("./sinodoju.pl %d %s > cr_perl.txt 2> cr2_perl.txt &" % (nbCellules, graine))
    tableauDeLignes.append("En principe, si vous lisez ça, c'est qu'un daemon Sinodoju a été lancé.")
    tableauDeLignes.append("Donc ça aura un effet... quand le daemon aura fini de travailler.")
    tableauDeLignes.append("Ce template a été écrit pour vous rendre la main tout de suite...")
    tableauDeLignes.append("... mais des limitations d'AlwaysData, compréhensibles d'ailleurs,")
    tableauDeLignes.append("imposent d'attendre quand même la fin du processus. Cette page ne")
    tableauDeLignes.append("sert donc qu'à titre de test.")
    template = loader.get_template('cestmoilechef/petite_merdasse.html')
    context = Context({ 'tabDeLignes': tableauDeLignes })
    output = template.render(context)
    return HttpResponse(output)
def vireSigne (maChaine, monSigneAVirer):
    """Return *maChaine* with every character equal to *monSigneAVirer*
    removed. Comparison is per character, so a multi-character
    *monSigneAVirer* never matches and the string comes back unchanged."""
    return "".join(signe for signe in maChaine if signe != monSigneAVirer)
| [
"django.http.HttpResponse",
"time.sleep",
"django.template.Context",
"os.system",
"time.localtime",
"django.template.loader.get_template",
"jla_utils.utils.Fichier"
] | [((1154, 1179), 'jla_utils.utils.Fichier', 'Fichier', (['nomFichTunnel', '(0)'], {}), '(nomFichTunnel, 0)\n', (1161, 1179), False, 'from jla_utils.utils import Fichier\n'), ((3284, 3341), 'django.template.loader.get_template', 'loader.get_template', (['"""cestmoilechef/petite_merdasse.html"""'], {}), "('cestmoilechef/petite_merdasse.html')\n", (3303, 3341), False, 'from django.template import Context, loader\n'), ((3356, 3397), 'django.template.Context', 'Context', (["{'tabDeLignes': tableauDeLignes}"], {}), "({'tabDeLignes': tableauDeLignes})\n", (3363, 3397), False, 'from django.template import Context, loader\n'), ((3449, 3469), 'django.http.HttpResponse', 'HttpResponse', (['output'], {}), '(output)\n', (3461, 3469), False, 'from django.http import HttpResponse\n'), ((3505, 3532), 'time.sleep', 'time.sleep', (['dureeEnSecondes'], {}), '(dureeEnSecondes)\n', (3515, 3532), False, 'import time\n'), ((5624, 5712), 'os.system', 'system', (["('./sinodoju.pl %d %s > cr_perl.txt 2> cr2_perl.txt &' % (nbCellules, graine))"], {}), "('./sinodoju.pl %d %s > cr_perl.txt 2> cr2_perl.txt &' % (nbCellules,\n graine))\n", (5630, 5712), False, 'from os import system\n'), ((6273, 6330), 'django.template.loader.get_template', 'loader.get_template', (['"""cestmoilechef/petite_merdasse.html"""'], {}), "('cestmoilechef/petite_merdasse.html')\n", (6292, 6330), False, 'from django.template import Context, loader\n'), ((6345, 6386), 'django.template.Context', 'Context', (["{'tabDeLignes': tableauDeLignes}"], {}), "({'tabDeLignes': tableauDeLignes})\n", (6352, 6386), False, 'from django.template import Context, loader\n'), ((6438, 6458), 'django.http.HttpResponse', 'HttpResponse', (['output'], {}), '(output)\n', (6450, 6458), False, 'from django.http import HttpResponse\n'), ((4321, 4337), 'time.localtime', 'time.localtime', ([], {}), '()\n', (4335, 4337), False, 'import time\n'), ((4522, 4538), 'time.localtime', 'time.localtime', ([], {}), '()\n', (4536, 4538), False, 'import 
time\n')] |
#!/usr/bin/env python
from unittest import TestCase
from boutiques.bosh import bosh
from boutiques.bids import validate_bids
from boutiques import __file__ as bofile
from jsonschema.exceptions import ValidationError
from boutiques.validator import DescriptorValidationError
import os.path as op
import simplejson as json
import os
class TestBIDS(TestCase):
    """Validate the BIDS example descriptors bundled with boutiques."""

    @staticmethod
    def _example(name):
        # Path of a schema example shipped alongside the boutiques package.
        return op.join(op.split(bofile)[0], 'schema/examples/' + name)

    def test_bids_good(self):
        """A valid BIDS descriptor passes 'bosh validate -b'."""
        self.assertFalse(bosh(["validate", self._example('bids_good.json'), '-b']))

    def test_bids_bad1(self):
        """A first invalid BIDS descriptor is rejected by the validator."""
        self.assertRaises(DescriptorValidationError, bosh,
                          ["validate", self._example('bids_bad1.json'), '-b'])

    def test_bids_bad2(self):
        """A second invalid BIDS descriptor is rejected by the validator."""
        self.assertRaises(DescriptorValidationError, bosh,
                          ["validate", self._example('bids_bad2.json'), '-b'])

    def test_bids_invalid(self):
        """validate_bids itself rejects an invalid descriptor dict."""
        descriptor = json.load(open(self._example('bids_bad2.json')))
        self.assertRaises(DescriptorValidationError, validate_bids,
                          descriptor, False)
| [
"boutiques.bosh.bosh",
"os.path.split"
] | [((493, 522), 'boutiques.bosh.bosh', 'bosh', (["['validate', fil, '-b']"], {}), "(['validate', fil, '-b'])\n", (497, 522), False, 'from boutiques.bosh import bosh\n'), ((413, 429), 'os.path.split', 'op.split', (['bofile'], {}), '(bofile)\n', (421, 429), True, 'import os.path as op\n'), ((577, 593), 'os.path.split', 'op.split', (['bofile'], {}), '(bofile)\n', (585, 593), True, 'import os.path as op\n'), ((829, 845), 'os.path.split', 'op.split', (['bofile'], {}), '(bofile)\n', (837, 845), True, 'import os.path as op\n'), ((1084, 1100), 'os.path.split', 'op.split', (['bofile'], {}), '(bofile)\n', (1092, 1100), True, 'import os.path as op\n')] |
from __future__ import print_function
import pathlib
from builtins import object, str
from typing import Dict
from empire.server.common import helpers
from empire.server.common.module_models import PydanticModule
from empire.server.utils import data_util
from empire.server.utils.module_util import handle_error_message
class Module(object):
    @staticmethod
    def generate(
        main_menu,
        module: PydanticModule,
        params: Dict,
        obfuscate: bool = False,
        obfuscation_command: str = "",
    ):
        """Build the PowerShell payload for the Get-SQLServerInfo module.

        When the ``CheckAll`` option is set, the Get-SQLInstanceDomain
        source is appended and its output is piped into Get-SQLServerInfo
        so that every discovered domain instance is queried.

        Returns the finalized script, or an error via handle_error_message
        when a module source cannot be loaded.
        """
        username = params["Username"]
        password = params["Password"]
        instance = params["Instance"]
        check_all = params["CheckAll"]

        # read in the common module source code
        script, err = main_menu.modules.get_module_source(
            module_name="situational_awareness/network/Get-SQLServerInfo.ps1",
            obfuscate=obfuscate,
            obfuscate_command=obfuscation_command,
        )
        if err:
            return handle_error_message(err)

        script_end = ""
        if check_all:
            # Append the helper that enumerates all domain SQL instances.
            # (The previous code *replaced* the already-loaded script — losing
            # Get-SQLServerInfo — and then read an undefined variable
            # 'sql_instance_source', crashing with NameError.)
            aux_script, err = main_menu.modules.get_module_source(
                module_name="situational_awareness/network/Get-SQLInstanceDomain.ps1",
                obfuscate=obfuscate,
                obfuscate_command=obfuscation_command,
            )
            if err:
                return handle_error_message(err)
            script += " " + aux_script

            script_end = " Get-SQLInstanceDomain "
            if username != "":
                script_end += " -Username " + username
            if password != "":
                script_end += " -Password " + password
            script_end += " | "

        script_end += " Get-SQLServerInfo"
        if username != "":
            script_end += " -Username " + username
        if password != "":
            script_end += " -Password " + password
        if instance != "" and not check_all:
            script_end += " -Instance " + instance

        outputf = params.get("OutputFunction", "Out-String")
        script_end += (
            f" | {outputf} | "
            + '%{$_ + "`n"};"`n'
            + str(module.name.split("/")[-1])
            + ' completed!"'
        )

        script = main_menu.modules.finalize_module(
            script=script,
            script_end=script_end,
            obfuscate=obfuscate,
            obfuscation_command=obfuscation_command,
        )
        return script
| [
"builtins.str"
] | [((1685, 1709), 'builtins.str', 'str', (['sql_instance_source'], {}), '(sql_instance_source)\n', (1688, 1709), False, 'from builtins import object, str\n')] |
from collections import defaultdict
from itertools import permutations
import networkx as nx
def air_duct_spelunking(inp, part1=True):
    """Solve Advent of Code 2016 day 24 for the given grid lines.

    Builds a graph of the open cells, locates the numbered points of
    interest and returns the length of the shortest walk that starts at
    point '0' and visits every other number (also returning to '0' when
    *part1* is False).
    """
    max_y = len(inp)
    max_x = max(len(line) for line in inp)
    grid = defaultdict(lambda: '#')      # cells outside the input count as walls
    numbers = defaultdict(lambda: '')    # digit -> node address of that point
    route_list = defaultdict(lambda: 0)  # (digit, digit) -> cached path length
    graph = nx.Graph()
    for y, row in enumerate(inp):
        yo = 1j * y  # complex numbers encode (x, y) coordinates
        for x, ch in enumerate(row):
            grid[x + yo] = ch
    for y in range(max_y):
        yo = 1j * y
        for x in range(max_x):
            if grid[x + yo] == '#':
                continue
            ch = grid[x + yo]
            node_address = str(x + yo)
            if ch.isdigit():
                graph.add_node(node_address, num=int(ch))
                numbers[ch] = node_address
            else:
                graph.add_node(node_address)
            # Connect to every open 4-neighbour.
            for offset in [1j, 1, -1j, -1]:
                if grid[x + yo + offset] != '#':
                    graph.add_edge(node_address, str(x + yo + offset))
    # find shortest path
    short_route = 1e9
    # Try every visiting order of the non-'0' digits (string comparison).
    for route in permutations([n for n in numbers.keys() if n > '0']):
        path = 0
        if part1:
            r = ['0'] + list(route)
        else:
            r = ['0'] + list(route) + ['0']
        for u, v in zip(r[:-1], r[1:]):
            if route_list[(u, v)] == 0:
                route_list[(u, v)] = nx.shortest_path_length(graph, numbers[u], numbers[v])
            path += route_list[(u, v)]
        if short_route > path:
            short_route = path
    return short_route
if __name__ == '__main__':
    # Read the puzzle input and print both answers.
    with open('input.txt') as cave_file:
        cave_lines = cave_file.read().splitlines(keepends=False)
    print(f'Day 24, part 1: {air_duct_spelunking(cave_lines)}')
    print(f'Day 24, part 2: {air_duct_spelunking(cave_lines, False)}')
# Day 24, part 1: 518
# Day 24, part 2: 716
| [
"networkx.shortest_path_length",
"collections.defaultdict",
"networkx.Graph"
] | [((213, 238), 'collections.defaultdict', 'defaultdict', (["(lambda : '#')"], {}), "(lambda : '#')\n", (224, 238), False, 'from collections import defaultdict\n'), ((252, 276), 'collections.defaultdict', 'defaultdict', (["(lambda : '')"], {}), "(lambda : '')\n", (263, 276), False, 'from collections import defaultdict\n'), ((293, 316), 'collections.defaultdict', 'defaultdict', (['(lambda : 0)'], {}), '(lambda : 0)\n', (304, 316), False, 'from collections import defaultdict\n'), ((328, 338), 'networkx.Graph', 'nx.Graph', ([], {}), '()\n', (336, 338), True, 'import networkx as nx\n'), ((1389, 1443), 'networkx.shortest_path_length', 'nx.shortest_path_length', (['graph', 'numbers[u]', 'numbers[v]'], {}), '(graph, numbers[u], numbers[v])\n', (1412, 1443), True, 'import networkx as nx\n')] |
import unittest2
from openerp.osv.orm import except_orm
import openerp.tests.common as common
from openerp.tools import mute_logger
class TestServerActionsBase(common.TransactionCase):
    """Shared fixture for the server-action tests: sample partner/country
    records, frequently used model/field ids, and a base 'code' action."""

    def setUp(self):
        """Create the test records and the TestAction server action."""
        super(TestServerActionsBase, self).setUp()
        cr, uid = self.cr, self.uid
        # Models
        self.ir_actions_server = self.registry('ir.actions.server')
        self.ir_actions_client = self.registry('ir.actions.client')
        self.ir_values = self.registry('ir.values')
        self.ir_model = self.registry('ir.model')
        self.ir_model_fields = self.registry('ir.model.fields')
        self.res_partner = self.registry('res.partner')
        self.res_country = self.registry('res.country')
        # Data on which we will run the server action
        self.test_country_id = self.res_country.create(cr, uid, {
            'name': 'TestingCountry',
            'code': 'TY',
            'address_format': 'SuperFormat',
        })
        self.test_country = self.res_country.browse(cr, uid, self.test_country_id)
        self.test_partner_id = self.res_partner.create(cr, uid, {
            'name': 'TestingPartner',
            'city': 'OrigCity',
            'country_id': self.test_country_id,
        })
        self.test_partner = self.res_partner.browse(cr, uid, self.test_partner_id)
        self.context = {
            'active_id': self.test_partner_id,
            'active_model': 'res.partner',
        }
        # Model data
        self.res_partner_model_id = self.ir_model.search(cr, uid, [('model', '=', 'res.partner')])[0]
        self.res_partner_name_field_id = self.ir_model_fields.search(cr, uid, [('model', '=', 'res.partner'), ('name', '=', 'name')])[0]
        self.res_partner_city_field_id = self.ir_model_fields.search(cr, uid, [('model', '=', 'res.partner'), ('name', '=', 'city')])[0]
        self.res_partner_country_field_id = self.ir_model_fields.search(cr, uid, [('model', '=', 'res.partner'), ('name', '=', 'country_id')])[0]
        self.res_partner_parent_field_id = self.ir_model_fields.search(cr, uid, [('model', '=', 'res.partner'), ('name', '=', 'parent_id')])[0]
        self.res_country_model_id = self.ir_model.search(cr, uid, [('model', '=', 'res.country')])[0]
        self.res_country_name_field_id = self.ir_model_fields.search(cr, uid, [('model', '=', 'res.country'), ('name', '=', 'name')])[0]
        self.res_country_code_field_id = self.ir_model_fields.search(cr, uid, [('model', '=', 'res.country'), ('name', '=', 'code')])[0]
        # create server action to
        self.act_id = self.ir_actions_server.create(cr, uid, {
            'name': 'TestAction',
            'condition': 'True',
            'model_id': self.res_partner_model_id,
            'state': 'code',
            'code': 'obj.write({"comment": "MyComment"})',
        })
class TestServerActions(TestServerActionsBase):
    """Exercise each ir.actions.server 'state' against the fixture data."""

    def test_00_action(self):
        """Condition evaluation and create/unlink of the contextual action."""
        cr, uid = self.cr, self.uid
        # Do: eval 'True' condition
        self.ir_actions_server.run(cr, uid, [self.act_id], self.context)
        self.test_partner.refresh()
        self.assertEqual(self.test_partner.comment, 'MyComment', 'ir_actions_server: invalid condition check')
        self.test_partner.write({'comment': False})
        # Do: eval False condition, that should be considered as True (void = True)
        self.ir_actions_server.write(cr, uid, [self.act_id], {'condition': False})
        self.ir_actions_server.run(cr, uid, [self.act_id], self.context)
        self.test_partner.refresh()
        self.assertEqual(self.test_partner.comment, 'MyComment', 'ir_actions_server: invalid condition check')
        # Do: create contextual action
        self.ir_actions_server.create_action(cr, uid, [self.act_id])
        # Test: ir_values created
        ir_values_ids = self.ir_values.search(cr, uid, [('name', '=', 'Run TestAction')])
        self.assertEqual(len(ir_values_ids), 1, 'ir_actions_server: create_action should have created an entry in ir_values')
        ir_value = self.ir_values.browse(cr, uid, ir_values_ids[0])
        self.assertEqual(ir_value.value, 'ir.actions.server,%s' % self.act_id, 'ir_actions_server: created ir_values should reference the server action')
        self.assertEqual(ir_value.model, 'res.partner', 'ir_actions_server: created ir_values should be linked to the action base model')
        # Do: remove contextual action
        self.ir_actions_server.unlink_action(cr, uid, [self.act_id])
        # Test: ir_values removed
        ir_values_ids = self.ir_values.search(cr, uid, [('name', '=', 'Run TestAction')])
        self.assertEqual(len(ir_values_ids), 0, 'ir_actions_server: unlink_action should remove the ir_values record')

    def test_10_code(self):
        """A 'code' action executes its python code and returns False."""
        cr, uid = self.cr, self.uid
        self.ir_actions_server.write(cr, uid, self.act_id, {
            'state': 'code',
            'code': """partner_name = obj.name + '_code'
self.pool["res.partner"].create(cr, uid, {"name": partner_name}, context=context)
workflow"""
        })
        run_res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
        self.assertFalse(run_res, 'ir_actions_server: code server action correctly finished should return False')
        pids = self.res_partner.search(cr, uid, [('name', 'ilike', 'TestingPartner_code')])
        self.assertEqual(len(pids), 1, 'ir_actions_server: 1 new partner should have been created')

    def test_20_trigger(self):
        """A 'trigger' action fires a workflow signal on the base record or
        on a record reached through a relational field."""
        cr, uid = self.cr, self.uid
        # Data: code server action (at this point code-based actions should work)
        act_id2 = self.ir_actions_server.create(cr, uid, {
            'name': 'TestAction2',
            'type': 'ir.actions.server',
            'condition': 'True',
            'model_id': self.res_partner_model_id,
            'state': 'code',
            'code': 'obj.write({"comment": "MyComment"})',
        })
        act_id3 = self.ir_actions_server.create(cr, uid, {
            'name': 'TestAction3',
            'type': 'ir.actions.server',
            'condition': 'True',
            'model_id': self.res_country_model_id,
            'state': 'code',
            'code': 'obj.write({"code": "ZZ"})',
        })
        # Data: create workflows
        partner_wf_id = self.registry('workflow').create(cr, uid, {
            'name': 'TestWorkflow',
            'osv': 'res.partner',
            'on_create': True,
        })
        partner_act1_id = self.registry('workflow.activity').create(cr, uid, {
            'name': 'PartnerStart',
            'wkf_id': partner_wf_id,
            'flow_start': True
        })
        partner_act2_id = self.registry('workflow.activity').create(cr, uid, {
            'name': 'PartnerTwo',
            'wkf_id': partner_wf_id,
            'kind': 'function',
            'action': 'True',
            'action_id': act_id2,
        })
        partner_trs1_id = self.registry('workflow.transition').create(cr, uid, {
            'signal': 'partner_trans',
            'act_from': partner_act1_id,
            'act_to': partner_act2_id
        })
        country_wf_id = self.registry('workflow').create(cr, uid, {
            'name': 'TestWorkflow',
            'osv': 'res.country',
            'on_create': True,
        })
        country_act1_id = self.registry('workflow.activity').create(cr, uid, {
            'name': 'CountryStart',
            'wkf_id': country_wf_id,
            'flow_start': True
        })
        country_act2_id = self.registry('workflow.activity').create(cr, uid, {
            'name': 'CountryTwo',
            'wkf_id': country_wf_id,
            'kind': 'function',
            'action': 'True',
            'action_id': act_id3,
        })
        country_trs1_id = self.registry('workflow.transition').create(cr, uid, {
            'signal': 'country_trans',
            'act_from': country_act1_id,
            'act_to': country_act2_id
        })
        # Data: re-create country and partner to benefit from the workflows
        self.test_country_id = self.res_country.create(cr, uid, {
            'name': 'TestingCountry2',
            'code': 'T2',
        })
        self.test_country = self.res_country.browse(cr, uid, self.test_country_id)
        self.test_partner_id = self.res_partner.create(cr, uid, {
            'name': 'TestingPartner2',
            'country_id': self.test_country_id,
        })
        self.test_partner = self.res_partner.browse(cr, uid, self.test_partner_id)
        self.context = {
            'active_id': self.test_partner_id,
            'active_model': 'res.partner',
        }
        # Run the action on partner object itself ('base')
        self.ir_actions_server.write(cr, uid, [self.act_id], {
            'state': 'trigger',
            'use_relational_model': 'base',
            'wkf_model_id': self.res_partner_model_id,
            'wkf_model_name': 'res.partner',
            'wkf_transition_id': partner_trs1_id,
        })
        self.ir_actions_server.run(cr, uid, [self.act_id], self.context)
        self.test_partner.refresh()
        self.assertEqual(self.test_partner.comment, 'MyComment', 'ir_actions_server: incorrect signal trigger')
        # Run the action on related country object ('relational')
        self.ir_actions_server.write(cr, uid, [self.act_id], {
            'use_relational_model': 'relational',
            'wkf_model_id': self.res_country_model_id,
            'wkf_model_name': 'res.country',
            'wkf_field_id': self.res_partner_country_field_id,
            'wkf_transition_id': country_trs1_id,
        })
        self.ir_actions_server.run(cr, uid, [self.act_id], self.context)
        self.test_country.refresh()
        self.assertEqual(self.test_country.code, 'ZZ', 'ir_actions_server: incorrect signal trigger')
        # Clear workflow cache, otherwise openerp will try to create workflows even if it has been deleted
        from openerp.workflow import clear_cache
        clear_cache(cr, uid)

    def test_30_client(self):
        """A 'client_action' action returns the linked client action dict."""
        cr, uid = self.cr, self.uid
        client_action_id = self.registry('ir.actions.client').create(cr, uid, {
            'name': 'TestAction2',
            'tag': 'Test',
        })
        self.ir_actions_server.write(cr, uid, [self.act_id], {
            'state': 'client_action',
            'action_id': client_action_id,
        })
        res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
        self.assertEqual(res['name'], 'TestAction2', 'ir_actions_server: incorrect return result for a client action')

    def test_40_crud_create(self):
        """'object_create' in its four modes: new, copy_current, new_other
        and copy_other."""
        cr, uid = self.cr, self.uid
        _city = 'TestCity'
        _name = 'TestNew'
        # Do: create a new record in the same model and link it
        self.ir_actions_server.write(cr, uid, [self.act_id], {
            'state': 'object_create',
            'use_create': 'new',
            'link_new_record': True,
            'link_field_id': self.res_partner_parent_field_id,
            'fields_lines': [(0, 0, {'col1': self.res_partner_name_field_id, 'value': _name}),
                             (0, 0, {'col1': self.res_partner_city_field_id, 'value': _city})],
        })
        run_res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
        self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
        # Test: new partner created
        pids = self.res_partner.search(cr, uid, [('name', 'ilike', _name)])
        self.assertEqual(len(pids), 1, 'ir_actions_server: TODO')
        partner = self.res_partner.browse(cr, uid, pids[0])
        self.assertEqual(partner.city, _city, 'ir_actions_server: TODO')
        # Test: new partner linked
        self.test_partner.refresh()
        self.assertEqual(self.test_partner.parent_id.id, pids[0], 'ir_actions_server: TODO')
        # Do: copy current record
        self.ir_actions_server.write(cr, uid, [self.act_id], {'fields_lines': [[5]]})
        self.ir_actions_server.write(cr, uid, [self.act_id], {
            'state': 'object_create',
            'use_create': 'copy_current',
            'link_new_record': False,
            'fields_lines': [(0, 0, {'col1': self.res_partner_name_field_id, 'value': 'TestCopyCurrent'}),
                             (0, 0, {'col1': self.res_partner_city_field_id, 'value': 'TestCity'})],
        })
        run_res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
        self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
        # Test: new partner created
        pids = self.res_partner.search(cr, uid, [('name', 'ilike', 'TestingPartner (copy)')])  # currently res_partner overrides default['name'] whatever its value
        self.assertEqual(len(pids), 1, 'ir_actions_server: TODO')
        partner = self.res_partner.browse(cr, uid, pids[0])
        self.assertEqual(partner.city, 'TestCity', 'ir_actions_server: TODO')
        self.assertEqual(partner.country_id.id, self.test_partner.country_id.id, 'ir_actions_server: TODO')
        # Do: create a new record in another model
        self.ir_actions_server.write(cr, uid, [self.act_id], {'fields_lines': [[5]]})
        self.ir_actions_server.write(cr, uid, [self.act_id], {
            'state': 'object_create',
            'use_create': 'new_other',
            'crud_model_id': self.res_country_model_id,
            'link_new_record': False,
            'fields_lines': [(0, 0, {'col1': self.res_country_name_field_id, 'value': 'obj.name', 'type': 'equation'}),
                             (0, 0, {'col1': self.res_country_code_field_id, 'value': 'obj.name[0:2]', 'type': 'equation'})],
        })
        run_res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
        self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
        # Test: new country created
        cids = self.res_country.search(cr, uid, [('name', 'ilike', 'TestingPartner')])
        self.assertEqual(len(cids), 1, 'ir_actions_server: TODO')
        country = self.res_country.browse(cr, uid, cids[0])
        self.assertEqual(country.code, 'TE', 'ir_actions_server: TODO')
        # Do: copy a record in another model
        self.ir_actions_server.write(cr, uid, [self.act_id], {'fields_lines': [[5]]})
        self.ir_actions_server.write(cr, uid, [self.act_id], {
            'state': 'object_create',
            'use_create': 'copy_other',
            'crud_model_id': self.res_country_model_id,
            'link_new_record': False,
            'ref_object': 'res.country,%s' % self.test_country_id,
            'fields_lines': [(0, 0, {'col1': self.res_country_name_field_id, 'value': 'NewCountry', 'type': 'value'}),
                             (0, 0, {'col1': self.res_country_code_field_id, 'value': 'NY', 'type': 'value'})],
        })
        run_res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
        self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
        # Test: new country created
        cids = self.res_country.search(cr, uid, [('name', 'ilike', 'NewCountry')])
        self.assertEqual(len(cids), 1, 'ir_actions_server: TODO')
        country = self.res_country.browse(cr, uid, cids[0])
        self.assertEqual(country.code, 'NY', 'ir_actions_server: TODO')
        self.assertEqual(country.address_format, 'SuperFormat', 'ir_actions_server: TODO')

    def test_50_crud_write(self):
        """'object_write' in its three modes: current, other and expression."""
        cr, uid = self.cr, self.uid
        _name = 'TestNew'
        # Do: create a new record in the same model and link it
        self.ir_actions_server.write(cr, uid, [self.act_id], {
            'state': 'object_write',
            'use_write': 'current',
            'fields_lines': [(0, 0, {'col1': self.res_partner_name_field_id, 'value': _name})],
        })
        run_res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
        self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
        # Test: new partner created
        pids = self.res_partner.search(cr, uid, [('name', 'ilike', _name)])
        self.assertEqual(len(pids), 1, 'ir_actions_server: TODO')
        partner = self.res_partner.browse(cr, uid, pids[0])
        self.assertEqual(partner.city, 'OrigCity', 'ir_actions_server: TODO')
        # Do: copy current record
        self.ir_actions_server.write(cr, uid, [self.act_id], {'fields_lines': [[5]]})
        self.ir_actions_server.write(cr, uid, [self.act_id], {
            'use_write': 'other',
            'crud_model_id': self.res_country_model_id,
            'ref_object': 'res.country,%s' % self.test_country_id,
            'fields_lines': [(0, 0, {'col1': self.res_country_name_field_id, 'value': 'obj.name', 'type': 'equation'})],
        })
        run_res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
        self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
        # Test: new country created
        cids = self.res_country.search(cr, uid, [('name', 'ilike', 'TestNew')])
        self.assertEqual(len(cids), 1, 'ir_actions_server: TODO')
        # Do: copy a record in another model
        self.ir_actions_server.write(cr, uid, [self.act_id], {'fields_lines': [[5]]})
        self.ir_actions_server.write(cr, uid, [self.act_id], {
            'use_write': 'expression',
            'crud_model_id': self.res_country_model_id,
            'write_expression': 'object.country_id',
            'fields_lines': [(0, 0, {'col1': self.res_country_name_field_id, 'value': 'NewCountry', 'type': 'value'})],
        })
        run_res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
        self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
        # Test: new country created
        cids = self.res_country.search(cr, uid, [('name', 'ilike', 'NewCountry')])
        self.assertEqual(len(cids), 1, 'ir_actions_server: TODO')

    @mute_logger('openerp.addons.base.ir.ir_model', 'openerp.models')
    def test_60_multi(self):
        """A 'multi' action runs its children in sequence, returns the last
        action dict, and refuses recursive child references."""
        cr, uid = self.cr, self.uid
        # Data: 2 server actions that will be nested
        act1_id = self.ir_actions_server.create(cr, uid, {
            'name': 'Subaction1',
            'sequence': 1,
            'model_id': self.res_partner_model_id,
            'state': 'code',
            'code': 'action = {"type": "ir.actions.act_window"}',
        })
        act2_id = self.ir_actions_server.create(cr, uid, {
            'name': 'Subaction2',
            'sequence': 2,
            'model_id': self.res_partner_model_id,
            'state': 'object_create',
            'use_create': 'copy_current',
        })
        act3_id = self.ir_actions_server.create(cr, uid, {
            'name': 'Subaction3',
            'sequence': 3,
            'model_id': self.res_partner_model_id,
            'state': 'code',
            'code': 'action = {"type": "ir.actions.act_url"}',
        })
        self.ir_actions_server.write(cr, uid, [self.act_id], {
            'state': 'multi',
            'child_ids': [(6, 0, [act1_id, act2_id, act3_id])],
        })
        # Do: run the action
        res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
        # Test: new partner created
        pids = self.res_partner.search(cr, uid, [('name', 'ilike', 'TestingPartner (copy)')])  # currently res_partner overrides default['name'] whatever its value
        self.assertEqual(len(pids), 1, 'ir_actions_server: TODO')
        # Test: action returned
        self.assertEqual(res.get('type'), 'ir.actions.act_url')
        # Test loops
        with self.assertRaises(except_orm):
            self.ir_actions_server.write(cr, uid, [self.act_id], {
                'child_ids': [(6, 0, [self.act_id])]
            })
if __name__ == '__main__':
unittest2.main()
| [
"openerp.workflow.clear_cache",
"openerp.tools.mute_logger",
"unittest2.main"
] | [((18225, 18289), 'openerp.tools.mute_logger', 'mute_logger', (['"""openerp.addons.base.ir.ir_model"""', '"""openerp.models"""'], {}), "('openerp.addons.base.ir.ir_model', 'openerp.models')\n", (18236, 18289), False, 'from openerp.tools import mute_logger\n'), ((20104, 20120), 'unittest2.main', 'unittest2.main', ([], {}), '()\n', (20118, 20120), False, 'import unittest2\n'), ((9965, 9985), 'openerp.workflow.clear_cache', 'clear_cache', (['cr', 'uid'], {}), '(cr, uid)\n', (9976, 9985), False, 'from openerp.workflow import clear_cache\n')] |
# -*- coding: utf-8 -*-
"""
Created on 2021-03-11 18:53:58
---------
@summary:
抓糗事百科的案例
---------
@author: Administrator
"""
import feapder
class Spider(feapder.AirSpider):
def start_requests(self):
for page_num in range(1, 2):
url = "https://www.qiushibaike.com/8hr/page/{}/".format(page_num)
yield feapder.Request(url)
def parse(self, request, response):
articles = response.xpath('//li[@id]/div/a')
for article in articles:
title = article.xpath('./text()').extract_first()
# 这里解析<a>的href,会留意到,此处的href已经是合并完整了的
url = article.xpath('./@href').extract_first()
# 新的请求
# 用法类似于scrapy
# callback 为回调函数
# 若是有其它需要传递的参数,直接写入即可,如title
yield feapder.Request(url, callback=self.parse_detail, title=title)
def parse_detail(self, request, response):
print('title:{}'.format(request.title))
print('url:{}'.format(response.url))
print('author:{}'.format(response.xpath('//*[@id="articleSideLeft"]/a/img/@alt').extract_first()))
response.encoding_errors = 'ignore' # 由于文章内容可能含有utf-8不能解析的字符,这里设置遇到不能解析字符就调过
print('content:{}'.format(response.xpath('string(//div[@class="content"])').extract_first()))
if __name__ == "__main__":
# Spider().start()
Spider(thread_count=3).start() # 设置3个线程来加快爬取速度 | [
"feapder.Request"
] | [((343, 363), 'feapder.Request', 'feapder.Request', (['url'], {}), '(url)\n', (358, 363), False, 'import feapder\n'), ((796, 857), 'feapder.Request', 'feapder.Request', (['url'], {'callback': 'self.parse_detail', 'title': 'title'}), '(url, callback=self.parse_detail, title=title)\n', (811, 857), False, 'import feapder\n')] |
"""Fixtures for tests."""
import json
import boto3
from moto import mock_dynamodb2
import pytest
from fixtures import LambdaContextMock
import payloads
@pytest.fixture
def event():
"""Return parsed event."""
with open('tests/payloads/success.json') as json_data:
return json.load(json_data)
@pytest.fixture
def context():
"""Return mock lambda context."""
return LambdaContextMock()
@pytest.fixture
def expected_table():
"""Return dynamodb table name fixture."""
return "test-certbot-ventilator-certificates"
@pytest.fixture
def region():
"""Return AWS region fixture."""
return "us-west-2"
@pytest.fixture
def setup_aws_creds(monkeypatch, region):
"""Set up AWS credential environment vars to make boto3 happy."""
monkeypatch.setenv("AWS_ACCESS_KEY_ID", "testing")
monkeypatch.setenv("AWS_SECRET_ACCESS_KEY", "testing")
monkeypatch.setenv("AWS_SECURITY_TOKEN", "testing")
monkeypatch.setenv("AWS_SESSION_TOKEN", "testing")
monkeypatch.setenv("AWS_DEFAULT_REGION", region)
@pytest.fixture(autouse=True)
def install_mock_dynamodb(setup_aws_creds):
"""Mock out boto3 S3 with moto."""
with mock_dynamodb2():
yield
@pytest.fixture(autouse=True)
def setup_table(install_mock_dynamodb, expected_table):
"""Create a table and populate it with a column value."""
dynamodb = boto3.client("dynamodb")
dynamodb.create_table(
TableName=expected_table,
KeySchema=[
{
'AttributeName': 'subject_alternative_name',
'KeyType': 'HASH'
},
],
AttributeDefinitions=[
{
'AttributeName': 'subject_alternative_name',
'AttributeType': 'S'
},
],
ProvisionedThroughput={
'ReadCapacityUnits': 5,
'WriteCapacityUnits': 5
}
)
item = {
"subject_alternative_name": {"S": "test-domain.pxsys.net"}
}
dynamodb.put_item(TableName=expected_table, Item=item)
| [
"boto3.client",
"moto.mock_dynamodb2",
"json.load",
"fixtures.LambdaContextMock",
"pytest.fixture"
] | [((1052, 1080), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (1066, 1080), False, 'import pytest\n'), ((1207, 1235), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (1221, 1235), False, 'import pytest\n'), ((393, 412), 'fixtures.LambdaContextMock', 'LambdaContextMock', ([], {}), '()\n', (410, 412), False, 'from fixtures import LambdaContextMock\n'), ((1369, 1393), 'boto3.client', 'boto3.client', (['"""dynamodb"""'], {}), "('dynamodb')\n", (1381, 1393), False, 'import boto3\n'), ((290, 310), 'json.load', 'json.load', (['json_data'], {}), '(json_data)\n', (299, 310), False, 'import json\n'), ((1173, 1189), 'moto.mock_dynamodb2', 'mock_dynamodb2', ([], {}), '()\n', (1187, 1189), False, 'from moto import mock_dynamodb2\n')] |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
# <NAME> (<EMAIL>)
import py_trees as pt
import py_trees_ros as ptr
import time
import numpy as np
import rospy
import tf
import actionlib
# from move_base_msgs.msg import MoveBaseAction, MoveBaseGoal
from smarc_msgs.msg import GotoWaypointAction, GotoWaypointGoal
import actionlib_msgs.msg as actionlib_msgs
from geometry_msgs.msg import PointStamped, PoseArray, PoseStamped
from nav_msgs.msg import Path
from std_msgs.msg import Float64, Header, Bool, Empty
from visualization_msgs.msg import MarkerArray
from sensor_msgs.msg import NavSatFix
from std_srvs.srv import SetBool
from imc_ros_bridge.msg import EstimatedState, VehicleState, PlanDB, PlanDBInformation, PlanDBState, PlanControlState, PlanControl, PlanSpecification, Maneuver
import bb_enums
import imc_enums
import common_globals
from mission_plan import MissionPlan
from mission_log import MissionLog
class A_PublishFinalize(pt.behaviour.Behaviour):
    """
    Publish one Empty message on the given topic to signal mission finalization.
    Publishes only while MISSION_FINALIZED is False in the blackboard; once a
    publish succeeds the flag is set so the message is never sent twice.
    """
    def __init__(self, topic):
        super(A_PublishFinalize, self).__init__(name="A_PublishFinalize")
        self.bb = pt.blackboard.Blackboard()
        self.topic = topic
        self.last_published_time = None
        # pre-built message; it carries no payload
        self.message_object = Empty()

    def setup(self, timeout):
        self.pub = rospy.Publisher(self.topic, Empty, queue_size=1)
        return True

    def update(self):
        if self.last_published_time is None:
            self.feedback_message = "Never published!"
        else:
            time_since = time.time() - self.last_published_time
            self.feedback_message = "Last pub'd:{:.2f}s ago".format(time_since)

        # already finalized -> nothing left to do
        if self.bb.get(bb_enums.MISSION_FINALIZED):
            return pt.Status.SUCCESS

        try:
            self.pub.publish(self.message_object)
        except:
            msg = "Couldn't publish"
            rospy.logwarn_throttle(1, msg)
            self.feedback_message = msg
            return pt.Status.FAILURE

        self.last_published_time = time.time()
        self.feedback_message = "Just published"
        self.bb.set(bb_enums.MISSION_FINALIZED, True)
        return pt.Status.SUCCESS
class A_ManualMissionLog(pt.behaviour.Behaviour):
    """
    Keep a free-running (plan-less) mission log while ENABLE_MANUAL_MISSION_LOG
    is set in the blackboard. Each tick appends the current pose, the raw GPS
    position (converted to UTM when a fix exists) and the tree tip. When the
    flag is cleared, any open log is saved once and dropped.
    """
    def __init__(self):
        super(A_ManualMissionLog, self).__init__(name="A_ManualMissionLog")
        self.bb = pt.blackboard.Blackboard()
        self.started_logs = 0
        self.num_saved_logs = 0

    def start_new_log(self):
        """Create a fresh MissionLog with no mission plan and put it in the BB."""
        save_location = self.bb.get(bb_enums.MISSION_LOG_FOLDER)
        log = MissionLog(mission_plan = None,
                         save_location = save_location)
        self.bb.set(bb_enums.MANUAL_MISSION_LOG_OBJ, log)
        rospy.loginfo("Started new manual mission log")
        self.started_logs += 1
        return log

    def update(self):
        enabled = self.bb.get(bb_enums.ENABLE_MANUAL_MISSION_LOG)
        log = self.bb.get(bb_enums.MANUAL_MISSION_LOG_OBJ)

        if not enabled:
            # if we have a log, we save it now
            # and set it to None, so next time we are
            # disabled we dont do anything
            if log is not None:
                log.save()
                self.bb.set(bb_enums.MANUAL_MISSION_LOG_OBJ, None)
                self.num_saved_logs += 1

            self.feedback_message = "Disabled, {} logs saved".format(self.num_saved_logs)
            return pt.Status.SUCCESS

        if log is None:
            log = self.start_new_log()

        # first add the auv pose
        world_trans = self.bb.get(bb_enums.WORLD_TRANS)
        x,y = world_trans[0], world_trans[1]
        z = -self.bb.get(bb_enums.DEPTH)
        log.navigation_trace.append((x,y,z))

        # then add the raw gps
        gps = self.bb.get(bb_enums.RAW_GPS)
        if gps is None or gps.status.status == -1: # no fix
            gps_utm_point = None
        else:
            # BUG FIX: the original referenced an undefined name `mplan` and
            # misspelled `gps.lonitude`, so any tick with a GPS fix raised.
            # The mission plan (if any) provides the latlon->utm service.
            mplan = self.bb.get(bb_enums.MISSION_PLAN_OBJ)
            if mplan is None:
                gps_utm_point = None
            else:
                gps_utm_x, gps_utm_y = mplan.latlon_to_utm(gps.latitude, gps.longitude)
                if gps_utm_x is None:
                    gps_utm_point = None
                else:
                    # BUG FIX: gps_utm_point was left undefined on success
                    gps_utm_point = (gps_utm_x, gps_utm_y)
        log.raw_gps_trace.append(gps_utm_point)

        # then add the tree tip and its status
        tree_tip = self.bb.get(bb_enums.TREE_TIP_NAME)
        tip_status = self.bb.get(bb_enums.TREE_TIP_STATUS)
        log.tree_tip_trace.append((tree_tip, tip_status))

        self.feedback_message = "Log len:{} of log#{}".format(len(log.navigation_trace), self.started_logs)
        return pt.Status.SUCCESS
class A_SaveMissionLog(pt.behaviour.Behaviour):
    """Save the current mission log (if one exists) and clear it from the BB."""
    def __init__(self):
        super(A_SaveMissionLog, self).__init__(name="A_SaveMissionLog")
        self.bb = pt.blackboard.Blackboard()
        self.num_saved_logs = 0

    def update(self):
        log = self.bb.get(bb_enums.MISSION_LOG_OBJ)
        if log is None:
            # nothing pending; just report the running total
            self.feedback_message = "#saved logs:{}".format(self.num_saved_logs)
            return pt.Status.SUCCESS

        log.save()
        self.num_saved_logs += 1
        self.bb.set(bb_enums.MISSION_LOG_OBJ, None)
        self.feedback_message = "Saved log #{}!".format(self.num_saved_logs)
        return pt.Status.SUCCESS
class A_UpdateMissionLog(pt.behaviour.Behaviour):
    """
    Append one sample (pose, raw GPS in UTM, tree tip) to the mission log of
    the currently-running mission plan, starting a new log when the plan
    changes. FAILURE when there is no mission plan to log against.
    """
    def __init__(self):
        super(A_UpdateMissionLog, self).__init__(name="A_UpdateMissionLog")
        self.bb = pt.blackboard.Blackboard()
        self.started_logs = 0

    def start_new_log(self, mplan):
        """Create a fresh MissionLog tied to the given mission plan."""
        save_location = self.bb.get(bb_enums.MISSION_LOG_FOLDER)
        log = MissionLog(mission_plan = mplan,
                         save_location = save_location)
        self.bb.set(bb_enums.MISSION_LOG_OBJ, log)
        rospy.loginfo("Started new mission log")
        self.started_logs += 1
        return log

    def update(self):
        # only update if there is an unfinalized mission that has been started
        mplan = self.bb.get(bb_enums.MISSION_PLAN_OBJ)
        if mplan is None:
            rospy.loginfo("Mission plan is None, can't make a log of this?")
            self.feedback_message = "No mission plan!"
            return pt.Status.FAILURE

        log = self.bb.get(bb_enums.MISSION_LOG_OBJ)
        if log is None:
            log = self.start_new_log(mplan)

        # check if the mission has changed in the meantime
        # this can happen when the user starts a mission, stops it,
        # and then starts a different one
        # we dont wanna log the incomplete one
        # did it change since we last got called?
        if log.creation_time != mplan.creation_time:
            # it changed!
            # re-start a log
            log = self.start_new_log(mplan)

        # now we got a valid mission plan

        # first add the auv pose
        world_trans = self.bb.get(bb_enums.WORLD_TRANS)
        x,y = world_trans[0], world_trans[1]
        z = -self.bb.get(bb_enums.DEPTH)
        log.navigation_trace.append((x,y,z))

        # then add the raw gps
        gps = self.bb.get(bb_enums.RAW_GPS)
        if gps is None or gps.status.status == -1: # no fix
            gps_utm_point = None
        else:
            # translate the latlon to utm point using the same service as the mission plan
            # BUG FIX: the original read `gps.lonitude` (AttributeError) and left
            # `gps_utm_point` undefined when the conversion succeeded (NameError)
            gps_utm_x, gps_utm_y = mplan.latlon_to_utm(gps.latitude, gps.longitude)
            if gps_utm_x is None:
                gps_utm_point = None
            else:
                gps_utm_point = (gps_utm_x, gps_utm_y)
        log.raw_gps_trace.append(gps_utm_point)

        # then add the tree tip and its status
        tree_tip = self.bb.get(bb_enums.TREE_TIP_NAME)
        tip_status = self.bb.get(bb_enums.TREE_TIP_STATUS)
        log.tree_tip_trace.append((tree_tip, tip_status))

        self.feedback_message = "Log len:{} of log#{}".format(len(log.navigation_trace), self.started_logs)
        return pt.Status.SUCCESS
class A_SetDVLRunning(pt.behaviour.Behaviour):
    def __init__(self, dvl_on_off_service_name, running, cooldown):
        """
        Toggle the DVL on or off through a SetBool service.

        dvl_on_off_service_name -- name of the SetBool on/off service
        running -- desired DVL state (True = running)
        cooldown -- minimum seconds between successive service calls
        """
        super(A_SetDVLRunning, self).__init__(name="A_SetDVLRunning")
        self.switcher_service = rospy.ServiceProxy(dvl_on_off_service_name,
                                                    SetBool)
        self.bb = pt.blackboard.Blackboard()
        # NOTE(review): this instantiates the service *type*, not a
        # SetBoolRequest; `.data` below is just a plain attribute. It works
        # because only `.data` is ever read, but SetBoolRequest would be the
        # conventional object — confirm before changing.
        self.sb = SetBool()
        self.sb.data = running
        self.running = running
        # timestamp of the last successful toggle, for the cooldown check
        self.last_toggle = 0
        self.cooldown = cooldown
        self.service_name = dvl_on_off_service_name

    def update(self):
        # try not to call the service every tick...
        dvl_is_running = self.bb.get(bb_enums.DVL_IS_RUNNING)
        if dvl_is_running is not None:
            # BB already reflects the desired state -> nothing to do
            if dvl_is_running == self.sb.data:
                rospy.loginfo_throttle_identical(20, "DVL is already running:"+str(self.sb.data))
                return pt.Status.SUCCESS

        # check if enough time has passed since last call
        t = time.time()
        if t - self.last_toggle < self.cooldown:
            # nope, return running while we wait
            rospy.loginfo_throttle_identical(5, "Waiting on DVL toggle cooldown")
            return pt.Status.RUNNING

        try:
            ret = self.switcher_service(self.running)
        except rospy.service.ServiceException:
            # service missing: succeed by default so the tree is not blocked
            rospy.logwarn_throttle_identical(60, "DVL Start/stop service not found! Succeeding by default namespace:{}".format(self.service_name))
            return pt.Status.SUCCESS

        if ret.success:
            rospy.loginfo_throttle_identical(5, "DVL TOGGLED:"+str(self.sb.data))
            self.last_toggle = time.time()
            # remember the new state so we can short-circuit next tick
            self.bb.set(bb_enums.DVL_IS_RUNNING, self.sb.data)
            return pt.Status.SUCCESS

        rospy.logwarn_throttle_identical(5, "DVL COULD NOT BE TOGGLED:{}, ret:{}".format(self.sb.data, ret))
        return pt.Status.FAILURE
class A_EmergencySurface(ptr.actions.ActionClient):
    def __init__(self, emergency_action_namespace):
        """
        What to do when an emergency happens. This should be a very simple
        action that is super unlikely to fail, ever. It should also 'just work'
        without a goal.
        Like surfacing.
        """
        self.bb = pt.blackboard.Blackboard()
        self.action_goal_handle = None

        ptr.actions.ActionClient.__init__(
            self,
            name="A_EmergencySurface",
            action_spec=GotoWaypointAction,
            action_goal=None,
            action_namespace= emergency_action_namespace,
            override_feedback_message_on_running="EMERGENCY SURFACING"
        )

        # set True in setup() once the action server is reachable
        self.action_server_ok = False

    def setup(self, timeout):
        """
        Overwriting the normal ptr action setup to stop it from failing the
        setup step and instead handling this failure in the tree.
        """
        self.logger.debug("%s.setup()" % self.__class__.__name__)
        self.action_client = actionlib.SimpleActionClient(
            self.action_namespace,
            self.action_spec
        )
        if not self.action_client.wait_for_server(rospy.Duration(timeout)):
            self.logger.error("{0}.setup() could not connect to the action server at '{1}'".format(self.__class__.__name__, self.action_namespace))
            self.action_client = None
            self.action_server_ok = False
        else:
            self.action_server_ok = True

        # always report setup success; the missing server is handled in update()
        return True

    def initialise(self):
        if not self.action_server_ok:
            rospy.logwarn_throttle_identical(5, "No Action Server found for emergency action, will just block the tree!")
            return
        self.feedback_message = "EMERGENCY SURFACING"

        # construct the message
        # an empty goal: the emergency server is expected to work without one
        self.action_goal = GotoWaypointGoal()
        self.sent_goal = False

    def update(self):
        if not self.action_server_ok:
            self.feedback_message = "Action Server for emergency action can not be used!"
            rospy.logerr_throttle_identical(5,self.feedback_message)
            return pt.Status.FAILURE

        # if your action client is not valid
        if not self.action_client:
            self.feedback_message = "ActionClient for emergency action is invalid!"
            rospy.logwarn_throttle_identical(5,self.feedback_message)
            return pt.Status.FAILURE

        # if the action_goal is invalid
        if not self.action_goal:
            self.feedback_message = "No action_goal!"
            rospy.logwarn(self.feedback_message)
            return pt.Status.FAILURE

        # if goal hasn't been sent yet
        if not self.sent_goal:
            self.action_goal_handle = self.action_client.send_goal(self.action_goal, feedback_cb=self.feedback_cb)
            self.sent_goal = True
            rospy.loginfo("Sent goal to action server:"+str(self.action_goal))
            self.feedback_message = "Emergency goal sent"
            return pt.Status.RUNNING


        # if the goal was aborted or preempted
        if self.action_client.get_state() in [actionlib_msgs.GoalStatus.ABORTED,
                                              actionlib_msgs.GoalStatus.PREEMPTED]:
            self.feedback_message = "Aborted emergency"
            rospy.loginfo(self.feedback_message)
            return pt.Status.FAILURE

        result = self.action_client.get_result()

        # if the goal was accomplished
        if result:
            self.feedback_message = "Completed emergency"
            rospy.loginfo(self.feedback_message)
            return pt.Status.SUCCESS

        # if we're still trying to accomplish the goal
        return pt.Status.RUNNING

    def feedback_cb(self, msg):
        # feedback is intentionally ignored during an emergency surface
        pass
class A_SetNextPlanAction(pt.behaviour.Behaviour):
    def __init__(self, do_not_visit=False):
        """
        Advance the mission plan and write the current waypoint into the BB.
        SUCCESS when a non-None waypoint could be set, FAILURE otherwise.

        With do_not_visit=True the plan is NOT advanced; only the current
        waypoint is read and set. Useful right after a plan is created.
        """
        self.bb = pt.blackboard.Blackboard()
        super(A_SetNextPlanAction, self).__init__('A_SetNextPlanAction')
        self.do_not_visit = do_not_visit

    def update(self):
        mission_plan = self.bb.get(bb_enums.MISSION_PLAN_OBJ)
        if mission_plan is None:
            rospy.logwarn_throttle(5, "Mission plan was None!")
            return pt.Status.FAILURE

        if not self.do_not_visit:
            mission_plan.visit_wp()

        current_wp = mission_plan.get_current_wp()
        if current_wp is None:
            self.feedback_message = "Next action was None"
            rospy.logwarn_throttle(5, "Mission is complete:{}".format(mission_plan.is_complete()))
            return pt.Status.FAILURE

        rospy.loginfo_throttle_identical(5, "Set CURRENT_PLAN_ACTION {} to: {}".format(self.do_not_visit, str(current_wp)))
        self.bb.set(bb_enums.CURRENT_PLAN_ACTION, current_wp)
        return pt.Status.SUCCESS
class A_GotoWaypoint(ptr.actions.ActionClient):
    def __init__(self,
                 action_namespace,
                 goal_tf_frame = 'utm',
                 node_name = "A_GotoWaypoint"):
        """
        Runs an action server that will move the robot to the given waypoint.

        action_namespace -- namespace of the GotoWaypoint action server
        goal_tf_frame -- tf frame the waypoints are expected to be in
        node_name -- name of this behaviour, also registered in MANEUVER_ACTIONS
        """
        self.bb = pt.blackboard.Blackboard()
        self.node_name = node_name

        # register this node in the blackboard's list of maneuver actions
        list_of_maneuvers = self.bb.get(bb_enums.MANEUVER_ACTIONS)
        if list_of_maneuvers is None:
            list_of_maneuvers = [self.node_name]
        else:
            list_of_maneuvers.append(self.node_name)
        self.bb.set(bb_enums.MANEUVER_ACTIONS, list_of_maneuvers)

        self.action_goal_handle = None

        # become action client
        ptr.actions.ActionClient.__init__(
            self,
            name = self.node_name,
            action_spec = GotoWaypointAction,
            action_goal = None,
            action_namespace = action_namespace,
            override_feedback_message_on_running = "Moving to waypoint"
        )

        # set True in setup() once the action server is reachable
        self.action_server_ok = False
        self.goal_tf_frame = goal_tf_frame

    def setup(self, timeout):
        """
        Overwriting the normal ptr action setup to stop it from failing the
        setup step and instead handling this failure in the tree.
        """
        self.logger.debug("%s.setup()" % self.__class__.__name__)
        self.action_client = actionlib.SimpleActionClient(
            self.action_namespace,
            self.action_spec
        )
        if not self.action_client.wait_for_server(rospy.Duration(timeout)):
            self.logger.error("{0}.setup() could not connect to the action server at '{1}'".format(self.__class__.__name__, self.action_namespace))
            self.action_client = None
        else:
            self.action_server_ok = True

        # always report setup success; a missing server is handled in update()
        return True

    def initialise(self):
        if not self.action_server_ok:
            rospy.logwarn_throttle(5, "No action server found for A_GotoWaypoint!")
            return

        mission_plan = self.bb.get(bb_enums.MISSION_PLAN_OBJ)
        if mission_plan is None:
            rospy.logwarn("No mission plan found!")
            return

        wp = mission_plan.get_current_wp()
        if wp is None:
            rospy.loginfo("No wp found to execute! Does the plan have any waypoints that we understand?")
            return

        if wp.tf_frame != self.goal_tf_frame:
            # BUG FIX: the original formatted an undefined name `frame` here,
            # which raised a NameError exactly when the mismatch occurred
            rospy.logerr_throttle(5, 'The frame of the waypoint({0}) does not match the expected frame({1}) of the action client!'.format(wp.tf_frame, self.goal_tf_frame))
            return

        if wp.maneuver_id != imc_enums.MANEUVER_GOTO:
            rospy.loginfo("THIS IS A GOTO MANEUVER, WE ARE USING IT FOR SOMETHING ELSE")

        # get the goal tolerance as a dynamic variable from the bb
        goal_tolerance = self.bb.get(bb_enums.WAYPOINT_TOLERANCE)

        # construct the message
        goal = GotoWaypointGoal()
        goal.waypoint_pose.pose.position.x = wp.x
        goal.waypoint_pose.pose.position.y = wp.y
        goal.goal_tolerance = goal_tolerance

        # 0=None, 1=Depth, 2=Altitude in the action
        # thankfully these are the same in IMC and in the Action
        # but Action doesnt have 'height'
        if wp.z_unit == imc_enums.Z_HEIGHT:
            wp.z_unit = imc_enums.Z_NONE
        goal.z_control_mode = wp.z_unit
        goal.travel_depth = wp.z

        # 0=None, 1=RPM, 2=speed in the action
        # 0=speed, 1=rpm, 2=percentage in IMC
        if wp.speed_unit == imc_enums.SPEED_UNIT_RPM:
            goal.speed_control_mode = GotoWaypointGoal.SPEED_CONTROL_RPM
            goal.travel_rpm = wp.speed
        elif wp.speed_unit == imc_enums.SPEED_UNIT_MPS:
            goal.speed_control_mode = GotoWaypointGoal.SPEED_CONTROL_SPEED
            goal.travel_speed = wp.speed
        else:
            goal.speed_control_mode = GotoWaypointGoal.SPEED_CONTROL_NONE
            rospy.logwarn_throttle(1, "Speed control of the waypoint action is NONE!")

        self.action_goal = goal
        rospy.loginfo(">>> Goto waypoint action goal initialized:"+str(goal))

        # ensure that we still need to send the goal
        self.sent_goal = False

    def update(self):
        """
        Check only to see whether the underlying action server has
        succeeded, is running, or has cancelled/aborted for some reason and
        map these to the usual behaviour return states.
        """
        if not self.action_server_ok:
            self.feedback_message = "Action Server for gotowp action can not be used!"
            rospy.logerr_throttle_identical(5,self.feedback_message)
            return pt.Status.FAILURE

        # if your action client is not valid
        if not self.action_client:
            self.feedback_message = "ActionClient is invalid! Client:"+str(self.action_client)
            rospy.logerr(self.feedback_message)
            return pt.Status.FAILURE

        # if the action_goal is invalid
        if not self.action_goal:
            self.feedback_message = "No action_goal!"
            rospy.logwarn(self.feedback_message)
            return pt.Status.FAILURE

        # if goal hasn't been sent yet
        if not self.sent_goal:
            self.action_goal_handle = self.action_client.send_goal(self.action_goal, feedback_cb=self.feedback_cb)
            self.sent_goal = True
            rospy.loginfo("Sent goal to action server:"+str(self.action_goal))
            self.feedback_message = "Goal sent"
            return pt.Status.RUNNING

        # if the goal was aborted or preempted
        if self.action_client.get_state() in [actionlib_msgs.GoalStatus.ABORTED,
                                              actionlib_msgs.GoalStatus.PREEMPTED]:
            self.feedback_message = "Aborted goal"
            rospy.loginfo(self.feedback_message)
            return pt.Status.FAILURE

        result = self.action_client.get_result()

        # if the goal was accomplished
        if result is not None and result.reached_waypoint:
            self.feedback_message = "Completed goal"
            rospy.loginfo(self.feedback_message)
            return pt.Status.SUCCESS

        return pt.Status.RUNNING

    def feedback_cb(self, msg):
        # surface the action server's ETA as this behaviour's feedback
        fb = str(msg.ETA)
        self.feedback_message = "ETA:"+fb
        rospy.loginfo_throttle(5, fb)
class A_UpdateTF(pt.behaviour.Behaviour):
    def __init__(self, utm_link, base_link):
        """
        reads the current translation and orientation from the TF tree
        and puts that into the BB

        utm_link and base_link are tf link names where utm_link is essentially the world coordinates.
        check the neptus-related actions too for more info on utm_link
        """
        super(A_UpdateTF, self).__init__("A_UpdateTF")
        self.bb = pt.blackboard.Blackboard()
        self.utm_link = utm_link
        self.base_link = base_link
        self.listener = tf.TransformListener()
        # set True in setup() once the transform has been seen at least once
        self.tf_ok = False

        self.last_read_time = None

    def setup(self, timeout):
        try:
            rospy.loginfo_throttle(3, "Waiting for transform from {} to {}...".format(self.utm_link, self.base_link))
            self.listener.waitForTransform(self.utm_link, self.base_link, rospy.Time(), rospy.Duration(timeout))
            rospy.loginfo_throttle(3, "...Got it")
            self.tf_ok = True
        except:
            # NOTE(review): bare except — deliberately best-effort; setup
            # still returns True and the failure is handled per-tick in update()
            rospy.logerr_throttle(5, "Could not find from "+self.utm_link+" to "+self.base_link + "... Nothing except safety will be run")

        return True

    def update(self):
        if self.last_read_time is not None:
            time_since = time.time() - self.last_read_time
            self.feedback_message = "Last read:{:.2f}s ago".format(time_since)
        else:
            self.feedback_message = "No msg received ever"

        try:
            (world_trans, world_rot) = self.listener.lookupTransform(self.utm_link,
                                                                     self.base_link,
                                                                     rospy.Time(0))
            self.last_read_time = time.time()
        except (tf.LookupException, tf.ConnectivityException):
            rospy.logerr_throttle_identical(5, "Could not get transform between {} and {}".format(self.utm_link, self.base_link))
            return pt.Status.FAILURE
        except:
            # NOTE(review): bare except also catches e.g. tf.ExtrapolationException
            rospy.logerr_throttle_identical(5, "Could not do tf lookup for some other reason")
            return pt.Status.FAILURE

        self.bb.set(bb_enums.WORLD_TRANS, world_trans)
        self.bb.set(bb_enums.WORLD_ROT, world_rot)
        # also create this pointstamped object so that we can transform this
        # easily to w/e other frame is needed later
        ps = PointStamped()
        ps.header.frame_id = self.utm_link
        ps.header.stamp = rospy.Time(0)
        ps.point.x = world_trans[0]
        ps.point.y = world_trans[1]
        ps.point.z = world_trans[2]
        self.bb.set(bb_enums.LOCATION_POINT_STAMPED, ps)

        # the Z component is UP, so invert to get "depth"
        self.bb.set(bb_enums.DEPTH, -world_trans[2])

        return pt.Status.SUCCESS
class A_UpdateNeptusPlanControl(pt.behaviour.Behaviour):
    def __init__(self, plan_control_topic):
        """
        Listen to Neptus PlanControl messages (start/stop/teleop buttons)
        and translate them into blackboard flags (PLAN_IS_GO, ENABLE_AUTONOMY).
        Always returns SUCCESS; receiving no message is fine.
        """
        super(A_UpdateNeptusPlanControl, self).__init__("A_UpdateNeptusPlanControl")
        self.bb = pt.blackboard.Blackboard()
        # latest unprocessed message; consumed (set back to None) in update()
        self.plan_control_msg = None
        self.plan_control_topic = plan_control_topic
        self.sub = None

    def setup(self, timeout):
        self.sub = rospy.Subscriber(self.plan_control_topic, PlanControl, self.plancontrol_cb)
        return True

    def plancontrol_cb(self, plan_control_msg):
        # rospy.loginfo("plancontrol_cb {}".format(plan_control_msg))
        # just stash the message; all interpretation happens in update()
        self.plan_control_msg = plan_control_msg

    def update(self):
        plan_control_msg = self.plan_control_msg
        if plan_control_msg is None:
            # not receiving anything is ok.
            return pt.Status.SUCCESS

        # check if this message is a 'go' or 'no go' message
        # imc/plan_control(569):
        # int type:[0,1,2,3] req,suc,fail,in prog
        # int op:[0,1,2,3] start, stop, load, get
        # int request_id
        # string plan_id
        # int flags
        # string info

        # the start button in neptus sends:
        # type:0 op:0 plan_id:"string" flags:1
        # stop button sends:
        # type:0 op:1 plan_id:'' flags:1
        # teleop button sends:
        # type:0 op:0 plan_id:"teleoperation-mode" flags:0

        typee = plan_control_msg.type
        op = plan_control_msg.op
        plan_id = plan_control_msg.plan_id
        flags = plan_control_msg.flags

        # somehow this happens...
        if plan_id is None:
            plan_id=''

        # separate well-defined ifs for possible future shenanigans.
        if typee==0 and op==0 and plan_id!='' and flags==1:
            # start button
            # check if the start was given for our current plan
            current_mission_plan = self.bb.get(bb_enums.MISSION_PLAN_OBJ)
            self.bb.set(bb_enums.PLAN_IS_GO, True)
            self.bb.set(bb_enums.ENABLE_AUTONOMY, False)
            if current_mission_plan is not None and plan_id == current_mission_plan.plan_id:
                rospy.loginfo("Started plan:{}".format(plan_id))
            else:
                if current_mission_plan is None:
                    rospy.logwarn("Start given for plan:{} but we don't have a plan!".format(plan_id))
                else:
                    rospy.logwarn("Start given for plan:{} our plan:{}".format(plan_id, current_mission_plan.plan_id))

        if typee==0 and op==1 and plan_id=='' and flags==1:
            # stop button
            self.bb.set(bb_enums.PLAN_IS_GO, False)
            self.bb.set(bb_enums.ENABLE_AUTONOMY, False)

        # this string is hardcoded in Neptus, so we hardcode it here too!
        if typee==0 and op==0 and plan_id=='teleoperation-mode' and flags==0:
            # teleop button
            self.bb.set(bb_enums.ENABLE_AUTONOMY, True)
            rospy.logwarn_throttle_identical(10, "AUTONOMOUS MODE")

        # reset it until next message
        self.plan_control_msg = None

        return pt.Status.SUCCESS
class A_UpdateNeptusEstimatedState(pt.behaviour.Behaviour):
    def __init__(self,
                 estimated_state_topic,
                 gps_fix_topic,
                 gps_nav_data_topic):
        """
        Publish the vehicle's position/heading to Neptus as an EstimatedState
        plus mirrored NavSatFix messages on the gps fix and nav data topics.
        """
        super(A_UpdateNeptusEstimatedState, self).__init__("A_UpdateNeptusEstimatedState")
        self.bb = pt.blackboard.Blackboard()

        self.estimated_state_topic = estimated_state_topic
        self.estimated_state_pub = None
        # reused message objects to avoid re-allocating every tick
        self.e_state = EstimatedState()

        self.gps_fix_topic = gps_fix_topic
        self.gps_fix_pub = None
        self.gps_nav_data_topic = gps_nav_data_topic
        self.gps_nav_data_pub = None
        self.gps_fix = NavSatFix()

    def setup(self, timeout):
        self.estimated_state_pub = rospy.Publisher(self.estimated_state_topic, EstimatedState, queue_size=1)
        self.gps_fix_pub = rospy.Publisher(self.gps_fix_topic, NavSatFix, queue_size=1)
        self.gps_nav_data_pub = rospy.Publisher(self.gps_nav_data_topic, NavSatFix, queue_size=1)
        return True

    def update(self):
        lat = self.bb.get(bb_enums.CURRENT_LATITUDE)
        lon = self.bb.get(bb_enums.CURRENT_LONGITUDE)
        depth = self.bb.get(bb_enums.DEPTH)
        world_rot = self.bb.get(bb_enums.WORLD_ROT)

        if depth is None:
            # a missing depth is survivable; fall back to the surface
            reason = "depth was None, using 0"
            self.feedback_message = reason
            depth = 0

        if lat is None or lon is None or world_rot is None:
            rospy.logwarn_throttle_identical(10, "Could not update neptus estimated state because lat/lon/world_rot was None!")
            return pt.Status.SUCCESS

        # construct message for neptus (angles in radians)
        self.e_state.lat = np.radians(lat)
        self.e_state.lon = np.radians(lon)
        self.e_state.depth = depth
        roll, pitch, yaw = tf.transformations.euler_from_quaternion(world_rot)
        # Neptus' psi is measured from a different reference than our yaw
        self.e_state.psi = np.pi/2. - yaw
        # send the message to neptus
        self.estimated_state_pub.publish(self.e_state)

        # same thing with gps fix
        # the bridge only looks at lat lon height=altitude
        self.gps_fix.latitude = lat
        self.gps_fix.longitude = lon
        self.gps_fix.altitude = -depth
        self.gps_fix.header.seq = int(time.time())
        self.gps_fix_pub.publish(self.gps_fix)
        self.gps_nav_data_pub.publish(self.gps_fix)

        return pt.Status.SUCCESS
class A_UpdateNeptusPlanControlState(pt.behaviour.Behaviour):
    def __init__(self, plan_control_state_topic):
        """
        Publish a PlanControlState to Neptus describing the current plan,
        its progress and the vehicle's execution state.
        """
        super(A_UpdateNeptusPlanControlState, self).__init__("A_UpdateNeptusPlanControlState")
        self.bb = pt.blackboard.Blackboard()
        self.plan_control_state_pub = None
        self.plan_control_state_topic = plan_control_state_topic

    def setup(self, timeout):
        self.plan_control_state_pub = rospy.Publisher(self.plan_control_state_topic, PlanControlState, queue_size=1)
        return True

    def update(self):
        # construct current progress message for neptus
        msg = PlanControlState()
        tip_name = self.bb.get(bb_enums.TREE_TIP_NAME)
        tip_status = self.bb.get(bb_enums.TREE_TIP_STATUS)

        # this tip_status looks like: "Status.FAILURE"
        # I just wanna get the first letter after dot.
        msg.man_id = tip_name+'('+tip_status[7]+')'

        mission_plan = self.bb.get(bb_enums.MISSION_PLAN_OBJ)
        if mission_plan is None:
            msg.plan_id = 'No plan'
            msg.plan_progress = 100.0
        elif mission_plan.is_complete():
            msg.plan_id = 'Mission complete'
            msg.plan_progress = 100.0
        else:
            # plan in progress: report the current maneuver and % complete
            current_wp_index = mission_plan.current_wp_index
            current_man_id = mission_plan.waypoint_man_ids[current_wp_index]
            total = len(mission_plan.waypoints)
            msg.plan_id = str(mission_plan.plan_id)

            if self.bb.get(bb_enums.PLAN_IS_GO):
                msg.man_id = current_man_id

            plan_progress = (current_wp_index * 100.0) / total # percent float
            msg.plan_progress = plan_progress


        if tip_name in imc_enums.EXECUTING_ACTION_NAMES:
            msg.state = imc_enums.STATE_EXECUTING
        elif tip_name in imc_enums.BLOCKED_ACTION_NAMES:
            # safety fallback overrides whatever the plan says
            msg.state = imc_enums.STATE_BLOCKED
            msg.plan_id = 'SAFETY FALLBACK'
            msg.man_id = 'EMERGENCY'
            msg.plan_progress = 0.0
        else:
            msg.state = imc_enums.STATE_READY

        if self.bb.get(bb_enums.ENABLE_AUTONOMY):
            msg.plan_id += '(AUTONOMOUS)'

        # send message to neptus
        self.plan_control_state_pub.publish(msg)
        return pt.Status.SUCCESS
class A_UpdateNeptusVehicleState(pt.behaviour.Behaviour):
    def __init__(self, vehicle_state_topic):
        """Publish a VehicleState so Neptus shows a proper op mode instead of DISCONNECTED."""
        super(A_UpdateNeptusVehicleState, self).__init__("A_UpdateNeptusVehicleState")
        self.bb = pt.blackboard.Blackboard()
        self.vehicle_state_pub = None
        self.vehicle_state_topic = vehicle_state_topic

    def setup(self, timeout):
        self.vehicle_state_pub = rospy.Publisher(self.vehicle_state_topic, VehicleState, queue_size=1)
        return True

    def update(self):
        """
        this is the message that makes SAM:DISCONNECTED better.
        """
        tip_name = self.bb.get(bb_enums.TREE_TIP_NAME)

        # map the tree tip to one of Neptus' operating modes
        if tip_name in imc_enums.EXECUTING_ACTION_NAMES:
            op_mode = imc_enums.OP_MODE_MANEUVER
        elif tip_name == 'A_EmergencySurface':
            op_mode = imc_enums.OP_MODE_ERROR
        else:
            op_mode = imc_enums.OP_MODE_SERVICE

        vs = VehicleState()
        vs.op_mode = op_mode
        self.vehicle_state_pub.publish(vs)
        return pt.Status.SUCCESS
class A_UpdateNeptusPlanDB(pt.behaviour.Behaviour):
    """Speak the Neptus planDB protocol on a single topic (both directions).

    Receives plans (op=SET) from Neptus and answers its GET_INFO / GET_STATE
    queries so the uploaded plan shows as accepted in Neptus. Incoming
    messages are buffered by the subscriber callback and handled once per
    tick in update().
    """
    def __init__(self,
                 plandb_topic,
                 utm_link,
                 local_link,
                 latlontoutm_service_name,
                 latlontoutm_service_name_alternative):
        super(A_UpdateNeptusPlanDB, self).__init__("A_UpdateNeptusPlanDB")
        self.bb = pt.blackboard.Blackboard()
        # neptus sends lat/lon, which we convert to utm, which we then convert to local
        self.utm_link = utm_link
        self.local_link = local_link
        self.latlontoutm_service_name = latlontoutm_service_name
        self.latlontoutm_service_name_alternative = latlontoutm_service_name_alternative
        # the message body is largely the same, so we can re-use most of it
        self.plandb_msg = PlanDB()
        self.plandb_msg.type = imc_enums.PLANDB_TYPE_SUCCESS
        self.plandb_msg.op = imc_enums.PLANDB_OP_SET
        # publisher and subscriber share self.plandb_topic; both created in setup()
        self.plandb_pub = None
        self.plandb_sub = None
        # last message received from Neptus; consumed and cleared every tick
        self.latest_plandb_msg = None
        self.plandb_topic = plandb_topic
    def setup(self, timeout):
        """Create the planDB publisher/subscriber pair. Always succeeds."""
        self.plandb_pub = rospy.Publisher(self.plandb_topic, PlanDB, queue_size=1)
        self.plandb_sub = rospy.Subscriber(self.plandb_topic, PlanDB, callback=self.plandb_cb, queue_size=1)
        return True
    def plandb_cb(self, plandb_msg):
        """
        as an answer to OUR answer of 'type=succes, op=set', neptus sends a 'type=request, op=get_info'.
        """
        # rospy.loginfo("plandb_db {}".format(plandb_msg))
        # just buffer the message; update() does the handling on the tree's thread
        self.latest_plandb_msg = plandb_msg
    def make_plandb_info(self):
        """Summarize the current mission plan as a PlanDBInformation.

        Assumes MISSION_PLAN_OBJ is set on the blackboard; callers check first.
        """
        current_mission_plan = self.bb.get(bb_enums.MISSION_PLAN_OBJ)
        plan_info = PlanDBInformation()
        plan_info.plan_id = current_mission_plan.plan_id
        plan_info.md5 = current_mission_plan.plandb_msg.plan_spec_md5
        # creation_time is presumably milliseconds; neptus wants seconds -- hence /1000
        plan_info.change_time = current_mission_plan.creation_time/1000.0
        return plan_info
    def handle_request_get_info(self, plandb_msg):
        """Answer a GET_INFO request with info about the current plan, if any."""
        # we need to respond to this with some info... but what?
        rospy.loginfo_throttle_identical(30, "Got REQUEST GET_INFO planDB msg from Neptus")
        current_mission_plan = self.bb.get(bb_enums.MISSION_PLAN_OBJ)
        if current_mission_plan is None:
            return
        response = PlanDB()
        response.plan_id = current_mission_plan.plan_id
        response.type = imc_enums.PLANDB_TYPE_SUCCESS
        response.op = imc_enums.PLANDB_OP_GET_INFO
        response.plandb_information = self.make_plandb_info()
        self.plandb_pub.publish(response)
        rospy.loginfo_throttle_identical(30, "Answered GET_INFO for plan:"+str(response.plan_id))
    def handle_request_get_state(self, plandb_msg):
        """Answer a GET_STATE request, reporting a planDB containing one plan."""
        rospy.loginfo_throttle_identical(30, "Got REQUEST GET_STATE planDB msg from Neptus")
        current_mission_plan = self.bb.get(bb_enums.MISSION_PLAN_OBJ)
        if current_mission_plan is None:
            return
        # https://github.com/LSTS/imcjava/blob/d95fddeab4c439e603cf5e30a32979ad7ace5fbc/src/java/pt/lsts/imc/adapter/PlanDbManager.java#L160
        # See above for an example
        # TODO it seems like we need to keep a planDB ourselves on this side, collect all the plans we
        # received and answer this get_state with data from them all.
        # lets try telling neptus that we just got one plan, maybe that'll be okay?
        # seems alright, but after this message is sent, the plan goes red :/
        response = PlanDB()
        response.plan_id = current_mission_plan.plan_id
        response.type = imc_enums.PLANDB_TYPE_SUCCESS
        response.op = imc_enums.PLANDB_OP_GET_STATE
        response.plandb_state = PlanDBState()
        response.plandb_state.plan_count = 1
        response.plandb_state.plans_info.append(self.make_plandb_info())
        self.plandb_pub.publish(response)
        rospy.loginfo_throttle_identical(30, "Answered GET_STATE for plan:\n"+str(response.plan_id))
    def handle_set_plan(self, plandb_msg):
        """Build a MissionPlan from an op=SET message and install it on the blackboard.

        Also resets the autonomy / finalized / go flags so the new plan starts clean.
        """
        # there is a plan we can at least look at
        mission_plan = MissionPlan(plan_frame = self.utm_link,
                                   plandb_msg = plandb_msg,
                                   latlontoutm_service_name = self.latlontoutm_service_name,
                                   latlontoutm_service_name_alternative = self.latlontoutm_service_name_alternative,
                                   coverage_swath = self.bb.get(bb_enums.SWATH),
                                   vehicle_localization_error_growth = self.bb.get(bb_enums.LOCALIZATION_ERROR_GROWTH))
        if mission_plan.no_service:
            self.feedback_message = "MISSION PLAN HAS NO SERVICE"
            rospy.logerr(self.feedback_message)
            return
        self.bb.set(bb_enums.MISSION_PLAN_OBJ, mission_plan)
        self.bb.set(bb_enums.ENABLE_AUTONOMY, False)
        self.bb.set(bb_enums.MISSION_FINALIZED, False)
        self.bb.set(bb_enums.PLAN_IS_GO, False)
        rospy.loginfo_throttle_identical(5, "Set the mission plan to:{} and un-finalized the mission.".format(mission_plan))
    def handle_plandb_msg(self):
        """Dispatch the buffered planDB message on its (type, op) combination."""
        plandb_msg = self.latest_plandb_msg
        if plandb_msg is None:
            return
        typee = plandb_msg.type
        op = plandb_msg.op
        # request get_info
        if typee == imc_enums.PLANDB_TYPE_REQUEST and op == imc_enums.PLANDB_OP_GET_INFO:
            self.handle_request_get_info(plandb_msg)
        elif typee == imc_enums.PLANDB_TYPE_REQUEST and op == imc_enums.PLANDB_OP_GET_STATE:
            self.handle_request_get_state(plandb_msg)
        elif typee == imc_enums.PLANDB_TYPE_SUCCESS and op == imc_enums.PLANDB_OP_SET:
            self.feedback_message = "Got SUCCESS for plandb set"
        elif typee == imc_enums.PLANDB_TYPE_SUCCESS and op == imc_enums.PLANDB_OP_GET_INFO:
            self.feedback_message = "Got SUCCESS for plandb get info"
        elif typee == imc_enums.PLANDB_TYPE_SUCCESS and op == imc_enums.PLANDB_OP_GET_STATE:
            self.feedback_message = "Got SUCCESS for plandb get state"
        elif op == imc_enums.PLANDB_OP_SET:
            self.handle_set_plan(plandb_msg)
        else:
            self.feedback_message = "Got some unhandled planDB message:\n"+str(plandb_msg)
    def respond_set_success(self):
        """Re-send the 'SET succeeded' ack for the current plan every tick."""
        current_mission_plan = self.bb.get(bb_enums.MISSION_PLAN_OBJ)
        if current_mission_plan is None:
            self.feedback_message = "No mission plan obj!"
            return
        plan_id = current_mission_plan.plan_id
        self.plandb_msg.plan_id = plan_id
        self.plandb_pub.publish(self.plandb_msg)
        self.feedback_message = "Answered set success for plan_id:"+str(plan_id)
    def update(self):
        """Ack the current plan, handle any buffered message, clear the buffer."""
        # we just want to tell neptus we got the plan all the time
        # this keeps the thingy green
        self.respond_set_success()
        self.handle_plandb_msg()
        # reset
        self.latest_plandb_msg = None
        return pt.Status.SUCCESS
class A_UpdateMissonForPOI(pt.behaviour.Behaviour):
    """
    creates a new diamond-shaped mission over a detected POI
    and sets that as the current mission plan.
    always returns SUCCESS

    (currently disabled: update() returns FAILURE immediately, see XXX below)
    """
    def __init__(self, utm_link, poi_link, latlontoutm_service_name):
        """
        utm_link -- UTM frame id the mission is planned in
        poi_link -- frame id the POI point is reported in
        latlontoutm_service_name -- conversion service handed to MissionPlan
        """
        super(A_UpdateMissonForPOI, self).__init__(name="A_UpdateMissonForPOI")
        self.bb = pt.blackboard.Blackboard()
        self.utm_link = utm_link
        self.poi_link = poi_link
        self.tf_listener = tf.TransformListener()
        self.latlontoutm_service_name = latlontoutm_service_name
        # set True in setup() once the poi->utm transform is available
        self.poi_link_available = False
    def setup(self, timeout):
        """Wait for the poi->utm transform; leave updates disabled if it never appears."""
        try:
            rospy.loginfo_throttle(3, "Waiting for transform from {} to {}...".format(self.poi_link, self.utm_link))
            self.tf_listener.waitForTransform(self.poi_link, self.utm_link, rospy.Time(), rospy.Duration(timeout))
            rospy.loginfo_throttle(3, "...Got it")
            self.poi_link_available = True
        except Exception:
            # fix: was a bare `except:` which also swallowed SystemExit and
            # KeyboardInterrupt; tf/rospy raise Exception subclasses only
            rospy.logerr_throttle(5, "Could not find tf from:"+self.poi_link+" to:"+self.utm_link+" disabling updates")
        return True
    def update(self):
        #XXX UNTESTED STUFF HERE, RETURN FAILURE TO KEEP PPL
        #XXX FROM USING THIS ACTION
        return pt.Status.FAILURE
        # --- everything below is intentionally unreachable until tested ---
        if not self.poi_link_available:
            return pt.Status.FAILURE
        poi = self.bb.get(bb_enums.POI_POINT_STAMPED)
        if poi is None:
            return pt.Status.SUCCESS
        poi_local = self.tf_listener.transformPoint(self.utm_link, poi)
        x = poi_local.point.x
        y = poi_local.point.y
        depth = poi.point.z
        # construct the waypoints that we want to go to
        inspection_depth = max(1, depth - 5)
        radius = 10
        # go east,west,north,south,center
        # so we do bunch of fly-overs
        waypoints = [
            (x+radius, y, inspection_depth),
            (x-radius, y, inspection_depth),
            (x, y+radius, inspection_depth),
            (x, y-radius, inspection_depth),
            (x, y, 0)
        ]
        waypoint_man_ids = ['east', 'west', 'north', 'south', 'surface_center']
        # construct a planDB message to be given to the mission_plan
        # we will not fill the plan_spec of this plandb message,
        # and instead call a different constructor of MissionPlan
        # to bypass the lat/lon stuff
        pdb = PlanDB()
        pdb.request_id = 42
        pdb.plan_id = "POI"
        # set it in the tree
        mission_plan = MissionPlan(plan_frame = self.utm_link,
                                   plandb_msg = pdb,
                                   waypoints = waypoints,
                                   waypoint_man_ids=waypoint_man_ids,
                                   latlontoutm_service_name = self.latlontoutm_service_name)
        self.bb.set(bb_enums.MISSION_PLAN_OBJ, mission_plan)
        rospy.loginfo_throttle_identical(5, "Due to POI, set the mission plan to:"+str(mission_plan))
        return pt.Status.SUCCESS
class A_VizPublishPlan(pt.behaviour.Behaviour):
    """Publish the current mission plan's waypoints as a PoseArray (for viz)."""
    def __init__(self, plan_viz_topic):
        super(A_VizPublishPlan, self).__init__(name="A_VizPublishPlan")
        self.bb = pt.blackboard.Blackboard()
        # publisher is created in setup()
        self.pa_pub = None
        self.plan_viz_topic = plan_viz_topic

    def setup(self, timeout):
        """Create the PoseArray publisher. Never reports failure."""
        self.pa_pub = rospy.Publisher(self.plan_viz_topic, PoseArray, queue_size=1)
        return True

    def update(self):
        """Publish the plan's poses, or an empty PoseArray when there is no plan."""
        plan = self.bb.get(bb_enums.MISSION_PLAN_OBJ)
        if plan is None:
            pose_array = PoseArray()
        else:
            pose_array = plan.get_pose_array(flip_z=True)
        self.pa_pub.publish(pose_array)
        return pt.Status.SUCCESS
class A_FollowLeader(ptr.actions.ActionClient):
    """Action client that keeps moving the robot towards another TF link (the leader)."""
    def __init__(self,
                 action_namespace,
                 leader_link):
        """
        Runs an action server that will move the robot towards another tf link

        action_namespace -- namespace of the GotoWaypoint action server
        leader_link      -- TF frame to chase; used as the goal's frame_id
        """
        self.bb = pt.blackboard.Blackboard()
        # register this action in the shared list of maneuver actions
        list_of_maneuvers = self.bb.get(bb_enums.MANEUVER_ACTIONS)
        if list_of_maneuvers is None:
            list_of_maneuvers = ["A_FollowLeader"]
        else:
            list_of_maneuvers.append("A_FollowLeader")
        self.bb.set(bb_enums.MANEUVER_ACTIONS, list_of_maneuvers)
        self.action_goal_handle = None
        self.leader_link = leader_link
        # become action client
        ptr.actions.ActionClient.__init__(
            self,
            name="A_FollowLeader",
            action_spec=GotoWaypointAction,
            action_goal=None,
            action_namespace = action_namespace,
            override_feedback_message_on_running="Moving towards"+str(leader_link)
        )
        # set True in setup() once the action server is reachable
        self.action_server_ok = False
    def setup(self, timeout):
        """
        Overwriting the normal ptr action setup to stop it from failing the setup step
        and instead handling this failure in the tree.
        """
        self.logger.debug("%s.setup()" % self.__class__.__name__)
        self.action_client = actionlib.SimpleActionClient(
            self.action_namespace,
            self.action_spec
        )
        if not self.action_client.wait_for_server(rospy.Duration(timeout)):
            self.logger.error("{0}.setup() could not connect to the action server at '{1}'".format(self.__class__.__name__, self.action_namespace))
            self.action_client = None
        else:
            self.action_server_ok = True
        # always report setup success; unreachable server is surfaced in update()
        return True
    def initialise(self):
        """Build a fresh goal targeting the leader frame's origin."""
        # construct the message
        self.action_goal = GotoWaypointGoal()
        # leave 0,0,0 because we want to go to the frame's center
        self.action_goal.target_pose.header.frame_id = self.leader_link
        rospy.loginfo("Follow action goal initialized")
        # ensure that we still need to send the goal
        self.sent_goal = False
    def update(self):
        """
        Check only to see whether the underlying action server has
        succeeded, is running, or has cancelled/aborted for some reason and
        map these to the usual behaviour return states.
        """
        # guard: server was never reachable during setup()
        if not self.action_server_ok:
            self.feedback_message = "Action Server for follow leader action can not be used!"
            rospy.logerr_throttle_identical(5,self.feedback_message)
            return pt.Status.FAILURE
        # if your action client is not valid
        if not self.action_client:
            self.feedback_message = "ActionClient is invalid! Client:"+str(self.action_client)
            rospy.logerr(self.feedback_message)
            return pt.Status.FAILURE
        # if the action_goal is invalid
        if not self.action_goal:
            self.feedback_message = "No action_goal!"
            rospy.logwarn(self.feedback_message)
            return pt.Status.FAILURE
        # if goal hasn't been sent yet
        if not self.sent_goal:
            self.action_goal_handle = self.action_client.send_goal(self.action_goal, feedback_cb=self.feedback_cb)
            self.sent_goal = True
            rospy.loginfo("Sent goal to action server:"+str(self.action_goal))
            self.feedback_message = "Goal sent"
            return pt.Status.RUNNING
        # if the goal was aborted or preempted
        if self.action_client.get_state() in [actionlib_msgs.GoalStatus.ABORTED,
                                              actionlib_msgs.GoalStatus.PREEMPTED]:
            self.feedback_message = "Aborted goal"
            rospy.loginfo(self.feedback_message)
            return pt.Status.FAILURE
        result = self.action_client.get_result()
        # if the goal was accomplished
        if result:
            self.feedback_message = "Completed goal"
            rospy.loginfo(self.feedback_message)
            return pt.Status.SUCCESS
        return pt.Status.RUNNING
    def feedback_cb(self, msg):
        # feedback is intentionally ignored; only the action result matters
        pass
class A_ReadBuoys(pt.behaviour.Behaviour):
    '''
    This action reads the uncertain positions
    (mean and covariance) of buoys from the rostopic.
    '''
    def __init__(
            self,
            topic_name,
            buoy_link,
            utm_link,
            latlon_utm_serv,
    ):
        """
        topic_name      -- MarkerArray topic carrying the buoy markers
        buoy_link       -- TF frame the buoys are reported in
        utm_link        -- UTM TF frame (target of the transform waited on in setup)
        latlon_utm_serv -- lat/lon to utm service (stored but not used here yet)
        """
        # rostopic name and type (e.g. marker array)
        self.topic_name = topic_name
        # frame IDs for TF
        self.buoy_link = buoy_link
        self.utm_link = utm_link
        # lat/lon to utm service
        self.latlon_utm_serv = latlon_utm_serv
        # blackboard for info
        self.bb = pt.blackboard.Blackboard()
        # become a behaviour
        pt.behaviour.Behaviour.__init__(
            self,
            name="A_ReadBuoys"
        )
        # for coordinate frame transformations
        self.tf_listener = tf.TransformListener()
    def setup(self, timeout):
        """Wait for TF, then subscribe to the buoy markers. Always succeeds."""
        # wait for TF transformation
        try:
            rospy.loginfo('Waiting for transform from {} to {}.'.format(
                self.buoy_link,
                self.utm_link
            ))
            self.tf_listener.waitForTransform(
                self.buoy_link,
                self.utm_link,
                rospy.Time(),
                rospy.Duration(timeout)
            )
        except Exception:
            # fix: was a bare `except:`, which would also swallow
            # KeyboardInterrupt/SystemExit; tf raises Exception subclasses
            rospy.loginfo('Transform from {} to {} not found.'.format(
                self.buoy_link,
                self.utm_link
            ))
        # subscribe to buoy positions
        self.sub = rospy.Subscriber(
            self.topic_name,
            MarkerArray,
            callback=self.cb,
            queue_size=10
        )
        # no estimate until the first callback fires
        self.buoys = None
        return True
    def cb(self, msg):
        '''
        Read the buoy positions from the marker array (simulator buoys for
        now; eventually the SLAM backend). Markers are assumed to be in the
        map frame.

        NOTE(review): the reshape below assumes the buoys form a 3x3 grid
        (at minimum a marker count divisible by 3) -- confirm against the sim.
        '''
        # fix: an empty MarkerArray used to crash on the empty-array indexing
        # below; keep the previous estimate instead
        if not msg.markers:
            return
        # read positions straight off the markers; the original wrapped each
        # marker.pose in a PoseStamped only to read it back (the UTM transform
        # is commented out), so the wrapper was dropped
        positions = [[m.pose.position.x, m.pose.position.y, m.pose.position.z]
                     for m in msg.markers]
        arr = np.array(positions)
        # sort by x, group into rows of 3, then sort each group column-wise
        arr = arr[np.argsort(arr[:, 0])]
        arr = arr.reshape((-1, 3, 3))
        arr = np.sort(arr, axis=1)
        # single assignment at the end so update() never sees a half-built value
        self.buoys = dict(
            front=arr[:, 0, :],
            left=arr[0, :, :],
            back=arr[:, -1, :],
            right=arr[-1, :, :],
            all=arr
        )
    def update(self):
        # put the buoy positions in the blackboard
        self.bb.set(bb_enums.BUOYS, self.buoys)
        return pt.Status.SUCCESS
| [
"std_srvs.srv.SetBool",
"numpy.radians",
"rospy.logerr",
"rospy.logwarn",
"imc_ros_bridge.msg.EstimatedState",
"numpy.argsort",
"py_trees.behaviour.Behaviour.__init__",
"numpy.array",
"rospy.logwarn_throttle",
"tf.TransformListener",
"imc_ros_bridge.msg.PlanDBState",
"imc_ros_bridge.msg.PlanDB... | [((1112, 1138), 'py_trees.blackboard.Blackboard', 'pt.blackboard.Blackboard', ([], {}), '()\n', (1136, 1138), True, 'import py_trees as pt\n'), ((1238, 1245), 'std_msgs.msg.Empty', 'Empty', ([], {}), '()\n', (1243, 1245), False, 'from std_msgs.msg import Float64, Header, Bool, Empty\n'), ((1297, 1345), 'rospy.Publisher', 'rospy.Publisher', (['self.topic', 'Empty'], {'queue_size': '(1)'}), '(self.topic, Empty, queue_size=1)\n', (1312, 1345), False, 'import rospy\n'), ((2425, 2451), 'py_trees.blackboard.Blackboard', 'pt.blackboard.Blackboard', ([], {}), '()\n', (2449, 2451), True, 'import py_trees as pt\n'), ((2623, 2681), 'mission_log.MissionLog', 'MissionLog', ([], {'mission_plan': 'None', 'save_location': 'save_location'}), '(mission_plan=None, save_location=save_location)\n', (2633, 2681), False, 'from mission_log import MissionLog\n'), ((2777, 2824), 'rospy.loginfo', 'rospy.loginfo', (['"""Started new manual mission log"""'], {}), "('Started new manual mission log')\n", (2790, 2824), False, 'import rospy\n'), ((4780, 4806), 'py_trees.blackboard.Blackboard', 'pt.blackboard.Blackboard', ([], {}), '()\n', (4804, 4806), True, 'import py_trees as pt\n'), ((5439, 5465), 'py_trees.blackboard.Blackboard', 'pt.blackboard.Blackboard', ([], {}), '()\n', (5463, 5465), True, 'import py_trees as pt\n'), ((5613, 5672), 'mission_log.MissionLog', 'MissionLog', ([], {'mission_plan': 'mplan', 'save_location': 'save_location'}), '(mission_plan=mplan, save_location=save_location)\n', (5623, 5672), False, 'from mission_log import MissionLog\n'), ((5761, 5801), 'rospy.loginfo', 'rospy.loginfo', (['"""Started new mission log"""'], {}), "('Started new mission log')\n", (5774, 5801), False, 'import rospy\n'), ((8074, 8126), 'rospy.ServiceProxy', 'rospy.ServiceProxy', (['dvl_on_off_service_name', 'SetBool'], {}), '(dvl_on_off_service_name, SetBool)\n', (8092, 8126), False, 'import rospy\n'), ((8196, 8222), 'py_trees.blackboard.Blackboard', 
'pt.blackboard.Blackboard', ([], {}), '()\n', (8220, 8222), True, 'import py_trees as pt\n'), ((8242, 8251), 'std_srvs.srv.SetBool', 'SetBool', ([], {}), '()\n', (8249, 8251), False, 'from std_srvs.srv import SetBool\n'), ((8863, 8874), 'time.time', 'time.time', ([], {}), '()\n', (8872, 8874), False, 'import time\n'), ((10136, 10162), 'py_trees.blackboard.Blackboard', 'pt.blackboard.Blackboard', ([], {}), '()\n', (10160, 10162), True, 'import py_trees as pt\n'), ((10211, 10446), 'py_trees_ros.actions.ActionClient.__init__', 'ptr.actions.ActionClient.__init__', (['self'], {'name': '"""A_EmergencySurface"""', 'action_spec': 'GotoWaypointAction', 'action_goal': 'None', 'action_namespace': 'emergency_action_namespace', 'override_feedback_message_on_running': '"""EMERGENCY SURFACING"""'}), "(self, name='A_EmergencySurface',\n action_spec=GotoWaypointAction, action_goal=None, action_namespace=\n emergency_action_namespace, override_feedback_message_on_running=\n 'EMERGENCY SURFACING')\n", (10244, 10446), True, 'import py_trees_ros as ptr\n'), ((10848, 10917), 'actionlib.SimpleActionClient', 'actionlib.SimpleActionClient', (['self.action_namespace', 'self.action_spec'], {}), '(self.action_namespace, self.action_spec)\n', (10876, 10917), False, 'import actionlib\n'), ((11651, 11669), 'smarc_msgs.msg.GotoWaypointGoal', 'GotoWaypointGoal', ([], {}), '()\n', (11667, 11669), False, 'from smarc_msgs.msg import GotoWaypointAction, GotoWaypointGoal\n'), ((14109, 14135), 'py_trees.blackboard.Blackboard', 'pt.blackboard.Blackboard', ([], {}), '()\n', (14133, 14135), True, 'import py_trees as pt\n'), ((15359, 15385), 'py_trees.blackboard.Blackboard', 'pt.blackboard.Blackboard', ([], {}), '()\n', (15383, 15385), True, 'import py_trees as pt\n'), ((15789, 16002), 'py_trees_ros.actions.ActionClient.__init__', 'ptr.actions.ActionClient.__init__', (['self'], {'name': 'self.node_name', 'action_spec': 'GotoWaypointAction', 'action_goal': 'None', 'action_namespace': 'action_namespace', 
'override_feedback_message_on_running': '"""Moving to waypoint"""'}), "(self, name=self.node_name, action_spec=\n GotoWaypointAction, action_goal=None, action_namespace=action_namespace,\n override_feedback_message_on_running='Moving to waypoint')\n", (15822, 16002), True, 'import py_trees_ros as ptr\n'), ((16463, 16532), 'actionlib.SimpleActionClient', 'actionlib.SimpleActionClient', (['self.action_namespace', 'self.action_spec'], {}), '(self.action_namespace, self.action_spec)\n', (16491, 16532), False, 'import actionlib\n'), ((17993, 18011), 'smarc_msgs.msg.GotoWaypointGoal', 'GotoWaypointGoal', ([], {}), '()\n', (18009, 18011), False, 'from smarc_msgs.msg import GotoWaypointAction, GotoWaypointGoal\n'), ((21400, 21429), 'rospy.loginfo_throttle', 'rospy.loginfo_throttle', (['(5)', 'fb'], {}), '(5, fb)\n', (21422, 21429), False, 'import rospy\n'), ((21896, 21922), 'py_trees.blackboard.Blackboard', 'pt.blackboard.Blackboard', ([], {}), '()\n', (21920, 21922), True, 'import py_trees as pt\n'), ((22015, 22037), 'tf.TransformListener', 'tf.TransformListener', ([], {}), '()\n', (22035, 22037), False, 'import tf\n'), ((23852, 23866), 'geometry_msgs.msg.PointStamped', 'PointStamped', ([], {}), '()\n', (23864, 23866), False, 'from geometry_msgs.msg import PointStamped, PoseArray, PoseStamped\n'), ((23936, 23949), 'rospy.Time', 'rospy.Time', (['(0)'], {}), '(0)\n', (23946, 23949), False, 'import rospy\n'), ((24470, 24496), 'py_trees.blackboard.Blackboard', 'pt.blackboard.Blackboard', ([], {}), '()\n', (24494, 24496), True, 'import py_trees as pt\n'), ((24662, 24737), 'rospy.Subscriber', 'rospy.Subscriber', (['self.plan_control_topic', 'PlanControl', 'self.plancontrol_cb'], {}), '(self.plan_control_topic, PlanControl, self.plancontrol_cb)\n', (24678, 24737), False, 'import rospy\n'), ((27684, 27710), 'py_trees.blackboard.Blackboard', 'pt.blackboard.Blackboard', ([], {}), '()\n', (27708, 27710), True, 'import py_trees as pt\n'), ((27833, 27849), 
'imc_ros_bridge.msg.EstimatedState', 'EstimatedState', ([], {}), '()\n', (27847, 27849), False, 'from imc_ros_bridge.msg import EstimatedState, VehicleState, PlanDB, PlanDBInformation, PlanDBState, PlanControlState, PlanControl, PlanSpecification, Maneuver\n'), ((28039, 28050), 'sensor_msgs.msg.NavSatFix', 'NavSatFix', ([], {}), '()\n', (28048, 28050), False, 'from sensor_msgs.msg import NavSatFix\n'), ((28117, 28190), 'rospy.Publisher', 'rospy.Publisher', (['self.estimated_state_topic', 'EstimatedState'], {'queue_size': '(1)'}), '(self.estimated_state_topic, EstimatedState, queue_size=1)\n', (28132, 28190), False, 'import rospy\n'), ((28218, 28278), 'rospy.Publisher', 'rospy.Publisher', (['self.gps_fix_topic', 'NavSatFix'], {'queue_size': '(1)'}), '(self.gps_fix_topic, NavSatFix, queue_size=1)\n', (28233, 28278), False, 'import rospy\n'), ((28311, 28376), 'rospy.Publisher', 'rospy.Publisher', (['self.gps_nav_data_topic', 'NavSatFix'], {'queue_size': '(1)'}), '(self.gps_nav_data_topic, NavSatFix, queue_size=1)\n', (28326, 28376), False, 'import rospy\n'), ((29056, 29071), 'numpy.radians', 'np.radians', (['lat'], {}), '(lat)\n', (29066, 29071), True, 'import numpy as np\n'), ((29098, 29113), 'numpy.radians', 'np.radians', (['lon'], {}), '(lon)\n', (29108, 29113), True, 'import numpy as np\n'), ((29176, 29227), 'tf.transformations.euler_from_quaternion', 'tf.transformations.euler_from_quaternion', (['world_rot'], {}), '(world_rot)\n', (29216, 29227), False, 'import tf\n'), ((29979, 30005), 'py_trees.blackboard.Blackboard', 'pt.blackboard.Blackboard', ([], {}), '()\n', (30003, 30005), True, 'import py_trees as pt\n'), ((30184, 30262), 'rospy.Publisher', 'rospy.Publisher', (['self.plan_control_state_topic', 'PlanControlState'], {'queue_size': '(1)'}), '(self.plan_control_state_topic, PlanControlState, queue_size=1)\n', (30199, 30262), False, 'import rospy\n'), ((30377, 30395), 'imc_ros_bridge.msg.PlanControlState', 'PlanControlState', ([], {}), '()\n', (30393, 30395), 
False, 'from imc_ros_bridge.msg import EstimatedState, VehicleState, PlanDB, PlanDBInformation, PlanDBState, PlanControlState, PlanControl, PlanSpecification, Maneuver\n'), ((32248, 32274), 'py_trees.blackboard.Blackboard', 'pt.blackboard.Blackboard', ([], {}), '()\n', (32272, 32274), True, 'import py_trees as pt\n'), ((32432, 32501), 'rospy.Publisher', 'rospy.Publisher', (['self.vehicle_state_topic', 'VehicleState'], {'queue_size': '(1)'}), '(self.vehicle_state_topic, VehicleState, queue_size=1)\n', (32447, 32501), False, 'import rospy\n'), ((32647, 32661), 'imc_ros_bridge.msg.VehicleState', 'VehicleState', ([], {}), '()\n', (32659, 32661), False, 'from imc_ros_bridge.msg import EstimatedState, VehicleState, PlanDB, PlanDBInformation, PlanDBState, PlanControlState, PlanControl, PlanSpecification, Maneuver\n'), ((33422, 33448), 'py_trees.blackboard.Blackboard', 'pt.blackboard.Blackboard', ([], {}), '()\n', (33446, 33448), True, 'import py_trees as pt\n'), ((33864, 33872), 'imc_ros_bridge.msg.PlanDB', 'PlanDB', ([], {}), '()\n', (33870, 33872), False, 'from imc_ros_bridge.msg import EstimatedState, VehicleState, PlanDB, PlanDBInformation, PlanDBState, PlanControlState, PlanControl, PlanSpecification, Maneuver\n'), ((34188, 34244), 'rospy.Publisher', 'rospy.Publisher', (['self.plandb_topic', 'PlanDB'], {'queue_size': '(1)'}), '(self.plandb_topic, PlanDB, queue_size=1)\n', (34203, 34244), False, 'import rospy\n'), ((34271, 34357), 'rospy.Subscriber', 'rospy.Subscriber', (['self.plandb_topic', 'PlanDB'], {'callback': 'self.plandb_cb', 'queue_size': '(1)'}), '(self.plandb_topic, PlanDB, callback=self.plandb_cb,\n queue_size=1)\n', (34287, 34357), False, 'import rospy\n'), ((34771, 34790), 'imc_ros_bridge.msg.PlanDBInformation', 'PlanDBInformation', ([], {}), '()\n', (34788, 34790), False, 'from imc_ros_bridge.msg import EstimatedState, VehicleState, PlanDB, PlanDBInformation, PlanDBState, PlanControlState, PlanControl, PlanSpecification, Maneuver\n'), ((35143, 35230), 
'rospy.loginfo_throttle_identical', 'rospy.loginfo_throttle_identical', (['(30)', '"""Got REQUEST GET_INFO planDB msg from Neptus"""'], {}), "(30,\n 'Got REQUEST GET_INFO planDB msg from Neptus')\n", (35175, 35230), False, 'import rospy\n'), ((35378, 35386), 'imc_ros_bridge.msg.PlanDB', 'PlanDB', ([], {}), '()\n', (35384, 35386), False, 'from imc_ros_bridge.msg import EstimatedState, VehicleState, PlanDB, PlanDBInformation, PlanDBState, PlanControlState, PlanControl, PlanSpecification, Maneuver\n'), ((35811, 35899), 'rospy.loginfo_throttle_identical', 'rospy.loginfo_throttle_identical', (['(30)', '"""Got REQUEST GET_STATE planDB msg from Neptus"""'], {}), "(30,\n 'Got REQUEST GET_STATE planDB msg from Neptus')\n", (35843, 35899), False, 'import rospy\n'), ((36557, 36565), 'imc_ros_bridge.msg.PlanDB', 'PlanDB', ([], {}), '()\n', (36563, 36565), False, 'from imc_ros_bridge.msg import EstimatedState, VehicleState, PlanDB, PlanDBInformation, PlanDBState, PlanControlState, PlanControl, PlanSpecification, Maneuver\n'), ((36761, 36774), 'imc_ros_bridge.msg.PlanDBState', 'PlanDBState', ([], {}), '()\n', (36772, 36774), False, 'from imc_ros_bridge.msg import EstimatedState, VehicleState, PlanDB, PlanDBInformation, PlanDBState, PlanControlState, PlanControl, PlanSpecification, Maneuver\n'), ((40471, 40497), 'py_trees.blackboard.Blackboard', 'pt.blackboard.Blackboard', ([], {}), '()\n', (40495, 40497), True, 'import py_trees as pt\n'), ((40591, 40613), 'tf.TransformListener', 'tf.TransformListener', ([], {}), '()\n', (40611, 40613), False, 'import tf\n'), ((42524, 42532), 'imc_ros_bridge.msg.PlanDB', 'PlanDB', ([], {}), '()\n', (42530, 42532), False, 'from imc_ros_bridge.msg import EstimatedState, VehicleState, PlanDB, PlanDBInformation, PlanDBState, PlanControlState, PlanControl, PlanSpecification, Maneuver\n'), ((42642, 42816), 'mission_plan.MissionPlan', 'MissionPlan', ([], {'plan_frame': 'self.utm_link', 'plandb_msg': 'pdb', 'waypoints': 'waypoints', 'waypoint_man_ids': 
'waypoint_man_ids', 'latlontoutm_service_name': 'self.latlontoutm_service_name'}), '(plan_frame=self.utm_link, plandb_msg=pdb, waypoints=waypoints,\n waypoint_man_ids=waypoint_man_ids, latlontoutm_service_name=self.\n latlontoutm_service_name)\n', (42653, 42816), False, 'from mission_plan import MissionPlan\n'), ((43406, 43432), 'py_trees.blackboard.Blackboard', 'pt.blackboard.Blackboard', ([], {}), '()\n', (43430, 43432), True, 'import py_trees as pt\n'), ((43558, 43619), 'rospy.Publisher', 'rospy.Publisher', (['self.plan_viz_topic', 'PoseArray'], {'queue_size': '(1)'}), '(self.plan_viz_topic, PoseArray, queue_size=1)\n', (43573, 43619), False, 'import rospy\n'), ((44178, 44204), 'py_trees.blackboard.Blackboard', 'pt.blackboard.Blackboard', ([], {}), '()\n', (44202, 44204), True, 'import py_trees as pt\n'), ((45251, 45320), 'actionlib.SimpleActionClient', 'actionlib.SimpleActionClient', (['self.action_namespace', 'self.action_spec'], {}), '(self.action_namespace, self.action_spec)\n', (45279, 45320), False, 'import actionlib\n'), ((45780, 45798), 'smarc_msgs.msg.GotoWaypointGoal', 'GotoWaypointGoal', ([], {}), '()\n', (45796, 45798), False, 'from smarc_msgs.msg import GotoWaypointAction, GotoWaypointGoal\n'), ((45945, 45992), 'rospy.loginfo', 'rospy.loginfo', (['"""Follow action goal initialized"""'], {}), "('Follow action goal initialized')\n", (45958, 45992), False, 'import rospy\n'), ((48702, 48728), 'py_trees.blackboard.Blackboard', 'pt.blackboard.Blackboard', ([], {}), '()\n', (48726, 48728), True, 'import py_trees as pt\n'), ((48767, 48824), 'py_trees.behaviour.Behaviour.__init__', 'pt.behaviour.Behaviour.__init__', (['self'], {'name': '"""A_ReadBuoys"""'}), "(self, name='A_ReadBuoys')\n", (48798, 48824), True, 'import py_trees as pt\n'), ((48934, 48956), 'tf.TransformListener', 'tf.TransformListener', ([], {}), '()\n', (48954, 48956), False, 'import tf\n'), ((49605, 49684), 'rospy.Subscriber', 'rospy.Subscriber', (['self.topic_name', 'MarkerArray'], 
{'callback': 'self.cb', 'queue_size': '(10)'}), '(self.topic_name, MarkerArray, callback=self.cb, queue_size=10)\n', (49621, 49684), False, 'import rospy\n'), ((50885, 50905), 'numpy.array', 'np.array', (['self.buoys'], {}), '(self.buoys)\n', (50893, 50905), True, 'import numpy as np\n'), ((51040, 51067), 'numpy.sort', 'np.sort', (['self.buoys'], {'axis': '(1)'}), '(self.buoys, axis=1)\n', (51047, 51067), True, 'import numpy as np\n'), ((6048, 6112), 'rospy.loginfo', 'rospy.loginfo', (['"""Mission plan is None, can\'t make a log of this?"""'], {}), '("Mission plan is None, can\'t make a log of this?")\n', (6061, 6112), False, 'import rospy\n'), ((8985, 9054), 'rospy.loginfo_throttle_identical', 'rospy.loginfo_throttle_identical', (['(5)', '"""Waiting on DVL toggle cooldown"""'], {}), "(5, 'Waiting on DVL toggle cooldown')\n", (9017, 9054), False, 'import rospy\n'), ((9529, 9540), 'time.time', 'time.time', ([], {}), '()\n', (9538, 9540), False, 'import time\n'), ((11409, 11522), 'rospy.logwarn_throttle_identical', 'rospy.logwarn_throttle_identical', (['(5)', '"""No Action Server found for emergency action, will just block the tree!"""'], {}), "(5,\n 'No Action Server found for emergency action, will just block the tree!')\n", (11441, 11522), False, 'import rospy\n'), ((11864, 11921), 'rospy.logerr_throttle_identical', 'rospy.logerr_throttle_identical', (['(5)', 'self.feedback_message'], {}), '(5, self.feedback_message)\n', (11895, 11921), False, 'import rospy\n'), ((12135, 12193), 'rospy.logwarn_throttle_identical', 'rospy.logwarn_throttle_identical', (['(5)', 'self.feedback_message'], {}), '(5, self.feedback_message)\n', (12167, 12193), False, 'import rospy\n'), ((12370, 12406), 'rospy.logwarn', 'rospy.logwarn', (['self.feedback_message'], {}), '(self.feedback_message)\n', (12383, 12406), False, 'import rospy\n'), ((13120, 13156), 'rospy.loginfo', 'rospy.loginfo', (['self.feedback_message'], {}), '(self.feedback_message)\n', (13133, 13156), False, 'import 
rospy\n'), ((13373, 13409), 'rospy.loginfo', 'rospy.loginfo', (['self.feedback_message'], {}), '(self.feedback_message)\n', (13386, 13409), False, 'import rospy\n'), ((14380, 14431), 'rospy.logwarn_throttle', 'rospy.logwarn_throttle', (['(5)', '"""Mission plan was None!"""'], {}), "(5, 'Mission plan was None!')\n", (14402, 14431), False, 'import rospy\n'), ((16984, 17055), 'rospy.logwarn_throttle', 'rospy.logwarn_throttle', (['(5)', '"""No action server found for A_GotoWaypoint!"""'], {}), "(5, 'No action server found for A_GotoWaypoint!')\n", (17006, 17055), False, 'import rospy\n'), ((17183, 17222), 'rospy.logwarn', 'rospy.logwarn', (['"""No mission plan found!"""'], {}), "('No mission plan found!')\n", (17196, 17222), False, 'import rospy\n'), ((17322, 17425), 'rospy.loginfo', 'rospy.loginfo', (['"""No wp found to execute! Does the plan have any waypoints that we understand?"""'], {}), "(\n 'No wp found to execute! Does the plan have any waypoints that we understand?'\n )\n", (17335, 17425), False, 'import rospy\n'), ((17734, 17810), 'rospy.loginfo', 'rospy.loginfo', (['"""THIS IS A GOTO MANEUVER, WE ARE USING IT FOR SOMETHING ELSE"""'], {}), "('THIS IS A GOTO MANEUVER, WE ARE USING IT FOR SOMETHING ELSE')\n", (17747, 17810), False, 'import rospy\n'), ((19664, 19721), 'rospy.logerr_throttle_identical', 'rospy.logerr_throttle_identical', (['(5)', 'self.feedback_message'], {}), '(5, self.feedback_message)\n', (19695, 19721), False, 'import rospy\n'), ((19946, 19981), 'rospy.logerr', 'rospy.logerr', (['self.feedback_message'], {}), '(self.feedback_message)\n', (19958, 19981), False, 'import rospy\n'), ((20159, 20195), 'rospy.logwarn', 'rospy.logwarn', (['self.feedback_message'], {}), '(self.feedback_message)\n', (20172, 20195), False, 'import rospy\n'), ((20894, 20930), 'rospy.loginfo', 'rospy.loginfo', (['self.feedback_message'], {}), '(self.feedback_message)\n', (20907, 20930), False, 'import rospy\n'), ((21182, 21218), 'rospy.loginfo', 'rospy.loginfo', 
(['self.feedback_message'], {}), '(self.feedback_message)\n', (21195, 21218), False, 'import rospy\n'), ((22389, 22427), 'rospy.loginfo_throttle', 'rospy.loginfo_throttle', (['(3)', '"""...Got it"""'], {}), "(3, '...Got it')\n", (22411, 22427), False, 'import rospy\n'), ((23213, 23224), 'time.time', 'time.time', ([], {}), '()\n', (23222, 23224), False, 'import time\n'), ((27214, 27269), 'rospy.logwarn_throttle_identical', 'rospy.logwarn_throttle_identical', (['(10)', '"""AUTONOMOUS MODE"""'], {}), "(10, 'AUTONOMOUS MODE')\n", (27246, 27269), False, 'import rospy\n'), ((28836, 28960), 'rospy.logwarn_throttle_identical', 'rospy.logwarn_throttle_identical', (['(10)', '"""Could not update neptus estimated state because lat/lon/world_rot was None!"""'], {}), "(10,\n 'Could not update neptus estimated state because lat/lon/world_rot was None!'\n )\n", (28868, 28960), False, 'import rospy\n'), ((29606, 29617), 'time.time', 'time.time', ([], {}), '()\n', (29615, 29617), False, 'import time\n'), ((37780, 37815), 'rospy.logerr', 'rospy.logerr', (['self.feedback_message'], {}), '(self.feedback_message)\n', (37792, 37815), False, 'import rospy\n'), ((41009, 41047), 'rospy.loginfo_throttle', 'rospy.loginfo_throttle', (['(3)', '"""...Got it"""'], {}), "(3, '...Got it')\n", (41031, 41047), False, 'import rospy\n'), ((43837, 43848), 'geometry_msgs.msg.PoseArray', 'PoseArray', ([], {}), '()\n', (43846, 43848), False, 'from geometry_msgs.msg import PointStamped, PoseArray, PoseStamped\n'), ((46468, 46525), 'rospy.logerr_throttle_identical', 'rospy.logerr_throttle_identical', (['(5)', 'self.feedback_message'], {}), '(5, self.feedback_message)\n', (46499, 46525), False, 'import rospy\n'), ((46750, 46785), 'rospy.logerr', 'rospy.logerr', (['self.feedback_message'], {}), '(self.feedback_message)\n', (46762, 46785), False, 'import rospy\n'), ((46963, 46999), 'rospy.logwarn', 'rospy.logwarn', (['self.feedback_message'], {}), '(self.feedback_message)\n', (46976, 46999), False, 'import 
rospy\n'), ((47698, 47734), 'rospy.loginfo', 'rospy.loginfo', (['self.feedback_message'], {}), '(self.feedback_message)\n', (47711, 47734), False, 'import rospy\n'), ((47946, 47982), 'rospy.loginfo', 'rospy.loginfo', (['self.feedback_message'], {}), '(self.feedback_message)\n', (47959, 47982), False, 'import rospy\n'), ((50339, 50390), 'geometry_msgs.msg.PoseStamped', 'PoseStamped', ([], {'header': 'marker.header', 'pose': 'marker.pose'}), '(header=marker.header, pose=marker.pose)\n', (50350, 50390), False, 'from geometry_msgs.msg import PointStamped, PoseArray, PoseStamped\n'), ((50938, 50966), 'numpy.argsort', 'np.argsort', (['self.buoys[:, 0]'], {}), '(self.buoys[:, 0])\n', (50948, 50966), True, 'import numpy as np\n'), ((1464, 1475), 'time.time', 'time.time', ([], {}), '()\n', (1473, 1475), False, 'import time\n'), ((1853, 1864), 'time.time', 'time.time', ([], {}), '()\n', (1862, 1864), False, 'import time\n'), ((11002, 11025), 'rospy.Duration', 'rospy.Duration', (['timeout'], {}), '(timeout)\n', (11016, 11025), False, 'import rospy\n'), ((16617, 16640), 'rospy.Duration', 'rospy.Duration', (['timeout'], {}), '(timeout)\n', (16631, 16640), False, 'import rospy\n'), ((19007, 19081), 'rospy.logwarn_throttle', 'rospy.logwarn_throttle', (['(1)', '"""Speed control of the waypoint action is NONE!"""'], {}), "(1, 'Speed control of the waypoint action is NONE!')\n", (19029, 19081), False, 'import rospy\n'), ((22338, 22350), 'rospy.Time', 'rospy.Time', ([], {}), '()\n', (22348, 22350), False, 'import rospy\n'), ((22352, 22375), 'rospy.Duration', 'rospy.Duration', (['timeout'], {}), '(timeout)\n', (22366, 22375), False, 'import rospy\n'), ((22486, 22622), 'rospy.logerr_throttle', 'rospy.logerr_throttle', (['(5)', "('Could not find from ' + self.utm_link + ' to ' + self.base_link +\n '... Nothing except safety will be run')"], {}), "(5, 'Could not find from ' + self.utm_link + ' to ' +\n self.base_link + '... 
Nothing except safety will be run')\n", (22507, 22622), False, 'import rospy\n'), ((22726, 22737), 'time.time', 'time.time', ([], {}), '()\n', (22735, 22737), False, 'import time\n'), ((23164, 23177), 'rospy.Time', 'rospy.Time', (['(0)'], {}), '(0)\n', (23174, 23177), False, 'import rospy\n'), ((23483, 23569), 'rospy.logerr_throttle_identical', 'rospy.logerr_throttle_identical', (['(5)', '"""Could not do tf lookup for some other reason"""'], {}), "(5,\n 'Could not do tf lookup for some other reason')\n", (23514, 23569), False, 'import rospy\n'), ((40958, 40970), 'rospy.Time', 'rospy.Time', ([], {}), '()\n', (40968, 40970), False, 'import rospy\n'), ((40972, 40995), 'rospy.Duration', 'rospy.Duration', (['timeout'], {}), '(timeout)\n', (40986, 40995), False, 'import rospy\n'), ((41119, 41238), 'rospy.logerr_throttle', 'rospy.logerr_throttle', (['(5)', "('Could not find tf from:' + self.poi_link + ' to:' + self.utm_link +\n ' disabling updates')"], {}), "(5, 'Could not find tf from:' + self.poi_link + ' to:' +\n self.utm_link + ' disabling updates')\n", (41140, 41238), False, 'import rospy\n'), ((45405, 45428), 'rospy.Duration', 'rospy.Duration', (['timeout'], {}), '(timeout)\n', (45419, 45428), False, 'import rospy\n'), ((49315, 49327), 'rospy.Time', 'rospy.Time', ([], {}), '()\n', (49325, 49327), False, 'import rospy\n'), ((49345, 49368), 'rospy.Duration', 'rospy.Duration', (['timeout'], {}), '(timeout)\n', (49359, 49368), False, 'import rospy\n'), ((2102, 2132), 'rospy.logwarn_throttle', 'rospy.logwarn_throttle', (['(1)', 'msg'], {}), '(1, msg)\n', (2124, 2132), False, 'import rospy\n')] |
from __future__ import annotations
import logging
import os
import os.path
from ..util import gcs
logger = logging.getLogger(os.path.basename(__file__))
class Image:
    """A remote image identified by its resource URL.

    Thin wrapper that knows how to render itself as its URL and how to
    delete the underlying resource via the ``gcs`` helper module.
    """

    def __init__(self, url: str) -> None:
        """Store the resource URL; the URL is the image's only state."""
        self.url = url

    def __str__(self) -> str:
        """Return the resource URL (used in log messages)."""
        return self.url

    def __repr__(self) -> str:
        """Return an unambiguous representation for debugging."""
        return f'{type(self).__name__}({self.url!r})'

    def delete(self):
        """Delete the remote resource behind this image and log the removal."""
        # NOTE(review): assumes gcs.delete_resource raises on failure —
        # the success log below would otherwise be misleading; confirm.
        gcs.delete_resource(self.url)
        logger.info(f'{self} is deleted')

    @classmethod
    def from_url(cls, url: str) -> 'Image':
        """Alternate constructor: build an Image from a resource URL."""
        return cls(url)
| [
"os.path.basename"
] | [((128, 154), 'os.path.basename', 'os.path.basename', (['__file__'], {}), '(__file__)\n', (144, 154), False, 'import os\n')] |