code
stringlengths 22
1.05M
| apis
listlengths 1
3.31k
| extract_api
stringlengths 75
3.25M
|
|---|---|---|
"""
Flask settings file.
For the full list of settings and their values, visit
http://flask.pocoo.org/docs/0.12/config/
"""
import os

# FLASK_DEBUG toggles debug mode; accept the common truthy spellings.
# The membership test already yields a bool -- no need for `True if ... else False`.
DEBUG = os.environ.get('FLASK_DEBUG') in ('1', 'True', 'true')

# Secret key used by Flask for session signing; placeholder replaced at deploy time.
SECRET_KEY = '<KEY>'
|
[
"os.environ.get"
] |
[((153, 182), 'os.environ.get', 'os.environ.get', (['"""FLASK_DEBUG"""'], {}), "('FLASK_DEBUG')\n", (167, 182), False, 'import os\n')]
|
#!/usr/bin/python
from kafka import KafkaProducer

# Brokers of the target Kafka cluster (SIT environment).
kafkaHosts = [
    "kafka01.paas.longfor.sit:9092",
    "kafka02.paas.longfor.sit:9092",
    "kafka03.paas.longfor.sit:9092",
]

# Send a fixed batch of test messages to the prototype log topic,
# then flush so everything is actually delivered before exit.
producer = KafkaProducer(bootstrap_servers=kafkaHosts)
for _ in range(20):
    producer.send("testapplog_plm-prototype", b"Hello....")
producer.flush()
|
[
"kafka.KafkaProducer"
] |
[((199, 242), 'kafka.KafkaProducer', 'KafkaProducer', ([], {'bootstrap_servers': 'kafkaHosts'}), '(bootstrap_servers=kafkaHosts)\n', (212, 242), False, 'from kafka import KafkaProducer\n')]
|
from setuptools import setup, find_packages
# Packaging metadata for the django-jsx distribution.
setup(
    name='django-jsx',
    version='0.4.0',
    author='<NAME>',
    author_email='<EMAIL>',
    # Ship every discovered package except the demo project.
    packages=find_packages(exclude=['sample_project']),
    include_package_data=True,
    license='BSD',
    description='Integration library for React/JSX and Django',
    classifiers=[
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Programming Language :: Python',
    ],
)
|
[
"setuptools.find_packages"
] |
[((158, 199), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['sample_project']"}), "(exclude=['sample_project'])\n", (171, 199), False, 'from setuptools import setup, find_packages\n')]
|
import copy
class Filledlist(list):
    """A list pre-populated with ``count`` shallow copies of ``value``.

    Args:
        count: number of elements to create.
        value: template element; each slot gets its own ``copy.copy`` so
            mutable values do not all alias a single object.
        *args, **kwargs: accepted for signature compatibility but unused.
    """

    def __init__(self, count, value, *args, **kwargs):
        super().__init__()
        for _ in range(count):
            # BUGFIX: was `slef.append(...)` -- a NameError on first use.
            self.append(copy.copy(value))
|
[
"copy.copy"
] |
[((171, 187), 'copy.copy', 'copy.copy', (['value'], {}), '(value)\n', (180, 187), False, 'import copy\n')]
|
from torch.utils.data import DataLoader
import torchvision
from torchvision.transforms import ToTensor, Normalize
class DataGetter():
    """Helper class for getting various torchvision transformations and data loaders
    Args:
        self.dataset (torch.utils.data.Dataset): data set from which to load the data
        self.name ("MNIST" or "CIFAR10"): Name of data set used
    """
    def __init__(self, dataset, name):
        self.dataset = dataset
        self.name = name

    def get_transformer(self):
        """Get transformer depending on self.name
        Returns:
            torchvision.transforms.Transforms: Normalization based on data set depending
            on attribute self.name (implicitly None for any other name)"""
        # The operations which are being applied to each image before training
        if self.name == 'MNIST':
            return torchvision.transforms.Compose([
                ToTensor(),
                Normalize((0.1307,), (0.3081,))
            ])
        elif self.name == 'CIFAR10':
            return torchvision.transforms.Compose([
                ToTensor(),
                Normalize((0.4914, 0.4822, 0.4465),
                          (0.2023, 0.1994, 0.2010))
            ])

    def get_inverse_transformer(self):
        """Reverses the transformations of 'get_transformer'. This is required for the PGD algorithm"""
        if self.name == 'CIFAR10':
            return torchvision.transforms.Compose([
                Normalize((0, 0, 0),
                          (1 / 0.2023, 1 / 0.1994, 1 / 0.2010)),
                Normalize((-0.4914, -0.4822, -0.4465),
                          (1, 1, 1)),
            ])
        if self.name == 'MNIST':
            return torchvision.transforms.Compose([
                ToTensor(),
                Normalize((0.,), (1/0.3081,)),  # Std of MNIST /255 = 0.3081
                Normalize((-0.1307,), (1.,))  # Mean of MNIST /255 = 0.1307
            ])

    def get_dataloader(self, split, batch_size=64, shuffle=True):
        """Get a dataloader for specified parameters.
        Args:
            split ("train" or "test"): split for data set to use
            batch_size (int): batch size of data set
            shuffle (bool): if True, will shuffle the data set, otherwise not
        Returns:
            torch.utils.data.DataLoader: The dataloader
        Raises:
            Exception: If specified split is neither "train" nor "test"
        """
        if split == "train":
            train = True
        elif split == "test":
            train = False
        else:
            raise Exception("The specified split '" +
                            str(split) + "' is not supported. Please use one of ['train', 'test']")
        # BUGFIX: the `shuffle` argument was previously ignored -- both the
        # train and test branches hard-coded shuffle=True.
        return DataLoader(
            self.dataset('./data/' + self.name + '/', train=train, download=True,
                         transform=self.get_transformer()),
            batch_size=batch_size, shuffle=shuffle)
|
[
"torchvision.transforms.Normalize",
"torchvision.transforms.ToTensor"
] |
[((895, 905), 'torchvision.transforms.ToTensor', 'ToTensor', ([], {}), '()\n', (903, 905), False, 'from torchvision.transforms import ToTensor, Normalize\n'), ((923, 954), 'torchvision.transforms.Normalize', 'Normalize', (['(0.1307,)', '(0.3081,)'], {}), '((0.1307,), (0.3081,))\n', (932, 954), False, 'from torchvision.transforms import ToTensor, Normalize\n'), ((1453, 1510), 'torchvision.transforms.Normalize', 'Normalize', (['(0, 0, 0)', '(1 / 0.2023, 1 / 0.1994, 1 / 0.201)'], {}), '((0, 0, 0), (1 / 0.2023, 1 / 0.1994, 1 / 0.201))\n', (1462, 1510), False, 'from torchvision.transforms import ToTensor, Normalize\n'), ((1555, 1604), 'torchvision.transforms.Normalize', 'Normalize', (['(-0.4914, -0.4822, -0.4465)', '(1, 1, 1)'], {}), '((-0.4914, -0.4822, -0.4465), (1, 1, 1))\n', (1564, 1604), False, 'from torchvision.transforms import ToTensor, Normalize\n'), ((1748, 1758), 'torchvision.transforms.ToTensor', 'ToTensor', ([], {}), '()\n', (1756, 1758), False, 'from torchvision.transforms import ToTensor, Normalize\n'), ((1776, 1808), 'torchvision.transforms.Normalize', 'Normalize', (['(0.0,)', '(1 / 0.3081,)'], {}), '((0.0,), (1 / 0.3081,))\n', (1785, 1808), False, 'from torchvision.transforms import ToTensor, Normalize\n'), ((1853, 1882), 'torchvision.transforms.Normalize', 'Normalize', (['(-0.1307,)', '(1.0,)'], {}), '((-0.1307,), (1.0,))\n', (1862, 1882), False, 'from torchvision.transforms import ToTensor, Normalize\n'), ((1075, 1085), 'torchvision.transforms.ToTensor', 'ToTensor', ([], {}), '()\n', (1083, 1085), False, 'from torchvision.transforms import ToTensor, Normalize\n'), ((1103, 1163), 'torchvision.transforms.Normalize', 'Normalize', (['(0.4914, 0.4822, 0.4465)', '(0.2023, 0.1994, 0.201)'], {}), '((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.201))\n', (1112, 1163), False, 'from torchvision.transforms import ToTensor, Normalize\n')]
|
import requests, json
import sys
from colorama import init, Fore
# Interactive OSINT helper: shows a menu, reads one choice, queries the
# matching public API and pretty-prints the JSON response fields.
init(autoreset=True)  # colorama: reset colors automatically after each print
bold = '\033[01m'  # ANSI bold escape sequence

print(Fore.RED+"""
_________________________________
| |
| |
▀▀█▀▀ ░█─░█ ▀█▀ ░█▀▀█ ░█▀▀▄ ░█▀▀▀ ░█──░█ ░█▀▀▀
─░█── ░█▀▀█ ░█─ ░█▄▄▀ ░█─░█ ░█▀▀▀ ░█▄▄▄█ ░█▀▀▀
─░█── ░█─░█ ▄█▄ ░█─░█ ░█▄▄▀ ░█▄▄▄ ──░█── ░█▄▄▄
| |
| |
|_________________________________|
(___________________________________)
v1.0
Coded By H1mzy0t1 :)
""")
print(Fore.YELLOW+bold+" /^\ A Monkey on your roof /^\ \n")
print(Fore.GREEN+bold+"(1) IP Geolocate ")
print(Fore.GREEN+bold+"(2) My IP Geolocate")
print(Fore.GREEN+bold+"(3) Domain Age Checker")
print(Fore.GREEN+bold+"(4) Whois Lookup")
print(Fore.GREEN+bold+"(5) Phone Number Info ")
print(Fore.GREEN+bold+"(6) Email Verify ")
print(Fore.GREEN+bold+"(7) Exit ")
inpt = str(input(Fore.RED+"[+] "))
info = (Fore.RED+bold+"GETTING YOUR INFO............../"+bold)
if inpt == "1":
    # Geolocate an arbitrary IP (empty input geolocates the caller).
    print("*If this field remains empty it will show your geolocation!")
    ip = str(input(Fore.BLUE+"ENTER THE IP : "))
    api = "http://ip-api.com/json/"
    print(info)
    try:
        data = requests.get(api+ip).json()
        print(Fore.WHITE+bold+"Result :", data['status'])
        print(Fore.WHITE+bold+"ISP :", data['isp'])
        print(Fore.WHITE+bold+"City :", data['city'])
        print(Fore.WHITE+bold+"Region :", data['regionName'])
        print(Fore.WHITE+bold+"Country :", data['country'])
        print(Fore.WHITE+bold+"Country Code :", data['countryCode'])
        print(Fore.WHITE+bold+"Postal :", data['zip'])
        print(Fore.WHITE+bold+"Timezone :", data['timezone'])
    except requests.exceptions.ConnectionError:
        print(Fore.RED+bold+"Please check your internet connection!")
    except Exception:
        # Was a bare `except:` -- narrowed so Ctrl-C / SystemExit still propagate.
        print(Fore.RED+bold+"SOMETHING WENT WRONG, PLEASE TRY AGAIN")
elif inpt == "2":
    # Geolocate the caller's own IP (no address appended to the API URL).
    api = "http://ip-api.com/json/"
    print(info)
    try:
        data = requests.get(api).json()
        print(Fore.WHITE+bold+"Result :", data['status'])
        print(Fore.WHITE+bold+"ISP :", data['isp'])
        print(Fore.WHITE+bold+"City :", data['city'])
        print(Fore.WHITE+bold+"Region :", data['regionName'])
        print(Fore.WHITE+bold+"Country :", data['country'])
        print(Fore.WHITE+bold+"Country Code :", data['countryCode'])
        print(Fore.WHITE+bold+"Postal :", data['zip'])
        print(Fore.WHITE+bold+"Timezone :", data['timezone'])
    except requests.exceptions.ConnectionError:
        print(Fore.RED+bold+"Please check your internet connection!")
    except Exception:
        print(Fore.RED+bold+"SOMETHING WENT WRONG, PLEASE TRY AGAIN")
elif inpt == "3":
    # Domain age lookup via payapi.io.
    domain = str(input(Fore.BLUE+"ENTER THE DOMAIN : "))
    api = "https://input.payapi.io/v1/api/fraud/domain/"
    print(info)
    try:
        data = requests.get(api+domain).json()
        print(Fore.WHITE+bold+"Domain : ", data['domain'])
        print(Fore.WHITE+bold+"Age :", data['result'], "Days old")
        print(Fore.WHITE+bold+"Result :", data['message'])
    except requests.exceptions.ConnectionError:
        print(Fore.RED+bold+"Please check your internet connection!")
    except Exception:
        print(Fore.RED+bold+"SOMETHING WENT WRONG, PLEASE TRY AGAIN")
elif inpt == "4":
    # Whois lookup via ip2whois (note: API key is embedded in the URL).
    domainw = str(input(Fore.RED+"ENTER THE DOMAIN : "))
    api2 = "https://api.ip2whois.com/v1?key=YG8PVA9QRQPEEIKQZXZ1H62UDD4GTXZB&domain="
    try:
        data = requests.get(api2+domainw).json()
        print(Fore.WHITE+bold+"Domain :", data['domain'])
        print(Fore.WHITE+bold+"Age :", data['domain_age'])
        print(Fore.WHITE+bold+"Created :", data['create_date'])
        print(Fore.WHITE+bold+"Updated :", data['update_date'])
        print(Fore.WHITE+bold+"Expire :", data['expire_date'])
        print(Fore.WHITE+bold+"Registrar :", data['registrar'])
        print(Fore.WHITE+bold+"Registrant :", data['registrant'])
        print(Fore.WHITE+bold+"More info :", data['nameservers'])
    except requests.exceptions.ConnectionError:
        print(Fore.RED+bold+"Please check your internet connection!")
    except Exception:
        print(Fore.RED+bold+"SOMETHING WENT WRONG, PLEASE TRY AGAIN")
elif inpt == "5":
    # Phone-number validation via apilayer numverify.
    print("*Use upper case for country code!")
    code = str(input(Fore.BLUE+"ENTER COUNTRY CODE : "))
    number = (input("ENTER THE NUMBER : "))
    api = "http://apilayer.net/api/validate?access_key=62cc0533d5b4f9966528d3c104353993&number="
    mid = "&country_code="
    last = "&format=1"
    print(info)
    try:
        data = requests.get(api+number+mid+code+last).json()
        print(Fore.WHITE+bold+"Operation STATUS :", data['valid'])
        print(Fore.WHITE+bold+"Prifix :", data['country_prefix'])
        print(Fore.WHITE+bold+"Country Code :", data['country_code'])
        print(Fore.WHITE+bold+"Country Name :", data['country_name'])
        print(Fore.WHITE+bold+"Region :", data['location'])
        print(Fore.WHITE+bold+"Carrier :", data['carrier'])
        print(Fore.WHITE+bold+"Line Type :", data['line_type'])
    except requests.exceptions.ConnectionError:
        print(Fore.RED+bold+"Please check your internet connection!")
    except Exception:
        print(Fore.RED+bold+"SOMETHING WENT WRONG, PLEASE TRY AGAIN")
elif inpt == "6":
    # Email verification via apilayer mailboxlayer.
    inp = (input(Fore.BLUE+bold+"ENTER THE EMAIL : "))
    api3 = "http://apilayer.net/api/check?access_key=<KEY>"
    api3l = "&email="
    print(info)
    try:
        data = requests.get(api3+api3l+inp).json()
        print(Fore.WHITE+"Email : ", data['email'])
        print(Fore.WHITE+bold+"User :", data['user'])
        print(Fore.WHITE+bold+"Domain :", data['domain'])
        print(Fore.WHITE+bold+"Format :", data['format_valid'])
        print(Fore.WHITE+bold+"SMTP :", data['smtp_check'])
    except requests.exceptions.ConnectionError:
        print(Fore.RED+bold+"Please check your internet connection!")
    except Exception:
        print(Fore.RED+bold+"SOMETHING WENT WRONG, PLEASE TRY AGAIN")
if inpt == "7":
    print(Fore.RED+"Exiting..../")
    sys.exit(0)
|
[
"colorama.init",
"requests.get",
"sys.exit"
] |
[((65, 85), 'colorama.init', 'init', ([], {'autoreset': '(True)'}), '(autoreset=True)\n', (69, 85), False, 'from colorama import init, Fore\n'), ((6097, 6108), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (6105, 6108), False, 'import sys\n'), ((1542, 1564), 'requests.get', 'requests.get', (['(api + ip)'], {}), '(api + ip)\n', (1554, 1564), False, 'import requests, json\n'), ((2283, 2300), 'requests.get', 'requests.get', (['api'], {}), '(api)\n', (2295, 2300), False, 'import requests, json\n'), ((3109, 3135), 'requests.get', 'requests.get', (['(api + domain)'], {}), '(api + domain)\n', (3121, 3135), False, 'import requests, json\n'), ((3678, 3706), 'requests.get', 'requests.get', (['(api2 + domainw)'], {}), '(api2 + domainw)\n', (3690, 3706), False, 'import requests, json\n'), ((4707, 4753), 'requests.get', 'requests.get', (['(api + number + mid + code + last)'], {}), '(api + number + mid + code + last)\n', (4719, 4753), False, 'import requests, json\n'), ((5551, 5583), 'requests.get', 'requests.get', (['(api3 + api3l + inp)'], {}), '(api3 + api3l + inp)\n', (5563, 5583), False, 'import requests, json\n')]
|
import unittest
from yauber_algo.errors import *
class TWMATestCase(unittest.TestCase):
    """Sanity-check suite for the ``twma`` (time-weighted moving average) algorithm."""
    def test_twma(self):
        """Run the yauber SanityChecker battery over twma: expected values,
        weight containers, error cases, NaN/Inf handling, dtypes, and
        look-ahead/window consistency."""
        import yauber_algo.sanitychecks as sc
        from numpy import array, nan, inf
        import os
        import sys
        import pandas as pd
        import numpy as np
        from yauber_algo.algo import twma
        #
        # Function settings
        #
        algo = 'twma'
        func = twma
        with sc.SanityChecker(algo) as s:
            #
            # Check regular algorithm logic
            #
            s.check_regular(
                array([nan, nan, 2, 7/3]),
                func,
                (
                    array([3, 2, 1, 4]),
                    np.array([1, 1, 1])
                ),
                suffix='twma_equal_weight'
            )
            s.check_regular(
                array([nan, nan, (1*1+2*0.5+3*0.25)/1.75, (4*1+1*0.5+2*0.25)/1.75]),
                func,
                (
                    array([3, 2, 1, 4]),
                    np.array([1, 0.5, 0.25])
                ),
                suffix='twma_linear_weight'
            )
            # Weights may also be given as a plain list or a pandas Series.
            s.check_regular(
                array([nan, nan, 2, 7 / 3]),
                func,
                (
                    array([3, 2, 1, 4]),
                    [1, 1, 1]
                ),
                suffix='twma_list_weight'
            )
            s.check_regular(
                array([nan, nan, 2, 7 / 3]),
                func,
                (
                    array([3, 2, 1, 4]),
                    pd.Series([1, 1, 1])
                ),
                suffix='twma_series_weight'
            )
            # More weights than series points must raise an argument error.
            s.check_regular(
                array([nan, nan, 2, 7 / 3]),
                func,
                (
                    array([3, 2, 1, 4]),
                    [1, 1, 1, 2, 2]
                ),
                suffix='twma_weight_gt_ser',
                exception=YaUberAlgoArgumentError,
            )
            # All-zero weights: expected output is all-NaN.
            s.check_regular(
                array([nan, nan, nan, nan]),
                func,
                (
                    array([3, 2, 1, 4]),
                    np.array([0, 0, 0])
                ),
                suffix='twma_zeroweight'
            )
            # NaN inside the weight vector is rejected as an argument error.
            s.check_regular(
                array([nan, nan, 2, 7 / 3]),
                func,
                (
                    array([3, 2, 1, 4]),
                    np.array([1, 1, nan])
                ),
                suffix='twma_nan_weight',
                exception=YaUberAlgoArgumentError,
            )
            s.check_naninf(
                array([nan, nan, nan, nan]),
                func,
                (
                    array([3, 2, nan, inf]),
                    np.array([1, 1, 1, 1])
                ),
                suffix='',
            )
            s.check_series(
                pd.Series(array([nan, nan, 2, 7 / 3])),
                func,
                (
                    pd.Series(array([3, 2, 1, 4])),
                    np.array([1, 1, 1])
                ),
            )
            # dtype coverage: float, bool, int and (expected-failure) object arrays.
            s.check_dtype_float(
                array([nan, nan, 2, 7 / 3], dtype=float),
                func,
                (
                    array([3, 2, 1, 4], dtype=float),
                    np.array([1, 1, 1], dtype=float)
                ),
            )
            s.check_dtype_bool(
                array([nan, nan, 1/3, 2 / 3], dtype=float),
                func,
                (
                    array([0, 1, 0, 1], dtype=bool),
                    np.array([1, 1, 1], dtype=float)
                ),
            )
            s.check_dtype_int(
                array([nan, nan, 2, 7 / 3], dtype=float),
                func,
                (
                    array([3, 2, 1, 4], dtype=np.int32),
                    np.array([1, 1, 1], dtype=np.int32)
                ),
            )
            s.check_dtype_object(
                func,
                (
                    array([3, 2, 1, 4], dtype=np.object),
                    np.array([1, 1, 1], dtype=float)
                ),
            )
            # Future-reference and window-consistency checks over random data.
            s.check_futref(5, 1,
                           func,
                           (
                               np.random.random(100),
                               np.array([1, 0.5, 0.25, 0.2, 0.1]),
                           ),
                           fix_args=[1],  # Use weights args as is
                           )
            s.check_window_consistency(5, 1,
                                       func,
                                       (
                                           np.random.random(100),
                                           np.array([1, 0.5, 0.25, 0.2, 0.1]),
                                       ),
                                       fix_args=[1],  # Use weights args as is
                                       )
|
[
"pandas.Series",
"numpy.random.random",
"numpy.array",
"yauber_algo.sanitychecks.SanityChecker"
] |
[((443, 465), 'yauber_algo.sanitychecks.SanityChecker', 'sc.SanityChecker', (['algo'], {}), '(algo)\n', (459, 465), True, 'import yauber_algo.sanitychecks as sc\n'), ((589, 616), 'numpy.array', 'array', (['[nan, nan, 2, 7 / 3]'], {}), '([nan, nan, 2, 7 / 3])\n', (594, 616), False, 'from numpy import array, nan, inf\n'), ((859, 954), 'numpy.array', 'array', (['[nan, nan, (1 * 1 + 2 * 0.5 + 3 * 0.25) / 1.75, (4 * 1 + 1 * 0.5 + 2 * 0.25\n ) / 1.75]'], {}), '([nan, nan, (1 * 1 + 2 * 0.5 + 3 * 0.25) / 1.75, (4 * 1 + 1 * 0.5 + 2 *\n 0.25) / 1.75])\n', (864, 954), False, 'from numpy import array, nan, inf\n'), ((1177, 1204), 'numpy.array', 'array', (['[nan, nan, 2, 7 / 3]'], {}), '([nan, nan, 2, 7 / 3])\n', (1182, 1204), False, 'from numpy import array, nan, inf\n'), ((1437, 1464), 'numpy.array', 'array', (['[nan, nan, 2, 7 / 3]'], {}), '([nan, nan, 2, 7 / 3])\n', (1442, 1464), False, 'from numpy import array, nan, inf\n'), ((1711, 1738), 'numpy.array', 'array', (['[nan, nan, 2, 7 / 3]'], {}), '([nan, nan, 2, 7 / 3])\n', (1716, 1738), False, 'from numpy import array, nan, inf\n'), ((2032, 2059), 'numpy.array', 'array', (['[nan, nan, nan, nan]'], {}), '([nan, nan, nan, nan])\n', (2037, 2059), False, 'from numpy import array, nan, inf\n'), ((2302, 2329), 'numpy.array', 'array', (['[nan, nan, 2, 7 / 3]'], {}), '([nan, nan, 2, 7 / 3])\n', (2307, 2329), False, 'from numpy import array, nan, inf\n'), ((2625, 2652), 'numpy.array', 'array', (['[nan, nan, nan, nan]'], {}), '([nan, nan, nan, nan])\n', (2630, 2652), False, 'from numpy import array, nan, inf\n'), ((3142, 3182), 'numpy.array', 'array', (['[nan, nan, 2, 7 / 3]'], {'dtype': 'float'}), '([nan, nan, 2, 7 / 3], dtype=float)\n', (3147, 3182), False, 'from numpy import array, nan, inf\n'), ((3413, 3457), 'numpy.array', 'array', (['[nan, nan, 1 / 3, 2 / 3]'], {'dtype': 'float'}), '([nan, nan, 1 / 3, 2 / 3], dtype=float)\n', (3418, 3457), False, 'from numpy import array, nan, inf\n'), ((3683, 3723), 'numpy.array', 'array', 
(['[nan, nan, 2, 7 / 3]'], {'dtype': 'float'}), '([nan, nan, 2, 7 / 3], dtype=float)\n', (3688, 3723), False, 'from numpy import array, nan, inf\n'), ((676, 695), 'numpy.array', 'array', (['[3, 2, 1, 4]'], {}), '([3, 2, 1, 4])\n', (681, 695), False, 'from numpy import array, nan, inf\n'), ((717, 736), 'numpy.array', 'np.array', (['[1, 1, 1]'], {}), '([1, 1, 1])\n', (725, 736), True, 'import numpy as np\n'), ((988, 1007), 'numpy.array', 'array', (['[3, 2, 1, 4]'], {}), '([3, 2, 1, 4])\n', (993, 1007), False, 'from numpy import array, nan, inf\n'), ((1029, 1053), 'numpy.array', 'np.array', (['[1, 0.5, 0.25]'], {}), '([1, 0.5, 0.25])\n', (1037, 1053), True, 'import numpy as np\n'), ((1266, 1285), 'numpy.array', 'array', (['[3, 2, 1, 4]'], {}), '([3, 2, 1, 4])\n', (1271, 1285), False, 'from numpy import array, nan, inf\n'), ((1526, 1545), 'numpy.array', 'array', (['[3, 2, 1, 4]'], {}), '([3, 2, 1, 4])\n', (1531, 1545), False, 'from numpy import array, nan, inf\n'), ((1567, 1587), 'pandas.Series', 'pd.Series', (['[1, 1, 1]'], {}), '([1, 1, 1])\n', (1576, 1587), True, 'import pandas as pd\n'), ((1800, 1819), 'numpy.array', 'array', (['[3, 2, 1, 4]'], {}), '([3, 2, 1, 4])\n', (1805, 1819), False, 'from numpy import array, nan, inf\n'), ((2121, 2140), 'numpy.array', 'array', (['[3, 2, 1, 4]'], {}), '([3, 2, 1, 4])\n', (2126, 2140), False, 'from numpy import array, nan, inf\n'), ((2162, 2181), 'numpy.array', 'np.array', (['[0, 0, 0]'], {}), '([0, 0, 0])\n', (2170, 2181), True, 'import numpy as np\n'), ((2391, 2410), 'numpy.array', 'array', (['[3, 2, 1, 4]'], {}), '([3, 2, 1, 4])\n', (2396, 2410), False, 'from numpy import array, nan, inf\n'), ((2432, 2453), 'numpy.array', 'np.array', (['[1, 1, nan]'], {}), '([1, 1, nan])\n', (2440, 2453), True, 'import numpy as np\n'), ((2714, 2737), 'numpy.array', 'array', (['[3, 2, nan, inf]'], {}), '([3, 2, nan, inf])\n', (2719, 2737), False, 'from numpy import array, nan, inf\n'), ((2759, 2781), 'numpy.array', 'np.array', (['[1, 1, 1, 
1]'], {}), '([1, 1, 1, 1])\n', (2767, 2781), True, 'import numpy as np\n'), ((2897, 2924), 'numpy.array', 'array', (['[nan, nan, 2, 7 / 3]'], {}), '([nan, nan, 2, 7 / 3])\n', (2902, 2924), False, 'from numpy import array, nan, inf\n'), ((3039, 3058), 'numpy.array', 'np.array', (['[1, 1, 1]'], {}), '([1, 1, 1])\n', (3047, 3058), True, 'import numpy as np\n'), ((3244, 3276), 'numpy.array', 'array', (['[3, 2, 1, 4]'], {'dtype': 'float'}), '([3, 2, 1, 4], dtype=float)\n', (3249, 3276), False, 'from numpy import array, nan, inf\n'), ((3298, 3330), 'numpy.array', 'np.array', (['[1, 1, 1]'], {'dtype': 'float'}), '([1, 1, 1], dtype=float)\n', (3306, 3330), True, 'import numpy as np\n'), ((3517, 3548), 'numpy.array', 'array', (['[0, 1, 0, 1]'], {'dtype': 'bool'}), '([0, 1, 0, 1], dtype=bool)\n', (3522, 3548), False, 'from numpy import array, nan, inf\n'), ((3570, 3602), 'numpy.array', 'np.array', (['[1, 1, 1]'], {'dtype': 'float'}), '([1, 1, 1], dtype=float)\n', (3578, 3602), True, 'import numpy as np\n'), ((3785, 3820), 'numpy.array', 'array', (['[3, 2, 1, 4]'], {'dtype': 'np.int32'}), '([3, 2, 1, 4], dtype=np.int32)\n', (3790, 3820), False, 'from numpy import array, nan, inf\n'), ((3842, 3877), 'numpy.array', 'np.array', (['[1, 1, 1]'], {'dtype': 'np.int32'}), '([1, 1, 1], dtype=np.int32)\n', (3850, 3877), True, 'import numpy as np\n'), ((4006, 4042), 'numpy.array', 'array', (['[3, 2, 1, 4]'], {'dtype': 'np.object'}), '([3, 2, 1, 4], dtype=np.object)\n', (4011, 4042), False, 'from numpy import array, nan, inf\n'), ((4064, 4096), 'numpy.array', 'np.array', (['[1, 1, 1]'], {'dtype': 'float'}), '([1, 1, 1], dtype=float)\n', (4072, 4096), True, 'import numpy as np\n'), ((4257, 4278), 'numpy.random.random', 'np.random.random', (['(100)'], {}), '(100)\n', (4273, 4278), True, 'import numpy as np\n'), ((4311, 4345), 'numpy.array', 'np.array', (['[1, 0.5, 0.25, 0.2, 0.1]'], {}), '([1, 0.5, 0.25, 0.2, 0.1])\n', (4319, 4345), True, 'import numpy as np\n'), ((4647, 4668), 
'numpy.random.random', 'np.random.random', (['(100)'], {}), '(100)\n', (4663, 4668), True, 'import numpy as np\n'), ((4713, 4747), 'numpy.array', 'np.array', (['[1, 0.5, 0.25, 0.2, 0.1]'], {}), '([1, 0.5, 0.25, 0.2, 0.1])\n', (4721, 4747), True, 'import numpy as np\n'), ((2997, 3016), 'numpy.array', 'array', (['[3, 2, 1, 4]'], {}), '([3, 2, 1, 4])\n', (3002, 3016), False, 'from numpy import array, nan, inf\n')]
|
# Copyright 2022 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import re
import pytest
from pex.resolve.path_mappings import PathMapping, PathMappings
from pex.typing import TYPE_CHECKING
if TYPE_CHECKING:
from typing import Tuple
def create_path_mappings(*mappings):
    # type: (*Tuple[str, str]) -> PathMappings
    """Build a PathMappings from any number of (path, name) pairs."""
    mapping_objs = [PathMapping(path=p, name=n) for p, n in mappings]
    return PathMappings(tuple(mapping_objs))
def test_invalid():
    # type: () -> None
    """A relative mapped path must be rejected with a ValueError."""
    expected_message = re.escape("Mapped paths must be absolute. Given: foo")
    with pytest.raises(ValueError, match=expected_message):
        create_path_mappings(("./foo", "A"))
def test_normalize():
    # type: () -> None
    """Mapped paths are normalized: './' segments and trailing slashes removed."""
    def assert_single_path(
        mappings,  # type: PathMappings
        expected,  # type: str
    ):
        # type: (...) -> PathMappings
        assert 1 == len(mappings.mappings)
        assert expected == mappings.mappings[0].path
        return mappings

    plain = assert_single_path(create_path_mappings(("/tmp/foo", "A")), "/tmp/foo")
    messy = assert_single_path(create_path_mappings(("/tmp/./foo/", "A")), "/tmp/foo")
    assert plain == messy
def test_noop():
    # type: () -> None
    """Strings that match no mapping pass through both directions unchanged."""
    path_mappings = create_path_mappings(("/tmp/foo", "A"))
    for uncanonical in ("foo", "/tmp/bar"):
        assert uncanonical == path_mappings.maybe_canonicalize(uncanonical)
    for unreified in ("foo", "/tmp/foo", "A", "$A"):
        assert unreified == path_mappings.maybe_reify(unreified)
def test_canonicalize():
    # type: () -> None
    """Absolute paths under a mapping are rewritten to ${NAME} placeholder form."""
    path_mappings = create_path_mappings(("/tmp/foo", "A"), ("/tmp/bar/", "B"))
    expectations = (
        ("/tmp/foo", "${A}"),
        ("/tmp/bar/", "${B}/"),
        ("/tmp/foo/bar", "${A}/bar"),
        ("file:///tmp/foo/bar", "file://${A}/bar"),
        ("baz @ file:///tmp/bar/baz", "baz @ file://${B}/baz"),
    )
    for raw, canonical in expectations:
        assert canonical == path_mappings.maybe_canonicalize(raw)
def test_reify():
    # type: () -> None
    """${NAME} placeholders are expanded back to their concrete paths."""
    path_mappings = create_path_mappings(("/tmp/foo", "A"), ("/tmp/bar/", "B"))
    expectations = (
        ("${A}", "/tmp/foo"),
        ("${B}/", "/tmp/bar/"),
        ("${A}/bar", "/tmp/foo/bar"),
        ("file://${A}/bar", "file:///tmp/foo/bar"),
        ("baz @ file://${B}/baz", "baz @ file:///tmp/bar/baz"),
    )
    for canonical, concrete in expectations:
        assert concrete == path_mappings.maybe_reify(canonical)
|
[
"re.escape",
"pex.resolve.path_mappings.PathMapping"
] |
[((423, 456), 'pex.resolve.path_mappings.PathMapping', 'PathMapping', ([], {'path': 'path', 'name': 'name'}), '(path=path, name=name)\n', (434, 456), False, 'from pex.resolve.path_mappings import PathMapping, PathMappings\n'), ((573, 627), 're.escape', 're.escape', (['"""Mapped paths must be absolute. Given: foo"""'], {}), "('Mapped paths must be absolute. Given: foo')\n", (582, 627), False, 'import re\n')]
|
from django.contrib import admin
from django.forms import TextInput, ModelForm
from suit.admin import SortableModelAdmin
from .models import MarqueeMessage
class MarqueeMessageForm(ModelForm):
    """Admin form for MarqueeMessage with a widened text input for the message."""
    class Meta:
        widgets = {
            # Extra-wide input so long marquee messages are editable in full.
            'message': TextInput(attrs={'class': 'input-xxlarge'}),
        }
class MarqueeMessageAdmin(SortableModelAdmin):
    """Admin configuration with drag-and-drop ordering (django-suit SortableModelAdmin)."""
    form = MarqueeMessageForm
    sortable = 'order'  # model field used by SortableModelAdmin for ordering
    list_editable = ('display', 'order')
    list_display = ('message', 'display', 'order')
# Make MarqueeMessage manageable through the Django admin site.
admin.site.register(MarqueeMessage, MarqueeMessageAdmin)
|
[
"django.forms.TextInput",
"django.contrib.admin.site.register"
] |
[((505, 561), 'django.contrib.admin.site.register', 'admin.site.register', (['MarqueeMessage', 'MarqueeMessageAdmin'], {}), '(MarqueeMessage, MarqueeMessageAdmin)\n', (524, 561), False, 'from django.contrib import admin\n'), ((254, 297), 'django.forms.TextInput', 'TextInput', ([], {'attrs': "{'class': 'input-xxlarge'}"}), "(attrs={'class': 'input-xxlarge'})\n", (263, 297), False, 'from django.forms import TextInput, ModelForm\n')]
|
# coding=utf-8
from pypint.integrators.node_providers.gauss_legendre_nodes import GaussLegendreNodes
import unittest
from nose.tools import *
import numpy as np
test_num_nodes = range(2, 7)
def manual_initialization(n_nodes):
    """Assert that init(n_nodes) fills a GaussLegendreNodes provider correctly."""
    provider = GaussLegendreNodes()
    provider.init(n_nodes)
    assert_equal(provider.num_nodes, n_nodes,
                 "Number of nodes should be set")
    assert_is_instance(provider.nodes, np.ndarray,
                       "Nodes should be a numpy.ndarray")
    assert_equal(provider.nodes.size, n_nodes,
                 "There should be correct number of nodes")
def test_manual_initialization():
    """Nose-style generator: run manual_initialization for every node count."""
    for node_count in test_num_nodes:
        yield manual_initialization, node_count
class GaussLegendreNodesTest(unittest.TestCase):
    """Unit tests for GaussLegendreNodes default state and computed node values."""
    def setUp(self):
        # Fresh, uninitialized provider for each test.
        self._test_obj = GaussLegendreNodes()
    def test_default_initialization(self):
        self.assertIsNone(self._test_obj.num_nodes,
                          "Number of nodes should be initialized as 'None'")
        self.assertIsNone(self._test_obj.nodes,
                          "Nodes list should be initializes as 'None'")
    def test_correctness_of_selected_nodes(self):
        """Compare computed nodes against closed-form Gauss-Legendre abscissae for n=1,2,5."""
        self._test_obj.init(1)
        self.assertAlmostEqual(self._test_obj.nodes[0], 0.0)
        # setUp() is called manually to get a fresh provider between cases.
        self.setUp()
        self._test_obj.init(2)
        self.assertAlmostEqual(self._test_obj.nodes[0], -np.sqrt(1.0 / 3.0))
        self.assertAlmostEqual(self._test_obj.nodes[1], np.sqrt(1.0 / 3.0))
        self.setUp()
        self._test_obj.init(5)
        self.assertAlmostEqual(self._test_obj.nodes[0], -1.0 / 3.0 * np.sqrt(5.0 + 2.0 * np.sqrt(10.0 / 7.0)))
        self.assertAlmostEqual(self._test_obj.nodes[1], -1.0 / 3.0 * np.sqrt(5.0 - 2.0 * np.sqrt(10.0 / 7.0)))
        self.assertAlmostEqual(self._test_obj.nodes[2], 0.0)
        self.assertAlmostEqual(self._test_obj.nodes[3], 1.0 / 3.0 * np.sqrt(5.0 - 2.0 * np.sqrt(10.0 / 7.0)))
        self.assertAlmostEqual(self._test_obj.nodes[4], 1.0 / 3.0 * np.sqrt(5 + 2 * np.sqrt(10.0 / 7.0)))
|
[
"pypint.integrators.node_providers.gauss_legendre_nodes.GaussLegendreNodes",
"numpy.sqrt"
] |
[((242, 262), 'pypint.integrators.node_providers.gauss_legendre_nodes.GaussLegendreNodes', 'GaussLegendreNodes', ([], {}), '()\n', (260, 262), False, 'from pypint.integrators.node_providers.gauss_legendre_nodes import GaussLegendreNodes\n'), ((803, 823), 'pypint.integrators.node_providers.gauss_legendre_nodes.GaussLegendreNodes', 'GaussLegendreNodes', ([], {}), '()\n', (821, 823), False, 'from pypint.integrators.node_providers.gauss_legendre_nodes import GaussLegendreNodes\n'), ((1446, 1464), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (1453, 1464), True, 'import numpy as np\n'), ((1370, 1388), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (1377, 1388), True, 'import numpy as np\n'), ((1608, 1627), 'numpy.sqrt', 'np.sqrt', (['(10.0 / 7.0)'], {}), '(10.0 / 7.0)\n', (1615, 1627), True, 'import numpy as np\n'), ((1719, 1738), 'numpy.sqrt', 'np.sqrt', (['(10.0 / 7.0)'], {}), '(10.0 / 7.0)\n', (1726, 1738), True, 'import numpy as np\n'), ((1890, 1909), 'numpy.sqrt', 'np.sqrt', (['(10.0 / 7.0)'], {}), '(10.0 / 7.0)\n', (1897, 1909), True, 'import numpy as np\n'), ((1996, 2015), 'numpy.sqrt', 'np.sqrt', (['(10.0 / 7.0)'], {}), '(10.0 / 7.0)\n', (2003, 2015), True, 'import numpy as np\n')]
|
from torch.utils.data import Dataset
from tqdm import tqdm
from pathlib import Path
def read_text(text_file):
    """Return the first line of *text_file* with surrounding whitespace stripped."""
    with open(text_file, 'r', encoding='utf-8') as handle:
        first_line = handle.readlines()[0]
    return first_line.strip()
class MnMAudioDataset(Dataset):
    """Dataset of (audio_file, encoded_text) pairs read from a manifest CSV.

    Each manifest row's first column is kept as the audio file reference and
    the last column points to a transcript file, which is cleaned via
    ``data_transformer`` and tokenized via ``tokenizer``.
    """

    def __init__(self, path, manifest_csv_file, tokenizer, data_transformer, bucket_size, path_from_home=True):
        base_dir = Path.home() if path_from_home else Path(".")
        manifest_path = base_dir.joinpath(path).joinpath(manifest_csv_file)
        self.file_text_pair = []
        self.data_transformer = data_transformer
        self.tokenizer = tokenizer
        self.bucket_size = bucket_size
        with open(manifest_path, 'r', encoding='utf-8') as manifest:
            for line in tqdm(manifest):
                fields = line.strip().split(",")
                # Last column references the transcript file; clean then tokenize.
                transcript = self.data_transformer(read_text(fields[-1]))
                self.file_text_pair.append((fields[0], self.tokenizer.encode(transcript)))

    def __len__(self):
        return len(self.file_text_pair)

    def __getitem__(self, index):
        if self.bucket_size <= 1:
            # Single (file, text) sample.
            return self.file_text_pair[index]
        # Bucket of consecutive samples, clamped so a full bucket always fits.
        start = min(len(self) - self.bucket_size, index)
        return self.file_text_pair[start:start + self.bucket_size]
class MnMAudioTextDataset(Dataset):
    """Text-only variant of MnMAudioDataset: stores just the encoded transcripts."""

    def __init__(self, path, manifest_csv_file, tokenizer, data_transformer, bucket_size, path_from_home=True):
        base_dir = Path.home() if path_from_home else Path(".")
        manifest_path = base_dir.joinpath(path).joinpath(manifest_csv_file)
        self.texts = []
        self.data_transformer = data_transformer
        self.tokenizer = tokenizer
        self.bucket_size = bucket_size
        with open(manifest_path, 'r', encoding='utf-8') as manifest:
            for line in tqdm(manifest):
                fields = line.strip().split(",")
                # Last column references the transcript file; clean then tokenize.
                transcript = self.data_transformer(read_text(fields[-1]))
                self.texts.append(self.tokenizer.encode(transcript))

    def __len__(self):
        return len(self.texts)

    def __getitem__(self, index):
        if self.bucket_size <= 1:
            # Single encoded transcript.
            return self.texts[index]
        # Bucket of consecutive transcripts, clamped so a full bucket always fits.
        start = min(len(self) - self.bucket_size, index)
        return self.texts[start:start + self.bucket_size]
|
[
"tqdm.tqdm",
"pathlib.Path",
"pathlib.Path.home"
] |
[((411, 422), 'pathlib.Path.home', 'Path.home', ([], {}), '()\n', (420, 422), False, 'from pathlib import Path\n'), ((461, 470), 'pathlib.Path', 'Path', (['"""."""'], {}), "('.')\n", (465, 470), False, 'from pathlib import Path\n'), ((841, 849), 'tqdm.tqdm', 'tqdm', (['mp'], {}), '(mp)\n', (845, 849), False, 'from tqdm import tqdm\n'), ((1788, 1799), 'pathlib.Path.home', 'Path.home', ([], {}), '()\n', (1797, 1799), False, 'from pathlib import Path\n'), ((1838, 1847), 'pathlib.Path', 'Path', (['"""."""'], {}), "('.')\n", (1842, 1847), False, 'from pathlib import Path\n'), ((2209, 2217), 'tqdm.tqdm', 'tqdm', (['mp'], {}), '(mp)\n', (2213, 2217), False, 'from tqdm import tqdm\n')]
|
import webapp2
import json
from models.user import User
class setCityHandler(webapp2.RequestHandler):
    """Handle GET /setCity: persist the requested city id for the current user.

    Responds with ``{"status": "ok"}`` on success and ``{"status": "error"}``
    for a missing/zero/non-numeric city value. Writes nothing when no user
    is authenticated (original behavior preserved).
    """

    def get(self):
        user = User.checkUser()
        if not user:
            return
        # 'city' comes from the query string (untrusted input). A missing or
        # non-numeric value previously crashed int() with an unhandled
        # ValueError (HTTP 500); treat it as an invalid city instead.
        try:
            city = int(self.request.get('city'))
        except ValueError:
            city = 0
        if city:
            User.setCity(user.email, city)
            self.response.write(json.dumps({'status': 'ok'}))
        else:
            self.response.write(json.dumps({'status': 'error'}))
app = webapp2.WSGIApplication([
('/setCity', setCityHandler)
], debug=True)
|
[
"models.user.User.setCity",
"models.user.User.checkUser",
"json.dumps",
"webapp2.WSGIApplication"
] |
[((415, 482), 'webapp2.WSGIApplication', 'webapp2.WSGIApplication', (["[('/setCity', setCityHandler)]"], {'debug': '(True)'}), "([('/setCity', setCityHandler)], debug=True)\n", (438, 482), False, 'import webapp2\n'), ((128, 144), 'models.user.User.checkUser', 'User.checkUser', ([], {}), '()\n', (142, 144), False, 'from models.user import User\n'), ((252, 282), 'models.user.User.setCity', 'User.setCity', (['user.email', 'city'], {}), '(user.email, city)\n', (264, 282), False, 'from models.user import User\n'), ((313, 341), 'json.dumps', 'json.dumps', (["{'status': 'ok'}"], {}), "({'status': 'ok'})\n", (323, 341), False, 'import json\n'), ((373, 404), 'json.dumps', 'json.dumps', (["{'status': 'error'}"], {}), "({'status': 'error'})\n", (383, 404), False, 'import json\n')]
|
import re # to search the tags
import html # to analyze the html &...
import imghdr # check for the img type
import atexit
import requests # main module to get the web source
import zipfile # archive all files into a zip file
from threading import Thread
from collections import deque # use thread safe sequence
from rdstr import randstr
from headers import HEADER
ses = requests.session()
ses.headers.update(HEADER)
HOST = 'https://cn.bing.com'
search = '动漫壁纸'
count = 1000
url = 'https://cn.bing.com/images/async?q=%(topic)s&first=%(page)d&count=%(count)d&relp=35&scenario=ImageBasicHover&datsrc=N_I&layout=R'
tag_a = re.compile('<a class="iusc" .*?>')
murl = re.compile('"murl":"(.*?)"')
page = 200
startn = 200 * 35
All = deque()
def main():
thrs = deque()
for p in range(page):
p = startn + p * 35
iurl = url % dict(topic = search, page = p, count = 35)
print(iurl)
#try:
# get(iurl)
#except Exception as e:
# print(e)
thrs.append(Thread(None, get, iurl, (iurl, thrs, len(thrs))))
for t in thrs:
t.setDaemon(1)
for t in thrs:
t.start()
for t in thrs.copy():
t.join()
print(t,'end')
def last_write():
print('zipping')
if not All:
return
with zipfile.ZipFile('all.zip', 'a') as zf:
for f in All:
try:
zf.write(f)
except FileNotFoundError:
pass
atexit.register(last_write)
def get(ur, thrs, _i):
this = thrs[_i]
r = ses.get(ur)
as_ = tag_a.findall(r.text)
print(len(as_))
n = 0
for a in as_:
text = html.unescape(a)
# analyze the html
try:
s = murl.search(text).group(1)
except Exception:
#print('no murl')
continue
fn = randstr(s)
if fn is None:
#print('exists')
continue
try:
b_img = ses.get(s).content
except Exception:
#print('pass', s)
continue
n += 1
ext = imghdr.what(None, b_img)
if ext is None:
ext = 'jpeg'
fn = "%s.%s" % (fn, ext)
with open(fn, 'wb') as f:
f.write(b_img)
print('get', fn, 'at', s)
All.append(fn)
if n : print('get', n ,'pics')
thrs.remove(this)
print(this, 'finish tasks', 'rest', len(thrs), 'works')
if __name__ == '__main__':
main()
|
[
"requests.session",
"atexit.register",
"rdstr.randstr",
"html.unescape",
"zipfile.ZipFile",
"imghdr.what",
"collections.deque",
"re.compile"
] |
[((374, 392), 'requests.session', 'requests.session', ([], {}), '()\n', (390, 392), False, 'import requests\n'), ((629, 663), 're.compile', 're.compile', (['"""<a class="iusc" .*?>"""'], {}), '(\'<a class="iusc" .*?>\')\n', (639, 663), False, 'import re\n'), ((671, 699), 're.compile', 're.compile', (['""""murl":"(.*?)\\""""'], {}), '(\'"murl":"(.*?)"\')\n', (681, 699), False, 'import re\n'), ((738, 745), 'collections.deque', 'deque', ([], {}), '()\n', (743, 745), False, 'from collections import deque\n'), ((1483, 1510), 'atexit.register', 'atexit.register', (['last_write'], {}), '(last_write)\n', (1498, 1510), False, 'import atexit\n'), ((770, 777), 'collections.deque', 'deque', ([], {}), '()\n', (775, 777), False, 'from collections import deque\n'), ((1317, 1348), 'zipfile.ZipFile', 'zipfile.ZipFile', (['"""all.zip"""', '"""a"""'], {}), "('all.zip', 'a')\n", (1332, 1348), False, 'import zipfile\n'), ((1672, 1688), 'html.unescape', 'html.unescape', (['a'], {}), '(a)\n', (1685, 1688), False, 'import html\n'), ((1872, 1882), 'rdstr.randstr', 'randstr', (['s'], {}), '(s)\n', (1879, 1882), False, 'from rdstr import randstr\n'), ((2116, 2140), 'imghdr.what', 'imghdr.what', (['None', 'b_img'], {}), '(None, b_img)\n', (2127, 2140), False, 'import imghdr\n')]
|
#!/usr/bin/env python
from .greengraph import GreenGraph
from .googlemap import GoogleMap
from argparse import ArgumentParser
from IPython.display import Image
from IPython.display import display
if __name__ == "__main__":
parser = ArgumentParser(description = 'Generate pictures between 2 location')
parser.add_argument('-f', '--from', required=True, help='Starting location', dest='start')
parser.add_argument('-t', '--to', required=True, help='Ending location', dest='end')
parser.add_argument('-s', '--steps', help='Number of steps', type=int, dest='steps', nargs='?', default=10)
parser.add_argument('-gb', '--greenbetween', help='Count green between', dest='greenbetween', action="store_true")
parser.add_argument('-o', '--out', help='Output filename', type=str, dest='filename')
args = parser.parse_args()
my_data = GreenGraph(args.start, args.end)
if args.greenbetween:
print(my_data.green_between(args.steps))
if args.filename:
imgs = (GoogleMap(*location)
for location in GreenGraph.location_sequence(GreenGraph.geolocate('london'),
GreenGraph.geolocate('oxford'), steps=args.steps))
for position, img in enumerate(imgs):
with open(args.filename+'_{0}.png'.format(position+1), 'wb') as png:
png.write(img.image)
|
[
"argparse.ArgumentParser"
] |
[((235, 301), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""Generate pictures between 2 location"""'}), "(description='Generate pictures between 2 location')\n", (249, 301), False, 'from argparse import ArgumentParser\n')]
|
import os
import sys
import unittest
import launch_testing.asserts
sys.path.append(os.path.dirname(__file__))
from move_group_launch_test_common import generate_move_group_test_description
def generate_test_description():
return generate_move_group_test_description(gtest_name='move_group_ompl_constraints_test')
class TestGTestProcessActive(unittest.TestCase):
def test_gtest_run_complete(self, proc_info, ompl_constraint_test, run_move_group_node, static_tf, robot_state_publisher, fake_joint_driver_node, mongodb_server_node):
proc_info.assertWaitForShutdown(ompl_constraint_test, timeout=4000.0)
@launch_testing.post_shutdown_test()
class TestGTestProcessPostShutdown(unittest.TestCase):
def test_gtest_pass(self, proc_info, ompl_constraint_test, run_move_group_node, static_tf, robot_state_publisher, fake_joint_driver_node, mongodb_server_node):
launch_testing.asserts.assertExitCodes(proc_info, process=ompl_constraint_test)
|
[
"os.path.dirname",
"move_group_launch_test_common.generate_move_group_test_description"
] |
[((84, 109), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (99, 109), False, 'import os\n'), ((235, 323), 'move_group_launch_test_common.generate_move_group_test_description', 'generate_move_group_test_description', ([], {'gtest_name': '"""move_group_ompl_constraints_test"""'}), "(gtest_name=\n 'move_group_ompl_constraints_test')\n", (271, 323), False, 'from move_group_launch_test_common import generate_move_group_test_description\n')]
|
# -*- coding: utf-8 -*-
# Created by: ZhaoDongshuang
# Created on: 18-2-7
""" 定义 learning_logs 的 URL 模式 """
from django.conf.urls import url
from . import views
app_name = 'learning_logs'
urlpatterns = [
# 主页
url(r'^$', views.index, name='index'),
# 显示所有的主题
url(r'^topics/$', views.topics, name='topics'),
# 特定主题的详细页面
url(r'^topics/(?P<topic_id>\d+)/$', views.topic, name='topic'),
]
|
[
"django.conf.urls.url"
] |
[((220, 256), 'django.conf.urls.url', 'url', (['"""^$"""', 'views.index'], {'name': '"""index"""'}), "('^$', views.index, name='index')\n", (223, 256), False, 'from django.conf.urls import url\n'), ((277, 322), 'django.conf.urls.url', 'url', (['"""^topics/$"""', 'views.topics'], {'name': '"""topics"""'}), "('^topics/$', views.topics, name='topics')\n", (280, 322), False, 'from django.conf.urls import url\n'), ((345, 407), 'django.conf.urls.url', 'url', (['"""^topics/(?P<topic_id>\\\\d+)/$"""', 'views.topic'], {'name': '"""topic"""'}), "('^topics/(?P<topic_id>\\\\d+)/$', views.topic, name='topic')\n", (348, 407), False, 'from django.conf.urls import url\n')]
|
from collections import defaultdict
from io import StringIO
from random import choice
from packaging.version import Version
from pysvc import errors as svc_errors
from pysvc.unified.client import connect
from pysvc.unified.response import CLIFailureError, SVCResponse
from retry import retry
import controller.array_action.config as config
import controller.array_action.errors as array_errors
import controller.controller_server.config as controller_config
from controller.array_action.array_action_types import Volume, Snapshot, Replication, Host
from controller.array_action.array_mediator_abstract import ArrayMediatorAbstract
from controller.array_action.utils import ClassProperty, convert_scsi_id_to_nguid
from controller.common import settings
from controller.common.csi_logger import get_stdout_logger
array_connections_dict = {}
logger = get_stdout_logger()
OBJ_NOT_FOUND = 'CMMVC5753E'
SNAPSHOT_NOT_EXIST = 'CMMVC9755E'
NAME_NOT_EXIST_OR_MEET_RULES = 'CMMVC5754E'
NON_ASCII_CHARS = 'CMMVC6017E'
INVALID_NAME = 'CMMVC6527E'
TOO_MANY_CHARS = 'CMMVC5738E'
VALUE_TOO_LONG = 'CMMVC5703E'
INVALID_FILTER_VALUE = 'CMMVC5741E'
SPECIFIED_OBJ_NOT_EXIST = 'CMMVC5804E'
LUN_ALREADY_IN_USE = 'CMMVC5879E'
VOL_ALREADY_UNMAPPED = 'CMMVC5842E'
OBJ_ALREADY_EXIST = 'CMMVC6035E'
FCMAP_ALREADY_EXIST = 'CMMVC6466E'
FCMAP_ALREADY_COPYING = 'CMMVC5907E'
FCMAP_ALREADY_IN_THE_STOPPED_STATE = 'CMMVC5912E'
VOL_NOT_FOUND = 'CMMVC8957E'
POOL_NOT_MATCH_VOL_SPACE_EFFICIENCY = 'CMMVC9292E'
NOT_CHILD_POOL = 'CMMVC9760E'
NOT_REDUCTION_POOL = 'CMMVC9301E'
NOT_ENOUGH_EXTENTS_IN_POOL_EXPAND = 'CMMVC5860E'
NOT_ENOUGH_EXTENTS_IN_POOL_CREATE = 'CMMVC8710E'
HOST_NQN = 'nqn'
HOST_WWPN = 'WWPN'
HOST_ISCSI_NAME = 'iscsi_name'
HOST_PORTSET_ID = 'portset_id'
LIST_HOSTS_CMD_FORMAT = 'lshost {HOST_ID};echo;'
HOSTS_LIST_ERR_MSG_MAX_LENGTH = 300
LUN_INTERVAL = 128
FCMAP_STATUS_DONE = 'idle_or_copied'
RCRELATIONSHIP_STATE_IDLE = 'idling'
RCRELATIONSHIP_STATE_READY = 'consistent_synchronized'
YES = 'yes'
ENDPOINT_TYPE_SOURCE = 'source'
ENDPOINT_TYPE_TARGET = 'target'
ENDPOINT_TYPE_MASTER = 'master'
ENDPOINT_TYPE_AUX = 'aux'
def is_warning_message(exception):
""" Return True if the exception message is warning """
info_seperated_by_quotation = str(exception).split('"')
message = info_seperated_by_quotation[1]
word_in_message = message.split()
message_tag = word_in_message[0]
if message_tag[-1] == 'W':
return True
return False
def _get_space_efficiency_kwargs(space_efficiency):
if space_efficiency:
space_efficiency = space_efficiency.lower()
if space_efficiency == config.SPACE_EFFICIENCY_THIN:
return {'thin': True}
if space_efficiency == config.SPACE_EFFICIENCY_COMPRESSED:
return {'compressed': True}
if space_efficiency == config.SPACE_EFFICIENCY_DEDUPLICATED_THIN:
return {'deduplicated': True, 'thin': True}
if space_efficiency in (config.SPACE_EFFICIENCY_DEDUPLICATED,
config.SPACE_EFFICIENCY_DEDUPLICATED_COMPRESSED):
return {'deduplicated': True, 'compressed': True}
return {}
def build_kwargs_from_parameters(space_efficiency, pool_name, io_group,
volume_group, volume_name, volume_size):
cli_kwargs = {}
cli_kwargs.update({
'name': volume_name,
'unit': 'b',
'size': volume_size,
'pool': pool_name
})
space_efficiency_kwargs = _get_space_efficiency_kwargs(space_efficiency)
cli_kwargs.update(space_efficiency_kwargs)
if io_group:
cli_kwargs['iogrp'] = io_group
if volume_group:
cli_kwargs['volumegroup'] = volume_group
return cli_kwargs
def build_create_replication_kwargs(master_cli_volume_id, aux_cli_volume_id, other_system_id, copy_type):
cli_kwargs = {
'master': master_cli_volume_id,
'aux': aux_cli_volume_id,
'cluster': other_system_id,
}
if copy_type == config.REPLICATION_COPY_TYPE_ASYNC:
cli_kwargs.update({'global': True})
return cli_kwargs
def build_start_replication_kwargs(rcrelationship_id, primary_endpoint_type, force):
cli_kwargs = {'object_id': rcrelationship_id}
if primary_endpoint_type:
cli_kwargs.update({'primary': primary_endpoint_type})
if force:
cli_kwargs.update({'force': True})
return cli_kwargs
def build_stop_replication_kwargs(rcrelationship_id, add_access):
cli_kwargs = {'object_id': rcrelationship_id}
if add_access:
cli_kwargs.update({'access': True})
return cli_kwargs
def _get_cli_volume_space_efficiency(cli_volume):
space_efficiency = config.SPACE_EFFICIENCY_THICK
if cli_volume.se_copy == YES:
space_efficiency = config.SPACE_EFFICIENCY_THIN
if cli_volume.compressed_copy == YES:
space_efficiency = config.SPACE_EFFICIENCY_COMPRESSED
if hasattr(cli_volume, "deduplicated_copy"):
if cli_volume.deduplicated_copy == YES:
if cli_volume.se_copy == YES:
space_efficiency = config.SPACE_EFFICIENCY_DEDUPLICATED_THIN
else:
space_efficiency = config.SPACE_EFFICIENCY_DEDUPLICATED_COMPRESSED
return space_efficiency
class SVCArrayMediator(ArrayMediatorAbstract):
ARRAY_ACTIONS = {}
BLOCK_SIZE_IN_BYTES = 512
MAX_LUN_NUMBER = 511
MIN_LUN_NUMBER = 0
MIN_SUPPORTED_VERSION = '7.8'
@ClassProperty
def array_type(self):
return settings.ARRAY_TYPE_SVC
@ClassProperty
def port(self):
return 22
@ClassProperty
def max_object_name_length(self):
return 63
@ClassProperty
def max_object_prefix_length(self):
return 20
@ClassProperty
def max_connections(self):
return 2
@ClassProperty
def minimal_volume_size_in_bytes(self):
return 512 # 512 Bytes
@ClassProperty
def maximal_volume_size_in_bytes(self):
return 256 * 1024 * 1024 * 1024 * 1024
@ClassProperty
def max_lun_retries(self):
return 10
@ClassProperty
def default_object_prefix(self):
return "CSI"
def __init__(self, user, password, endpoint):
super().__init__(user, password, endpoint)
self.client = None
# SVC only accept one IP address
if len(endpoint) == 0 or len(endpoint) > 1:
logger.error("SVC only support one cluster IP")
raise array_errors.StorageManagementIPsNotSupportError(
endpoint)
self.endpoint = self.endpoint[0]
self._cluster = None
logger.debug("in init")
self._connect()
def _connect(self):
logger.debug("Connecting to SVC {0}".format(self.endpoint))
try:
self.client = connect(self.endpoint, username=self.user,
password=self.password)
if Version(self._code_level) < Version(self.MIN_SUPPORTED_VERSION):
raise array_errors.UnsupportedStorageVersionError(
self._code_level, self.MIN_SUPPORTED_VERSION
)
except (svc_errors.IncorrectCredentials,
svc_errors.StorageArrayClientException):
raise array_errors.CredentialsError(self.endpoint)
def disconnect(self):
if self.client:
self.client.close()
@property
def _system_info(self):
if self._cluster is None:
for cluster in self.client.svcinfo.lssystem():
if cluster.location == 'local':
self._cluster = cluster
return self._cluster
@property
def _code_level(self):
return self._system_info.code_level.split(None, 1)[0]
@property
def identifier(self):
return self._system_info.id_alias
def is_active(self):
return self.client.transport.transport.get_transport().is_active()
def _generate_volume_response(self, cli_volume):
pool = self._get_volume_pool(cli_volume)
source_volume_wwn = self._get_source_volume_wwn_if_exists(cli_volume)
space_efficiency = _get_cli_volume_space_efficiency(cli_volume)
return Volume(
capacity_bytes=int(cli_volume.capacity),
id=cli_volume.vdisk_UID,
internal_id=cli_volume.id,
name=cli_volume.name,
array_address=self.endpoint,
pool=pool,
source_id=source_volume_wwn,
array_type=self.array_type,
space_efficiency=space_efficiency,
default_space_efficiency=config.SPACE_EFFICIENCY_THICK
)
def _generate_snapshot_response_from_cli_volume(self, cli_volume, source_id):
return self._generate_snapshot_response(cli_volume.capacity, cli_volume.name, source_id, cli_volume.id,
cli_volume.vdisk_UID)
def _generate_snapshot_response_from_cli_snapshot(self, cli_snapshot, source_cli_volume):
return self._generate_snapshot_response(source_cli_volume.capacity, cli_snapshot.snapshot_name,
source_cli_volume.vdisk_UID, cli_snapshot.snapshot_id)
def _generate_snapshot_response(self, capacity, name, source_id, internal_id, vdisk_uid=''):
return Snapshot(
capacity_bytes=int(capacity),
name=name,
source_id=source_id,
internal_id=internal_id,
id=vdisk_uid,
array_address=self.endpoint,
is_ready=True,
array_type=self.array_type
)
def _generate_snapshot_response_with_verification(self, cli_object):
if not cli_object.FC_id:
logger.error("FlashCopy Mapping not found for target volume: {}".format(cli_object.name))
raise array_errors.ExpectedSnapshotButFoundVolumeError(cli_object.name, self.endpoint)
fcmap = self._get_fcmap_as_target_if_exists(cli_object.name)
if fcmap is None or fcmap.copy_rate != '0':
raise array_errors.ExpectedSnapshotButFoundVolumeError(cli_object.name, self.endpoint)
source_id = self._get_wwn_by_volume_name_if_exists(fcmap.source_vdisk_name)
return self._generate_snapshot_response_from_cli_volume(cli_object, source_id)
def _lsvdisk(self, volume_name, not_exist_err):
try:
return self.client.svcinfo.lsvdisk(bytes=True, object_id=volume_name).as_single_element
except (svc_errors.CommandExecutionError, CLIFailureError) as ex:
if (OBJ_NOT_FOUND in ex.my_message or
NAME_NOT_EXIST_OR_MEET_RULES in ex.my_message):
logger.info("volume not found")
if not_exist_err:
raise array_errors.ObjectNotFoundError(volume_name)
elif any(msg_id in ex.my_message for msg_id in (NON_ASCII_CHARS, VALUE_TOO_LONG)):
raise array_errors.IllegalObjectName(ex.my_message)
else:
raise ex
return None
def _get_cli_volume(self, volume_name, not_exist_err=True):
cli_volume = self._lsvdisk(volume_name, not_exist_err)
if not cli_volume and not_exist_err:
raise array_errors.ObjectNotFoundError(volume_name)
return cli_volume
def _get_cli_volume_if_exists(self, volume_name):
cli_volume = self._get_cli_volume(volume_name, not_exist_err=False)
logger.debug("cli volume returned : {}".format(cli_volume))
return cli_volume
def _get_fcmap_as_target_if_exists(self, volume_name):
fcmaps_as_target = self._get_fcmaps(volume_name, ENDPOINT_TYPE_TARGET)
if len(fcmaps_as_target) != 1:
return None
return fcmaps_as_target[0]
def _get_fcmaps_as_source_if_exist(self, volume_name):
return self._get_fcmaps(volume_name, ENDPOINT_TYPE_SOURCE)
def _get_source_volume_wwn_if_exists(self, target_cli_object):
fcmap = self._get_fcmap_as_target_if_exists(target_cli_object.name)
if not fcmap:
return None
if self._is_in_remote_copy_relationship(fcmap):
return None
source_volume_name = fcmap.source_vdisk_name
return self._get_wwn_by_volume_name_if_exists(source_volume_name)
def _get_volume_pools(self, cli_volume):
pool = cli_volume.mdisk_grp_name
if isinstance(pool, list):
pool_names = pool[:]
pool_names.remove('many')
return pool_names
return [pool]
def _get_volume_pool(self, cli_volume):
pools = self._get_volume_pools(cli_volume)
return ':'.join(pools)
def get_volume(self, name, pool, flashcopy_2):
cli_volume = self._get_cli_volume(name)
return self._generate_volume_response(cli_volume)
def _get_object_fcmaps(self, object_name):
all_fcmaps = []
fcmap_as_target = self._get_fcmap_as_target_if_exists(object_name)
if fcmap_as_target:
all_fcmaps.append(fcmap_as_target)
all_fcmaps.extend(self._get_fcmaps_as_source_if_exist(object_name))
return all_fcmaps
def _expand_cli_volume(self, cli_volume, increase_in_bytes, is_hyperswap):
volume_name = cli_volume.name
try:
if is_hyperswap:
self.client.svctask.expandvolume(object_id=volume_name, unit='b', size=increase_in_bytes)
else:
self.client.svctask.expandvdisksize(vdisk_id=volume_name, unit='b', size=increase_in_bytes)
except (svc_errors.CommandExecutionError, CLIFailureError) as ex:
if is_warning_message(ex.my_message):
logger.warning("exception encountered during volume expansion of {}: {}".format(volume_name,
ex.my_message))
else:
logger.error("Failed to expand volume {}".format(volume_name))
if OBJ_NOT_FOUND in ex.my_message or VOL_NOT_FOUND in ex.my_message:
raise array_errors.ObjectNotFoundError(volume_name)
if NOT_ENOUGH_EXTENTS_IN_POOL_EXPAND in ex.my_message:
raise array_errors.NotEnoughSpaceInPool(id_or_name=cli_volume.mdisk_grp_name)
raise ex
def expand_volume(self, volume_id, required_bytes):
logger.info("Expanding volume with id : {0} to {1} bytes".format(volume_id, required_bytes))
cli_volume = self._get_cli_volume_by_wwn(volume_id, not_exist_err=True)
volume_name = cli_volume.name
fcmaps = self._get_object_fcmaps(volume_name)
self._safe_delete_fcmaps(volume_name, fcmaps)
is_hyperswap = any(self._is_in_remote_copy_relationship(fcmap) for fcmap in fcmaps)
current_size = int(cli_volume.capacity)
final_size = self._convert_size_bytes(required_bytes)
increase_in_bytes = final_size - current_size
self._expand_cli_volume(cli_volume, increase_in_bytes, is_hyperswap)
logger.info(
"Finished volume expansion. id : {0}. volume increased by {1} bytes".format(volume_id, increase_in_bytes))
def _get_fcmaps(self, volume_name, endpoint_type):
"""
Args:
endpoint_type : 'source' or 'target'
"""
filter_value = '{0}_vdisk_name={1}'.format(endpoint_type, volume_name)
return self.client.svcinfo.lsfcmap(filtervalue=filter_value).as_list
def validate_supported_space_efficiency(self, space_efficiency):
logger.debug("validate_supported_space_efficiency for "
"space efficiency : {0}".format(space_efficiency))
if (space_efficiency and space_efficiency.lower() not in
[config.SPACE_EFFICIENCY_THIN, config.SPACE_EFFICIENCY_THICK,
config.SPACE_EFFICIENCY_COMPRESSED,
config.SPACE_EFFICIENCY_DEDUPLICATED,
config.SPACE_EFFICIENCY_DEDUPLICATED_THIN,
config.SPACE_EFFICIENCY_DEDUPLICATED_COMPRESSED]):
logger.error("space efficiency value is not "
"supported {0}".format(space_efficiency))
raise array_errors.SpaceEfficiencyNotSupported(
space_efficiency)
logger.info("Finished validate_supported_space_efficiency")
def _convert_size_bytes(self, size_in_bytes):
# SVC volume size must be the multiple of 512 bytes
ret = size_in_bytes % self.BLOCK_SIZE_IN_BYTES
if ret > 0:
return size_in_bytes - ret + 512
return size_in_bytes
def _get_wwn_by_volume_name_if_exists(self, volume_name):
cli_volume = self._get_cli_volume_if_exists(volume_name)
if not cli_volume:
return None
wwn = cli_volume.vdisk_UID
logger.debug("found wwn : {0}".format(wwn))
return wwn
def _lsvdisk_by_uid(self, vdisk_uid):
filter_value = 'vdisk_UID=' + vdisk_uid
try:
return self.client.svcinfo.lsvdisk(bytes=True, filtervalue=filter_value).as_single_element
except (svc_errors.CommandExecutionError, CLIFailureError) as ex:
if any(msg_id in ex.my_message for msg_id in (NON_ASCII_CHARS, INVALID_FILTER_VALUE)):
raise array_errors.IllegalObjectID(ex.my_message)
raise ex
def _get_cli_volume_by_wwn(self, volume_id, not_exist_err=False):
cli_volume = self._lsvdisk_by_uid(volume_id)
if not cli_volume:
volume_nguid = convert_scsi_id_to_nguid(volume_id)
cli_volume = self._lsvdisk_by_uid(volume_nguid)
if not cli_volume and not_exist_err:
raise array_errors.ObjectNotFoundError(volume_id)
return cli_volume
def _get_volume_name_by_wwn_if_exists(self, volume_id):
cli_volume = self._get_cli_volume_by_wwn(volume_id)
if not cli_volume:
return None
vol_name = cli_volume.name
logger.debug("found volume name : {0}".format(vol_name))
return vol_name
def _get_volume_name_by_wwn(self, volume_id):
vol_name = self._get_volume_name_by_wwn_if_exists(volume_id)
if not vol_name:
raise array_errors.ObjectNotFoundError(volume_id)
return vol_name
def _create_cli_volume(self, name, size_in_bytes, space_efficiency, pool, io_group, volume_group=None):
logger.info("creating volume with name : {}. size : {} . in pool : {} with parameters : {}".format(
name, size_in_bytes, pool, space_efficiency))
try:
size = self._convert_size_bytes(size_in_bytes)
cli_kwargs = build_kwargs_from_parameters(space_efficiency, pool, io_group,
volume_group, name, size)
self.client.svctask.mkvolume(**cli_kwargs)
except (svc_errors.CommandExecutionError, CLIFailureError) as ex:
if is_warning_message(ex.my_message):
logger.warning("exception encountered during creation of volume {0}: {1}".format(name,
ex.my_message))
else:
logger.error("Cannot create volume {0}, Reason is: {1}".format(name, ex))
if OBJ_ALREADY_EXIST in ex.my_message:
raise array_errors.VolumeAlreadyExists(name, self.endpoint)
if NAME_NOT_EXIST_OR_MEET_RULES in ex.my_message:
raise array_errors.InvalidArgumentError(ex.my_message)
if POOL_NOT_MATCH_VOL_SPACE_EFFICIENCY in ex.my_message or NOT_REDUCTION_POOL in ex.my_message:
raise array_errors.PoolDoesNotMatchSpaceEfficiency(pool, space_efficiency, ex)
if NOT_ENOUGH_EXTENTS_IN_POOL_CREATE in ex.my_message:
raise array_errors.NotEnoughSpaceInPool(id_or_name=pool)
if any(msg_id in ex.my_message for msg_id in (NON_ASCII_CHARS, INVALID_NAME, TOO_MANY_CHARS)):
raise array_errors.IllegalObjectName(ex.my_message)
raise ex
logger.info("finished creating cli volume : {}".format(name))
@retry(svc_errors.StorageArrayClientException, tries=5, delay=1)
def _rollback_copy_to_target_volume(self, target_volume_name):
self._delete_unstarted_fcmap_if_exists(target_volume_name)
def _copy_to_target_volume(self, target_volume_name, source_volume_name):
logger.debug("copying volume {0} data to volume {1}.".format(source_volume_name,
target_volume_name))
try:
return self._create_and_start_fcmap(source_volume_name, target_volume_name, is_copy=True)
except (svc_errors.CommandExecutionError, CLIFailureError) as ex:
logger.error("Failed to copy to '{0}': {1}".format(target_volume_name, ex))
logger.info("rolling back copy to '{0}'".format(target_volume_name))
self._rollback_copy_to_target_volume(target_volume_name)
raise ex
def copy_to_existing_volume(self, volume_id, source_id, source_capacity_in_bytes,
minimum_volume_size_in_bytes):
source_name = self._get_volume_name_by_wwn(source_id)
target_volume_name = self._get_volume_name_by_wwn(volume_id)
self._copy_to_target_volume(target_volume_name, source_name)
def create_volume(self, name, size_in_bytes, space_efficiency, pool, io_group, volume_group, flashcopy_2):
self._create_cli_volume(name, size_in_bytes, space_efficiency, pool, io_group, volume_group)
cli_volume = self._get_cli_volume(name)
return self._generate_volume_response(cli_volume)
def _delete_volume_by_name(self, volume_name, not_exist_err=True):
logger.info("deleting volume with name : {0}".format(volume_name))
try:
self.client.svctask.rmvolume(vdisk_id=volume_name)
except (svc_errors.CommandExecutionError, CLIFailureError) as ex:
if is_warning_message(ex.my_message):
logger.warning("exception encountered during deletion of volume {}: {}".format(volume_name,
ex.my_message))
else:
logger.error("Failed to delete volume {}".format(volume_name))
if (OBJ_NOT_FOUND in ex.my_message or VOL_NOT_FOUND in ex.my_message) and not_exist_err:
raise array_errors.ObjectNotFoundError(volume_name)
raise ex
def delete_volume(self, volume_id):
logger.info("Deleting volume with id : {0}".format(volume_id))
self._delete_volume(volume_id)
logger.info("Finished volume deletion. id : {0}".format(volume_id))
def get_snapshot(self, volume_id, snapshot_name, pool, flashcopy_2):
logger.debug("Get snapshot : {}".format(snapshot_name))
if flashcopy_2:
if self._is_addsnapshot_supported():
cli_snapshot = self._get_cli_snapshot_by_name(snapshot_name)
if not cli_snapshot:
return None
source_cli_volume = self._get_cli_volume_by_wwn(volume_id)
return self._generate_snapshot_response_from_cli_snapshot(cli_snapshot, source_cli_volume)
raise array_errors.Flashcopy2NotSupportedMessage(volume_id)
target_cli_volume = self._get_cli_volume_if_exists(snapshot_name)
if not target_cli_volume:
return None
return self._generate_snapshot_response_with_verification(target_cli_volume)
def get_object_by_id(self, object_id, object_type):
cli_object = self._get_cli_volume_by_wwn(object_id)
if not cli_object:
return None
if object_type is controller_config.SNAPSHOT_TYPE_NAME:
return self._generate_snapshot_response_with_verification(cli_object)
cli_volume = self._get_cli_volume(cli_object.name)
return self._generate_volume_response(cli_volume)
def _create_similar_volume(self, source_cli_volume, target_volume_name, space_efficiency, pool):
logger.info("creating target cli volume '{0}' from source volume '{1}'".format(target_volume_name,
source_cli_volume.name))
if not space_efficiency:
space_efficiency = _get_cli_volume_space_efficiency(source_cli_volume)
size_in_bytes = int(source_cli_volume.capacity)
if not pool:
pool = self._get_volume_pools(source_cli_volume)[0]
io_group = source_cli_volume.IO_group_name
self._create_cli_volume(target_volume_name, size_in_bytes, space_efficiency, pool, io_group)
def _create_fcmap(self, source_volume_name, target_volume_name, is_copy):
logger.info("creating FlashCopy Mapping from '{0}' to '{1}'".format(source_volume_name, target_volume_name))
mkfcmap_kwargs = {} if is_copy else {'copyrate': 0}
try:
self.client.svctask.mkfcmap(source=source_volume_name, target=target_volume_name, **mkfcmap_kwargs)
except (svc_errors.CommandExecutionError, CLIFailureError) as ex:
if is_warning_message(ex.my_message):
logger.warning("exception encountered during FlashCopy Mapping creation"
" for source '{0}' and target '{1}': {2}".format(source_volume_name,
target_volume_name,
ex.my_message))
else:
if FCMAP_ALREADY_EXIST in ex.my_message:
logger.info("FlashCopy Mapping already exists"
" for source '{0}' and target '{1}'".format(source_volume_name,
target_volume_name))
else:
raise ex
def _start_fcmap(self, fcmap_id):
logger.info("starting FlashCopy Mapping '{0}'".format(fcmap_id))
try:
self.client.svctask.startfcmap(prep=True, object_id=fcmap_id)
except (svc_errors.CommandExecutionError, CLIFailureError) as ex:
if is_warning_message(ex.my_message):
logger.warning("exception encountered while starting"
" FlashCopy Mapping '{}': {}".format(fcmap_id,
ex.my_message))
else:
if FCMAP_ALREADY_COPYING in ex.my_message:
logger.info("FlashCopy Mapping '{0}' already copying".format(fcmap_id))
else:
raise ex
def _create_and_start_fcmap(self, source_volume_name, target_volume_name, is_copy):
self._create_fcmap(source_volume_name, target_volume_name, is_copy)
target_cli_volume = self._get_cli_volume(target_volume_name)
self._start_fcmap(target_cli_volume.FC_id)
return target_cli_volume
def _delete_fcmap(self, fcmap_id, force):
logger.info("deleting fcmap with id : {0}".format(fcmap_id))
try:
self.client.svctask.rmfcmap(object_id=fcmap_id, force=force)
except (svc_errors.CommandExecutionError, CLIFailureError) as ex:
if is_warning_message(ex.my_message):
logger.warning("exception encountered during fcmap '{}' deletion: {}".format(fcmap_id,
ex.my_message))
else:
logger.error("Failed to delete fcmap '{0}': {1}".format(fcmap_id, ex))
raise ex
def _stop_fcmap(self, fcmap_id):
logger.info("stopping fcmap with id : {0}".format(fcmap_id))
try:
self.client.svctask.stopfcmap(object_id=fcmap_id)
except (svc_errors.CommandExecutionError, CLIFailureError) as ex:
if is_warning_message(ex.my_message):
logger.warning("exception encountered while stopping fcmap '{}': {}".format(fcmap_id,
ex.my_message))
else:
if FCMAP_ALREADY_IN_THE_STOPPED_STATE in ex.my_message:
logger.info("fcmap '{0}' is already in the stopped state".format(fcmap_id))
else:
logger.error("Failed to stop fcmap '{0}': {1}".format(fcmap_id, ex))
raise ex
def _safe_stop_and_delete_fcmap(self, fcmap):
if not self._is_in_remote_copy_relationship(fcmap):
self._stop_fcmap(fcmap.id)
self._delete_fcmap(fcmap.id, force=True)
    def _safe_delete_fcmaps(self, object_name, fcmaps):
        """Delete the finished, non-remote-copy fcmaps of the object.

        A non-remote-copy mapping whose status is not 'done' or whose
        copy_rate is "0" marks the object as still in use, and nothing is
        deleted.

        Raises:
            ObjectIsStillInUseError: if any such fcmap is still in use.
        """
        fcmaps_to_delete = []
        fcmaps_in_use = []
        for fcmap in fcmaps:
            if not self._is_in_remote_copy_relationship(fcmap):
                if fcmap.status != FCMAP_STATUS_DONE or fcmap.copy_rate == "0":
                    fcmaps_in_use.append(fcmap)
                else:
                    fcmaps_to_delete.append(fcmap)
        if fcmaps_in_use:
            raise array_errors.ObjectIsStillInUseError(id_or_name=object_name, used_by=fcmaps_in_use)
        for fcmap in fcmaps_to_delete:
            self._delete_fcmap(fcmap.id, force=False)
def _is_in_remote_copy_relationship(self, fcmap):
return fcmap.rc_controlled == YES
    def _delete_volume(self, volume_id, is_snapshot=False):
        """Delete the volume (or snapshot target volume) identified by its WWN.

        FlashCopy mappings around the volume are cleaned up first: mappings
        where it is the source are deleted when safe, then the mapping where
        it is the target is stopped and deleted, and finally the volume
        itself is removed.

        Raises:
            ObjectNotFoundError: when is_snapshot is set but the volume is not
                a FlashCopy target (i.e. it is a regular volume, not a snapshot).
        """
        cli_volume = self._get_cli_volume_by_wwn(volume_id, not_exist_err=True)
        object_name = cli_volume.name
        if is_snapshot and not cli_volume.FC_id:
            raise array_errors.ObjectNotFoundError(object_name)
        fcmap_as_target = self._get_fcmap_as_target_if_exists(object_name)
        if is_snapshot and not fcmap_as_target:
            raise array_errors.ObjectNotFoundError(object_name)
        fcmaps_as_source = self._get_fcmaps_as_source_if_exist(object_name)
        if fcmaps_as_source:
            self._safe_delete_fcmaps(object_name, fcmaps_as_source)
        if fcmap_as_target:
            self._safe_stop_and_delete_fcmap(fcmap_as_target)
        self._delete_volume_by_name(object_name)
    def _delete_unstarted_fcmap_if_exists(self, target_volume_name):
        """Delete the target volume's fcmap (without force) if volume and mapping exist.

        Returns the target CLI volume, or None when the volume does not exist.
        """
        target_cli_volume = self._get_cli_volume_if_exists(target_volume_name)
        if target_cli_volume and target_cli_volume.FC_id:
            self._delete_fcmap(target_cli_volume.FC_id, force=False)
        return target_cli_volume
    def _delete_target_volume_if_exists(self, target_cli_volume):
        """Delete the target volume when present; a missing volume is not an error."""
        if target_cli_volume:
            self._delete_volume_by_name(target_cli_volume.name, not_exist_err=False)
    @retry(svc_errors.StorageArrayClientException, tries=5, delay=1)
    def _rollback_create_snapshot(self, target_volume_name):
        """Undo a failed snapshot creation: remove the unstarted fcmap and target volume.

        Retried up to 5 times with a 1-second delay on
        StorageArrayClientException.
        """
        target_cli_volume = self._delete_unstarted_fcmap_if_exists(target_volume_name)
        self._delete_target_volume_if_exists(target_cli_volume)
    def _create_snapshot(self, target_volume_name, source_cli_volume, space_efficiency, pool):
        """Create a snapshot as a FlashCopy target volume and start the mapping.

        On any CLI failure the partially-created objects are rolled back
        before the exception is re-raised.
        """
        try:
            self._create_similar_volume(source_cli_volume, target_volume_name, space_efficiency, pool)
            return self._create_and_start_fcmap(source_cli_volume.name, target_volume_name, is_copy=False)
        except (svc_errors.CommandExecutionError, CLIFailureError) as ex:
            logger.error("Failed to create snapshot '{0}': {1}".format(target_volume_name, ex))
            logger.info("rolling back create snapshot '{0}'".format(target_volume_name))
            self._rollback_create_snapshot(target_volume_name)
            raise ex
    def _get_pool_site(self, pool):
        """Return the site name of the given pool.

        Raises:
            PoolDoesNotExist: when lsmdiskgrp finds no pool with that name.
        """
        filter_value = 'name={}'.format(pool)
        cli_pool = self.client.svcinfo.lsmdiskgrp(filtervalue=filter_value).as_single_element
        if cli_pool:
            return cli_pool.site_name
        raise array_errors.PoolDoesNotExist(pool, self.endpoint)
    def _is_cli_volume_in_site(self, cli_volume, site_name):
        """Tell whether any of the volume's pools belongs to the given site."""
        volume_pools = self._get_volume_pools(cli_volume)
        for pool in volume_pools:
            volume_site_name = self._get_pool_site(pool)
            if volume_site_name == site_name:
                return True
        return False
    def _get_rcrelationships_as_master_in_cluster(self, volume_name):
        """List rcrelationships where the volume is master and this array is the aux cluster."""
        filter_value = 'master_vdisk_name={}:aux_cluster_id={}'.format(volume_name, self.identifier)
        return self._lsrcrelationship(filter_value).as_list
    def _get_cli_volume_in_pool_site(self, volume_name, pool_name):
        """Return the CLI volume residing in the given pool's site.

        When pool_name is empty or contains ':' (multiple pools) the named
        volume is returned as-is. Otherwise, if the named volume is not in
        the pool's site, its remote-copy auxiliary counterparts are searched
        for one that is.

        Raises:
            RuntimeError: when no volume in the pool's site can be found.
        """
        cli_volume = self._get_cli_volume(volume_name)
        if not pool_name or ':' in pool_name:
            return cli_volume
        pool_site_name = self._get_pool_site(pool_name)
        if self._is_cli_volume_in_site(cli_volume, pool_site_name):
            return cli_volume
        rcrelationships = self._get_rcrelationships_as_master_in_cluster(volume_name)
        for rcrelationship in rcrelationships:
            other_cli_volume = self._get_cli_volume(rcrelationship.aux_vdisk_name)
            if self._is_cli_volume_in_site(other_cli_volume, pool_site_name):
                return other_cli_volume
        raise RuntimeError('could not find a volume for {} in site {}'.format(volume_name, pool_site_name))
    def create_snapshot(self, volume_id, snapshot_name, space_efficiency, pool, flashcopy_2):
        """Create a snapshot of the volume and return its response object.

        With flashcopy_2 the newer 'addsnapshot' CLI is used (raising
        Flashcopy2NotSupportedMessage when the array lacks it); otherwise a
        classic FlashCopy target volume is created.
        """
        logger.info("creating snapshot '{0}' from volume '{1}'".format(snapshot_name, volume_id))
        source_volume_name = self._get_volume_name_by_wwn(volume_id)
        source_cli_volume = self._get_cli_volume_in_pool_site(source_volume_name, pool)
        if flashcopy_2:
            if self._is_addsnapshot_supported():
                target_cli_snapshot = self._add_snapshot(snapshot_name, source_cli_volume, pool)
                snapshot = self._generate_snapshot_response_from_cli_snapshot(target_cli_snapshot, source_cli_volume)
            else:
                raise array_errors.Flashcopy2NotSupportedMessage(volume_id)
        else:
            target_cli_volume = self._create_snapshot(snapshot_name, source_cli_volume, space_efficiency, pool)
            snapshot = self._generate_snapshot_response_from_cli_volume(target_cli_volume, source_cli_volume.vdisk_UID)
        logger.info("finished creating snapshot '{0}' from volume '{1}'".format(snapshot_name, volume_id))
        return snapshot
    def _is_addsnapshot_supported(self):
        """Tell whether this array's CLI exposes the 'addsnapshot' task (FlashCopy 2 path)."""
        return hasattr(self.client.svctask, "addsnapshot")
    def _rmsnapshot(self, internal_snapshot_id):
        """Remove a FlashCopy-2 snapshot by its internal id.

        Raises:
            ObjectNotFoundError: if the array reports the snapshot does not exist.
        """
        try:
            self.client.svctask.rmsnapshot(snapshotid=internal_snapshot_id)
        except (svc_errors.CommandExecutionError, CLIFailureError) as ex:
            if SNAPSHOT_NOT_EXIST in ex.my_message:
                raise array_errors.ObjectNotFoundError(internal_snapshot_id)
            raise ex
    def delete_snapshot(self, snapshot_id, internal_snapshot_id):
        """Delete a snapshot, using the FlashCopy-2 path when supported and no volume id is given."""
        logger.info("Deleting snapshot with id : {0}".format(snapshot_id))
        if self._is_addsnapshot_supported() and not snapshot_id:
            self._rmsnapshot(internal_snapshot_id)
        else:
            self._delete_volume(snapshot_id, is_snapshot=True)
        logger.info("Finished snapshot deletion. id : {0}".format(snapshot_id))
def _get_host_ports(self, host, attribute_name):
ports = host.get(attribute_name, [])
return ports if isinstance(ports, list) else [ports]
    def _get_host_by_host_identifiers_slow(self, initiators):
        """Fallback host lookup: scan every host's ports against the initiators.

        Checks NVMe NQNs, FC WWPNs and iSCSI IQNs per host and collects the
        connectivity types that matched.

        Returns:
            (host_name, connectivity_types)

        Raises:
            HostNotFoundError: no host matched any initiator.
            MultipleHostsFoundError: different hosts matched different protocols.
        """
        logger.debug("Scanning all hosts for initiators : {0}".format(initiators))
        detailed_hosts_list = self._get_detailed_hosts_list()
        nvme_host, fc_host, iscsi_host = None, None, None
        connectivity_types = set()
        for host in detailed_hosts_list:
            host_nqns = self._get_host_ports(host, HOST_NQN)
            if initiators.is_array_nvme_nqn_match(host_nqns):
                nvme_host = host.name
                connectivity_types.add(config.NVME_OVER_FC_CONNECTIVITY_TYPE)
                logger.debug("found nvme nqn in list : {0} for host : "
                             "{1}".format(initiators.nvme_nqns, nvme_host))
            host_wwns = self._get_host_ports(host, HOST_WWPN)
            if initiators.is_array_wwns_match(host_wwns):
                fc_host = host.name
                connectivity_types.add(config.FC_CONNECTIVITY_TYPE)
                logger.debug("found fc wwns in list : {0} for host : "
                             "{1}".format(initiators.fc_wwns, fc_host))
            host_iqns = self._get_host_ports(host, HOST_ISCSI_NAME)
            if initiators.is_array_iscsi_iqns_match(host_iqns):
                iscsi_host = host.name
                connectivity_types.add(config.ISCSI_CONNECTIVITY_TYPE)
                logger.debug("found iscsi iqn in list : {0} for host : "
                             "{1}".format(initiators.iscsi_iqns, iscsi_host))
        if not connectivity_types:
            logger.debug("could not find host by using initiators: {0} ".format(initiators))
            raise array_errors.HostNotFoundError(initiators)
        host_name = self._get_host_name_if_equal(nvme_host, fc_host, iscsi_host)
        if not host_name:
            raise array_errors.MultipleHostsFoundError(initiators, fc_host)
        return host_name, list(connectivity_types)
def _get_host_names_by_wwpn(self, host_wwpn):
fabrics = self._lsfabric(wwpn=host_wwpn).as_list
return set(fabric.name for fabric in fabrics)
    def _lsnvmefabric(self, host_nqn):
        """List NVMe fabric logins for the given host NQN; CLI failures are re-raised."""
        try:
            return self.client.svcinfo.lsnvmefabric(remotenqn=host_nqn).as_list
        except(svc_errors.CommandExecutionError, CLIFailureError) as ex:
            logger.error("Failed to get nvme fabrics. Reason "
                         "is: {0}".format(ex))
            raise ex
    def _get_host_names_by_nqn(self, nqn):
        """Return the set of host object names logged in with the given NQN."""
        nvme_fabrics = self._lsnvmefabric(nqn)
        return set(nvme_fabric.object_name for nvme_fabric in nvme_fabrics)
    def _lshostiplogin(self, iqn):
        """Return the host IP login entry for the IQN, or None when it does not exist."""
        try:
            return self.client.svcinfo.lshostiplogin(object_id=iqn).as_single_element
        except(svc_errors.CommandExecutionError, CLIFailureError) as ex:
            # a missing login is an expected outcome, not an error
            if SPECIFIED_OBJ_NOT_EXIST in ex.my_message:
                return None
            logger.error("Failed to get iscsi host. Reason "
                         "is: {0}".format(ex))
            raise ex
    def _get_host_name_by_iqn(self, iqn):
        """Return the name of the host logged in with the given IQN, or None."""
        iscsi_login = self._lshostiplogin(iqn)
        if iscsi_login:
            return iscsi_login.host_name
        return None
    def _get_host_names_and_connectivity_types(self, initiators):
        """Resolve candidate host names per initiator via fabric/login queries.

        Iterates (connectivity_type, initiator) pairs and collects every host
        name found for each protocol, plus the set of connectivity types that
        produced a match.

        Returns:
            (host_names, connectivity_types) as two sets.
        """
        host_names = set()
        connectivity_types = set()
        for connectivity_type, initiator in initiators:
            if connectivity_type == config.NVME_OVER_FC_CONNECTIVITY_TYPE:
                nvme_host_names = self._get_host_names_by_nqn(initiator)
                if nvme_host_names:
                    host_names.update(nvme_host_names)
                    connectivity_types.add(config.NVME_OVER_FC_CONNECTIVITY_TYPE)
            elif connectivity_type == config.FC_CONNECTIVITY_TYPE:
                fc_host_names = self._get_host_names_by_wwpn(initiator)
                if fc_host_names:
                    host_names.update(fc_host_names)
                    connectivity_types.add(config.FC_CONNECTIVITY_TYPE)
            elif connectivity_type == config.ISCSI_CONNECTIVITY_TYPE:
                iscsi_host_name = self._get_host_name_by_iqn(initiator)
                if iscsi_host_name:
                    host_names.add(iscsi_host_name)
                    connectivity_types.add(config.ISCSI_CONNECTIVITY_TYPE)
        return host_names, connectivity_types
    def get_host_by_host_identifiers(self, initiators):
        """Find the single host matching the initiators.

        Fast path: fabric/login queries. If they yield nothing, falls back to
        scanning all hosts (_get_host_by_host_identifiers_slow).

        Raises:
            MultipleHostsFoundError: when more than one host matched.
        """
        logger.debug("Getting host name for initiators : {0}".format(initiators))
        host_names, connectivity_types = self._get_host_names_and_connectivity_types(initiators)
        host_names = set(filter(None, host_names))
        if len(host_names) > 1:
            raise array_errors.MultipleHostsFoundError(initiators, host_names)
        if len(host_names) == 1:
            return host_names.pop(), connectivity_types
        return self._get_host_by_host_identifiers_slow(initiators)
    def _get_detailed_hosts_list(self):
        """Return per-host detail objects for every host on the array.

        Lists all hosts, then fetches their details with one batched raw
        command (one lshost per host) to avoid a round-trip per host.
        """
        logger.debug("Getting detailed hosts list on array {0}".format(self.endpoint))
        hosts_list = self.client.svcinfo.lshost()
        if not hosts_list:
            return []
        # get all hosts details by sending a single batch of commands, in which each command is per host
        detailed_hosts_list_cmd = self._get_detailed_hosts_list_cmd(hosts_list)
        logger.debug("Sending getting detailed hosts list commands batch")
        raw_response = self.client.send_raw_command(detailed_hosts_list_cmd)
        response = SVCResponse(raw_response, {'delim': ' '})
        return response.as_list
def _get_detailed_hosts_list_cmd(self, host_list):
writer = StringIO()
for host in host_list:
writer.write(LIST_HOSTS_CMD_FORMAT.format(HOST_ID=host.id))
return writer.getvalue()
    def _get_cli_host(self, id_or_name):
        """Return the CLI host object for the given id or name.

        Raises:
            HostNotFoundError: when no such host exists.
        """
        cli_host = self.client.svcinfo.lshost(object_id=id_or_name).as_single_element
        if not cli_host:
            raise array_errors.HostNotFoundError(id_or_name)
        return cli_host
    def get_host_by_name(self, host_name):
        """Return a Host object with the host's ports and derived connectivity types."""
        cli_host = self._get_cli_host(host_name)
        nvme_nqns = self._get_host_ports(cli_host, HOST_NQN)
        fc_wwns = self._get_host_ports(cli_host, HOST_WWPN)
        iscsi_iqns = self._get_host_ports(cli_host, HOST_ISCSI_NAME)
        # a connectivity type is reported only when the host has ports of that kind
        connectivity_types = []
        if nvme_nqns:
            connectivity_types.append(config.NVME_OVER_FC_CONNECTIVITY_TYPE)
        if fc_wwns:
            connectivity_types.append(config.FC_CONNECTIVITY_TYPE)
        if iscsi_iqns:
            connectivity_types.append(config.ISCSI_CONNECTIVITY_TYPE)
        return Host(name=cli_host.name, connectivity_types=connectivity_types, nvme_nqns=nvme_nqns,
                    fc_wwns=fc_wwns, iscsi_iqns=iscsi_iqns)
    def _lsvdiskhostmap(self, volume_name):
        """List the volume's host mappings; CLI failures become ObjectNotFoundError."""
        try:
            return self.client.svcinfo.lsvdiskhostmap(vdisk_name=volume_name)
        except(svc_errors.CommandExecutionError, CLIFailureError) as ex:
            logger.error(ex)
            raise array_errors.ObjectNotFoundError(volume_name)
    def get_volume_mappings(self, volume_id):
        """Return a {host_name: scsi_lun_id} map of the volume's host mappings."""
        logger.debug("Getting volume mappings for volume id : "
                     "{0}".format(volume_id))
        volume_name = self._get_volume_name_by_wwn(volume_id)
        logger.debug("volume name : {0}".format(volume_name))
        mapping_list = self._lsvdiskhostmap(volume_name)
        luns_by_host = {}
        for mapping in mapping_list:
            logger.debug("mapping for volume is :{0}".format(mapping))
            luns_by_host[mapping.get('host_name', '')] = mapping.get('SCSI_id', '')
        return luns_by_host
    def _get_used_lun_ids_from_host(self, host_name):
        """Return the set of SCSI lun ids already mapped on the host.

        Raises:
            HostNotFoundError: when the CLI query for the host fails.
        """
        logger.debug("getting used lun ids for host :{0}".format(host_name))
        luns_in_use = set()
        try:
            for mapping in self.client.svcinfo.lshostvdiskmap(host=host_name):
                luns_in_use.add(mapping.get('SCSI_id', ''))
        except(svc_errors.CommandExecutionError, CLIFailureError) as ex:
            logger.error(ex)
            raise array_errors.HostNotFoundError(host_name)
        logger.debug("The used lun ids for host :{0}".format(luns_in_use))
        return luns_in_use
    def _get_free_lun(self, host_name):
        """Pick a free SCSI lun id for the host.

        Chooses randomly among the first LUN_INTERVAL free ids in
        [MIN_LUN_NUMBER, MAX_LUN_NUMBER] -- randomization presumably reduces
        collisions between concurrent mapping requests (TODO confirm).

        Raises:
            NoAvailableLunError: when no lun id is free.
        """
        logger.debug("getting random free lun id for "
                     "host :{0}".format(host_name))
        lun = None
        luns_in_use = self._get_used_lun_ids_from_host(host_name)
        # Today we have SS_MAX_HLUN_MAPPINGS_PER_HOST as 2048 on high end
        # platforms (SVC / V7000 etc.) and 512 for the lower
        # end platforms (V3500 etc.). This limits the number of volumes that
        # can be mapped to a single host. (Note that some hosts such as linux
        # do not support more than 255 or 511 mappings today irrespective of
        # our constraint).
        lun_range_gen = range(self.MIN_LUN_NUMBER, self.MAX_LUN_NUMBER + 1)
        lun_range = [str(lun) for lun in lun_range_gen]
        free_luns = [lun for lun in lun_range if lun not in luns_in_use]
        free_luns_in_interval = free_luns[:LUN_INTERVAL]
        if free_luns:
            lun = choice(free_luns_in_interval)
        else:
            raise array_errors.NoAvailableLunError(host_name)
        logger.debug("The chosen available lun is : {0}".format(lun))
        return lun
    def map_volume(self, volume_id, host_name, connectivity_type):
        """Map the volume to the host and return the assigned SCSI lun id.

        For NVMe-over-FC no lun id is chosen (the array assigns it) and an
        empty string is returned. CLI error messages are translated into the
        controller's typed exceptions.

        Raises:
            HostNotFoundError, ObjectNotFoundError, LunAlreadyInUseError,
            MappingError: depending on the CLI failure message.
        """
        logger.debug("mapping volume : {0} to host : "
                     "{1}".format(volume_id, host_name))
        volume_name = self._get_volume_name_by_wwn(volume_id)
        cli_kwargs = {
            'host': host_name,
            'object_id': volume_name,
            'force': True
        }
        lun = ""
        try:
            if connectivity_type != config.NVME_OVER_FC_CONNECTIVITY_TYPE:
                lun = self._get_free_lun(host_name)
                cli_kwargs.update({'scsi': lun})
            self.client.svctask.mkvdiskhostmap(**cli_kwargs)
        except (svc_errors.CommandExecutionError, CLIFailureError) as ex:
            if is_warning_message(ex.my_message):
                logger.warning("exception encountered during volume {0} mapping to host {1}: {2}".format(volume_name,
                                                                                                          host_name,
                                                                                                          ex.my_message))
            else:
                logger.error("Map volume {0} to host {1} failed. Reason "
                             "is: {2}".format(volume_name, host_name, ex))
                if NAME_NOT_EXIST_OR_MEET_RULES in ex.my_message:
                    raise array_errors.HostNotFoundError(host_name)
                if SPECIFIED_OBJ_NOT_EXIST in ex.my_message:
                    raise array_errors.ObjectNotFoundError(volume_name)
                if LUN_ALREADY_IN_USE in ex.my_message:
                    raise array_errors.LunAlreadyInUseError(lun,
                                                             host_name)
                raise array_errors.MappingError(volume_name, host_name, ex)
        return str(lun)
    def unmap_volume(self, volume_id, host_name):
        """Unmap the volume from the host.

        CLI error messages are translated into the controller's typed
        exceptions; warning-level messages are only logged.

        Raises:
            HostNotFoundError, ObjectNotFoundError,
            VolumeAlreadyUnmappedError, UnmappingError: per the CLI failure.
        """
        logger.debug("unmapping volume : {0} from host : "
                     "{1}".format(volume_id, host_name))
        volume_name = self._get_volume_name_by_wwn(volume_id)
        cli_kwargs = {
            'host': host_name,
            'vdisk_id': volume_name
        }
        try:
            self.client.svctask.rmvdiskhostmap(**cli_kwargs)
        except (svc_errors.CommandExecutionError, CLIFailureError) as ex:
            if is_warning_message(ex.my_message):
                logger.warning("exception encountered during volume {0}"
                               " unmapping from host {1}: {2}".format(volume_name,
                                                                      host_name,
                                                                      ex.my_message))
            else:
                logger.error("unmapping volume {0} from host {1} failed. Reason "
                             "is: {2}".format(volume_name, host_name, ex))
                if NAME_NOT_EXIST_OR_MEET_RULES in ex.my_message:
                    raise array_errors.HostNotFoundError(host_name)
                if OBJ_NOT_FOUND in ex.my_message:
                    raise array_errors.ObjectNotFoundError(volume_name)
                if VOL_ALREADY_UNMAPPED in ex.my_message:
                    raise array_errors.VolumeAlreadyUnmappedError(
                        volume_name)
                raise array_errors.UnmappingError(volume_name,
                                                  host_name, ex)
    def _get_array_iqns_by_node_id(self):
        """Return a {node_id: iscsi_name} map for the array's online nodes."""
        logger.debug("Getting array nodes id and iscsi name")
        nodes_list = self.client.svcinfo.lsnode()
        # offline nodes are excluded -- their targets are unreachable
        array_iqns_by_id = {node.id: node.iscsi_name for node in nodes_list
                            if node.status.lower() == "online"}
        logger.debug("Found iqns by node id: {}".format(array_iqns_by_id))
        return array_iqns_by_id
    def _list_ip_ports(self, portset_id):
        """List the array's IP ports.

        With a portset id the newer lsip command is used (filtered by the
        portset); otherwise lsportip lists configured, non-failover ports.

        Raises:
            NoIscsiTargetsFoundError: when the CLI query fails.
        """
        try:
            if portset_id:
                filter_value = 'portset_id={}'.format(portset_id)
                return self.client.svcinfo.lsip(filtervalue=filter_value)
            return self.client.svcinfo.lsportip(filtervalue='state=configured:failover=no')
        except (svc_errors.CommandExecutionError, CLIFailureError) as ex:
            logger.error("Get iscsi targets failed. Reason is: {}".format(ex))
            raise array_errors.NoIscsiTargetsFoundError(self.endpoint)
@staticmethod
def _create_ips_by_node_id_map(ports):
ips_by_node_id = defaultdict(list)
for port in ports:
if port.get('IP_address'):
ips_by_node_id[port.node_id].append(port.IP_address)
if port.get('IP_address_6'):
ipv6 = port.IP_address_6.join('[]')
ips_by_node_id[port.node_id].append(ipv6)
return dict(ips_by_node_id)
@staticmethod
def _unify_ips_by_iqn(iqns_by_node_id, ips_by_node_id):
ips_by_iqn = defaultdict(list)
for node_id, iqn in iqns_by_node_id.items():
ips = ips_by_node_id.get(node_id, [])
ips_by_iqn[iqn].extend(ips)
return dict(ips_by_iqn)
    def _get_iscsi_targets_by_node_id(self, host_name):
        """Return {node_id: [portal IPs]} for the ports in the host's portset."""
        portset_id = self._get_host_portset_id(host_name)
        ports = self._list_ip_ports(portset_id)
        return self._create_ips_by_node_id_map(ports)
    def get_iscsi_targets_by_iqn(self, host_name):
        """Return {target_iqn: [portal IPs]} reachable by the host.

        Raises:
            NoIscsiTargetsFoundError: when no target has any portal IP.
        """
        logger.debug("Getting iscsi targets by iqn")
        iqns_by_node_id = self._get_array_iqns_by_node_id()
        ips_by_node_id = self._get_iscsi_targets_by_node_id(host_name)
        ips_by_iqn = self._unify_ips_by_iqn(iqns_by_node_id, ips_by_node_id)
        if ips_by_iqn and any(ips_by_iqn.values()):
            logger.debug("Found iscsi target IPs by iqn: {}".format(ips_by_iqn))
            return ips_by_iqn
        raise array_errors.NoIscsiTargetsFoundError(self.endpoint)
    def _lsfabric(self, **kwargs):
        """Run lsfabric with the given filters; CLI failures are logged and re-raised."""
        try:
            return self.client.svcinfo.lsfabric(**kwargs)
        except(svc_errors.CommandExecutionError, CLIFailureError) as ex:
            logger.error("Failed to get fabrics for {0}. Reason "
                         "is: {1}".format(kwargs, ex))
            raise ex
    def get_array_fc_wwns(self, host_name):
        """Return the array-side FC port WWPNs visible to the host.

        Only fabric entries in 'active' or 'inactive' state are included.
        """
        logger.debug("Getting the connected fc port wwn value from array "
                     "related to host : {}.".format(host_name))
        fc_port_wwns = []
        fc_wwns = self._lsfabric(host=host_name)
        for wwn in fc_wwns:
            state = wwn.get('state', '')
            if state in ('active', 'inactive'):
                fc_port_wwns.append(wwn.get('local_wwpn', ''))
        logger.debug("Getting fc wwns : {}".format(fc_port_wwns))
        return fc_port_wwns
    def _get_host_portset_id(self, host_name):
        """Return the host's portset id (None when the host has no such attribute)."""
        cli_host = self._get_cli_host(host_name)
        return cli_host.get(HOST_PORTSET_ID)
    def _get_replication_endpoint_type(self, rcrelationship):
        """Return this array's role in the relationship: master or aux."""
        if self.identifier == rcrelationship.master_cluster_id:
            return ENDPOINT_TYPE_MASTER
        return ENDPOINT_TYPE_AUX
@staticmethod
def _get_other_endpoint_type(endpoint_type):
if endpoint_type == ENDPOINT_TYPE_MASTER:
return ENDPOINT_TYPE_AUX
return ENDPOINT_TYPE_MASTER
    def _get_replication_other_endpoint_type(self, rcrelationship):
        """Return the peer array's role in the relationship (the opposite of ours)."""
        endpoint_type = self._get_replication_endpoint_type(rcrelationship)
        return self._get_other_endpoint_type(endpoint_type)
    @staticmethod
    def _is_replication_idle(rcrelationship):
        """Tell whether the relationship is in the idle state."""
        return rcrelationship.state == RCRELATIONSHIP_STATE_IDLE
@staticmethod
def _is_replication_disconnected(rcrelationship):
return 'disconnected' in rcrelationship.state
    @staticmethod
    def _is_replication_ready(rcrelationship):
        """Tell whether the relationship is in the ready state."""
        return rcrelationship.state == RCRELATIONSHIP_STATE_READY
    def _is_replication_endpoint_primary(self, rcrelationship, endpoint_type=None):
        """Tell whether the given (or this array's) endpoint is the primary.

        Returns None when the relationship has no primary set yet.
        """
        if not endpoint_type:
            endpoint_type = self._get_replication_endpoint_type(rcrelationship)
        if rcrelationship.primary:
            return rcrelationship.primary == endpoint_type
        return None
    @staticmethod
    def _get_replication_copy_type(rcrelationship):
        """Map the CLI copy type ('global' -> async, otherwise sync)."""
        if rcrelationship.copy_type == 'global':
            return config.REPLICATION_COPY_TYPE_ASYNC
        return config.REPLICATION_COPY_TYPE_SYNC
    def _generate_replication_response(self, rcrelationship, volume_internal_id, other_volume_internal_id):
        """Build the Replication response object from a CLI rcrelationship."""
        copy_type = self._get_replication_copy_type(rcrelationship)
        is_ready = self._is_replication_ready(rcrelationship)
        is_primary = self._is_replication_endpoint_primary(rcrelationship)
        return Replication(name=rcrelationship.name,
                           volume_internal_id=volume_internal_id,
                           other_volume_internal_id=other_volume_internal_id,
                           copy_type=copy_type,
                           is_ready=is_ready,
                           is_primary=is_primary)
    def _lsrcrelationship(self, filter_value):
        """Run lsrcrelationship with the given filtervalue string."""
        return self.client.svcinfo.lsrcrelationship(filtervalue=filter_value)
    def _get_rcrelationship_by_name(self, replication_name, not_exist_error=True):
        """Return the rcrelationship with the given name, or None.

        Raises:
            ObjectNotFoundError: when missing and not_exist_error is True.
        """
        filter_value = 'RC_rel_name={0}'.format(replication_name)
        rcrelationship = self._lsrcrelationship(filter_value).as_single_element
        if not rcrelationship and not_exist_error:
            raise array_errors.ObjectNotFoundError(replication_name)
        return rcrelationship
    def _get_rcrelationships(self, cli_volume_id, other_cli_volume_id, other_system_id, as_master):
        """List rcrelationships between the two volumes across the two systems.

        as_master selects which side the local volume plays: master (True) or
        aux (False).
        """
        endpoint_type = ENDPOINT_TYPE_AUX
        other_endpoint_type = ENDPOINT_TYPE_MASTER
        if as_master:
            endpoint_type = ENDPOINT_TYPE_MASTER
            other_endpoint_type = ENDPOINT_TYPE_AUX
        filter_value = '{END}_vdisk_id={VDISK_ID}:' \
                       '{OTHER_END}_vdisk_id={OTHER_VDISK_ID}:' \
                       '{OTHER_END}_cluster_id={OTHER_CLUSTER_ID}'.format(END=endpoint_type, VDISK_ID=cli_volume_id,
                                                                          OTHER_END=other_endpoint_type,
                                                                          OTHER_VDISK_ID=other_cli_volume_id,
                                                                          OTHER_CLUSTER_ID=other_system_id)
        return self._lsrcrelationship(filter_value).as_list
    def _get_rcrelationship(self, cli_volume_id, other_cli_volume_id, other_system_id):
        """Return the single rcrelationship between the two volumes, or None.

        Both directions (local volume as master and as aux) are queried.

        Raises:
            RuntimeError: when more than one relationship matches.
        """
        rcrelationships = self._get_rcrelationships(cli_volume_id, other_cli_volume_id,
                                                    other_system_id, as_master=True)
        rcrelationships.extend(self._get_rcrelationships(cli_volume_id, other_cli_volume_id,
                                                         other_system_id, as_master=False))
        if len(rcrelationships) > 1:
            error_message = ('found {0} rcrelationships for volume id {1} '
                             'with volume id {2} of system {3}: {4}'.format(len(rcrelationships),
                                                                            cli_volume_id,
                                                                            other_cli_volume_id,
                                                                            other_system_id,
                                                                            rcrelationships))
            logger.error(error_message)
            raise RuntimeError(error_message)
        return rcrelationships[0] if rcrelationships else None
    def get_replication(self, volume_internal_id, other_volume_internal_id, other_system_id):
        """Return the Replication between the two volumes, or None when absent."""
        rcrelationship = self._get_rcrelationship(volume_internal_id, other_volume_internal_id, other_system_id)
        if not rcrelationship:
            return None
        logger.info("found rcrelationship: {}".format(rcrelationship))
        return self._generate_replication_response(rcrelationship, volume_internal_id, other_volume_internal_id)
    def _create_rcrelationship(self, master_cli_volume_id, aux_cli_volume_id, other_system_id, copy_type):
        """Create a remote-copy relationship and return its id.

        Returns None when the CLI reports only a warning-level message.
        Non-warning CLI failures are re-raised.
        """
        logger.info("creating remote copy relationship for master volume id: {0} "
                    "and auxiliary volume id: {1} with system {2} using {3} copy type".format(master_cli_volume_id,
                                                                                              aux_cli_volume_id,
                                                                                              other_system_id,
                                                                                              copy_type))
        kwargs = build_create_replication_kwargs(master_cli_volume_id, aux_cli_volume_id, other_system_id, copy_type)
        try:
            svc_response = self.client.svctask.mkrcrelationship(**kwargs)
            return self._get_id_from_response(svc_response)
        except (svc_errors.CommandExecutionError, CLIFailureError) as ex:
            if is_warning_message(ex.my_message):
                logger.warning("exception encountered during creation of rcrelationship for volume id {0} "
                               "with volume id {1} of system {2}: {3}".format(master_cli_volume_id,
                                                                              aux_cli_volume_id,
                                                                              other_system_id,
                                                                              ex))
            else:
                logger.error("failed to create rcrelationship for volume id {0} "
                             "with volume id {1} of system {2}: {3}".format(master_cli_volume_id,
                                                                            aux_cli_volume_id,
                                                                            other_system_id,
                                                                            ex))
                raise ex
        return None
    def _start_rcrelationship(self, rcrelationship_id, primary_endpoint_type=None, force=False):
        """Start the remote-copy relationship; failures are only logged (best effort)."""
        logger.info("starting remote copy relationship with id: {} primary: {} force: {}".format(rcrelationship_id,
                                                                                                 primary_endpoint_type,
                                                                                                 force))
        try:
            kwargs = build_start_replication_kwargs(rcrelationship_id, primary_endpoint_type, force)
            self.client.svctask.startrcrelationship(**kwargs)
        except (svc_errors.CommandExecutionError, CLIFailureError) as ex:
            if is_warning_message(ex.my_message):
                logger.warning("exception encountered while starting rcrelationship '{}': {}".format(rcrelationship_id,
                                                                                                      ex.my_message))
            else:
                logger.warning("failed to start rcrelationship '{}': {}".format(rcrelationship_id, ex))
    def create_replication(self, volume_internal_id, other_volume_internal_id, other_system_id, copy_type):
        """Create a remote-copy relationship between the two volumes and start it."""
        rc_id = self._create_rcrelationship(volume_internal_id, other_volume_internal_id, other_system_id, copy_type)
        self._start_rcrelationship(rc_id)
    def _stop_rcrelationship(self, rcrelationship_id, add_access_to_secondary=False):
        """Stop the remote-copy relationship; failures are only logged (best effort).

        add_access_to_secondary makes the secondary volume accessible after
        the stop.
        """
        logger.info("stopping remote copy relationship with id: {}. access: {}".format(rcrelationship_id,
                                                                                       add_access_to_secondary))
        kwargs = build_stop_replication_kwargs(rcrelationship_id, add_access_to_secondary)
        try:
            self.client.svctask.stoprcrelationship(**kwargs)
        except (svc_errors.CommandExecutionError, CLIFailureError) as ex:
            if is_warning_message(ex.my_message):
                logger.warning("exception encountered while stopping"
                               " rcrelationship '{0}': {1}".format(rcrelationship_id,
                                                                   ex.my_message))
            else:
                logger.warning("failed to stop rcrelationship '{0}': {1}".format(rcrelationship_id, ex))
    def _delete_rcrelationship(self, rcrelationship_id):
        """Delete the remote-copy relationship; failures are only logged (best effort)."""
        logger.info("deleting remote copy relationship with id: {0}".format(rcrelationship_id))
        try:
            self.client.svctask.rmrcrelationship(object_id=rcrelationship_id)
        except (svc_errors.CommandExecutionError, CLIFailureError) as ex:
            if is_warning_message(ex.my_message):
                logger.warning("exception encountered during rcrelationship"
                               " '{0}' deletion: {1}".format(rcrelationship_id,
                                                             ex.my_message))
            else:
                logger.warning("failed to delete rcrelationship '{0}': {1}".format(rcrelationship_id, ex))
    def delete_replication(self, replication_name):
        """Stop and delete the named replication; a missing replication is a no-op."""
        rcrelationship = self._get_rcrelationship_by_name(replication_name, not_exist_error=False)
        if not rcrelationship:
            logger.info("could not find replication with name {}".format(replication_name))
            return
        self._stop_rcrelationship(rcrelationship.id)
        self._delete_rcrelationship(rcrelationship.id)
    def _promote_replication_endpoint(self, endpoint_type, replication_name):
        """Switch the relationship's primary to the given endpoint type.

        Warning-level CLI messages are tolerated; other failures are
        re-raised.
        """
        logger.info("making '{}' primary for remote copy relationship {}".format(endpoint_type, replication_name))
        try:
            self.client.svctask.switchrcrelationship(primary=endpoint_type, object_id=replication_name)
        except (svc_errors.CommandExecutionError, CLIFailureError) as ex:
            if is_warning_message(ex.my_message):
                logger.warning("exception encountered while making '{}' primary"
                               " for rcrelationship {}: {}".format(endpoint_type,
                                                                   replication_name,
                                                                   ex.my_message))
            else:
                logger.error("failed to make '{}' primary for rcrelationship {}: {}".format(endpoint_type,
                                                                                            replication_name,
                                                                                            ex.my_message))
                raise
        logger.info("succeeded making '{}' primary for remote copy relationship {}".format(endpoint_type,
                                                                                           replication_name))
    def _ensure_endpoint_is_primary(self, rcrelationship, endpoint_type):
        """Make the given endpoint the primary, starting an idle relationship first.

        No-op when the endpoint is already primary. An idle relationship is
        (re)started from the opposite endpoint with force before switching.
        """
        if self._is_replication_endpoint_primary(rcrelationship, endpoint_type):
            logger.info("'{}' is already primary for rcrelationship {}. "
                        "skipping the switch".format(endpoint_type,
                                                     rcrelationship.name))
            return
        if self._is_replication_idle(rcrelationship):
            other_endpoint_type = self._get_other_endpoint_type(endpoint_type)
            self._start_rcrelationship(rcrelationship.id, primary_endpoint_type=other_endpoint_type, force=True)
        self._promote_replication_endpoint(endpoint_type, rcrelationship.name)
    def promote_replication_volume(self, replication_name):
        """Make this array's side of the replication the primary.

        When the relationship is disconnected, the local copy is stopped with
        access enabled on the secondary instead of switching primaries.
        """
        rcrelationship = self._get_rcrelationship_by_name(replication_name)
        if self._is_replication_disconnected(rcrelationship):
            self._stop_rcrelationship(rcrelationship.id, add_access_to_secondary=True)
            return
        endpoint_type = self._get_replication_endpoint_type(rcrelationship)
        self._ensure_endpoint_is_primary(rcrelationship, endpoint_type)
    def demote_replication_volume(self, replication_name):
        """Make the peer array's side of the replication the primary."""
        rcrelationship = self._get_rcrelationship_by_name(replication_name)
        endpoint_type_to_promote = self._get_replication_other_endpoint_type(rcrelationship)
        self._ensure_endpoint_is_primary(rcrelationship, endpoint_type_to_promote)
def _get_host_name_if_equal(self, nvme_host, fc_host, iscsi_host):
unique_names = {nvme_host, iscsi_host, fc_host}
unique_names.discard(None)
if len(unique_names) == 1:
return unique_names.pop()
return None
    def _addsnapshot(self, name, source_volume_id, pool):
        """Create a FlashCopy-2 snapshot via the 'addsnapshot' CLI task.

        Returns the raw CLI response, or None when the CLI reported only a
        warning-level message. Error messages are translated into typed
        exceptions.

        Raises:
            SnapshotAlreadyExists, PoolDoesNotExist, NotEnoughSpaceInPool,
            IllegalObjectName: depending on the CLI failure message.
        """
        try:
            return self.client.svctask.addsnapshot(name=name, volumes=source_volume_id, pool=pool)
        except (svc_errors.CommandExecutionError, CLIFailureError) as ex:
            if is_warning_message(ex.my_message):
                logger.warning("exception encountered while creating snapshot '{}': {}".format(name,
                                                                                               ex.my_message))
            else:
                logger.error("cannot create snapshot {0}, Reason is: {1}".format(name, ex))
                if OBJ_ALREADY_EXIST in ex.my_message:
                    raise array_errors.SnapshotAlreadyExists(name, self.endpoint)
                if NAME_NOT_EXIST_OR_MEET_RULES in ex.my_message or NOT_CHILD_POOL in ex.my_message:
                    raise array_errors.PoolDoesNotExist(pool, self.endpoint)
                if NOT_ENOUGH_EXTENTS_IN_POOL_CREATE in ex.my_message:
                    raise array_errors.NotEnoughSpaceInPool(id_or_name=pool)
                if any(msg_id in ex.my_message for msg_id in (NON_ASCII_CHARS, INVALID_NAME, TOO_MANY_CHARS)):
                    raise array_errors.IllegalObjectName(ex.my_message)
                raise ex
        return None
def _get_id_from_response(self, response):
message = str(response.response[0])
id_start, id_end = message.find('[') + 1, message.find(']')
raw_id = message[id_start:id_end]
return int(raw_id)
    def _lsvolumesnapshot(self, **kwargs):
        """Return a single CLI snapshot matching the query, or None when not found.

        Raises:
            IllegalObjectName: when the CLI rejects the name (non-ASCII or too long).
        """
        try:
            return self.client.svcinfo.lsvolumesnapshot(**kwargs).as_single_element
        except (svc_errors.CommandExecutionError, CLIFailureError) as ex:
            if OBJ_NOT_FOUND in ex.my_message or NAME_NOT_EXIST_OR_MEET_RULES in ex.my_message:
                logger.info("snapshot not found for args: {}".format(kwargs))
            elif any(msg_id in ex.my_message for msg_id in (NON_ASCII_CHARS, VALUE_TOO_LONG)):
                raise array_errors.IllegalObjectName(ex.my_message)
            else:
                raise ex
        return None
    def _get_cli_snapshot_by_id(self, snapshot_id):
        """Return the CLI snapshot object for the id, or None when not found."""
        return self._lsvolumesnapshot(object_id=snapshot_id)
    def _get_cli_snapshot_by_name(self, snapshot_name):
        """Return the CLI snapshot object for the name, or None when not found."""
        filter_value = 'snapshot_name={}'.format(snapshot_name)
        return self._lsvolumesnapshot(filtervalue=filter_value)
    def _add_snapshot(self, snapshot_name, source_cli_volume, pool):
        """Create a FlashCopy-2 snapshot and return its CLI object.

        Raises:
            ObjectNotFoundError: when the created snapshot cannot be read back.
        """
        svc_response = self._addsnapshot(name=snapshot_name, source_volume_id=source_cli_volume.id, pool=pool)
        snapshot_id = self._get_id_from_response(svc_response)
        cli_snapshot = self._get_cli_snapshot_by_id(snapshot_id)
        if cli_snapshot is None:
            raise array_errors.ObjectNotFoundError(snapshot_id)
        return cli_snapshot
|
[
"controller.array_action.errors.MappingError",
"collections.defaultdict",
"controller.array_action.errors.ExpectedSnapshotButFoundVolumeError",
"controller.array_action.errors.StorageManagementIPsNotSupportError",
"controller.array_action.errors.VolumeAlreadyExists",
"controller.array_action.errors.VolumeAlreadyUnmappedError",
"controller.array_action.errors.HostNotFoundError",
"controller.array_action.array_action_types.Host",
"controller.array_action.errors.PoolDoesNotMatchSpaceEfficiency",
"controller.array_action.errors.ObjectNotFoundError",
"controller.array_action.errors.InvalidArgumentError",
"controller.array_action.array_action_types.Replication",
"pysvc.unified.response.SVCResponse",
"io.StringIO",
"controller.common.csi_logger.get_stdout_logger",
"controller.array_action.errors.SnapshotAlreadyExists",
"controller.array_action.errors.LunAlreadyInUseError",
"controller.array_action.errors.ObjectIsStillInUseError",
"controller.array_action.errors.UnsupportedStorageVersionError",
"controller.array_action.errors.UnmappingError",
"controller.array_action.errors.NoAvailableLunError",
"controller.array_action.errors.IllegalObjectID",
"controller.array_action.errors.Flashcopy2NotSupportedMessage",
"controller.array_action.errors.MultipleHostsFoundError",
"retry.retry",
"random.choice",
"controller.array_action.errors.NoIscsiTargetsFoundError",
"controller.array_action.errors.IllegalObjectName",
"controller.array_action.utils.convert_scsi_id_to_nguid",
"pysvc.unified.client.connect",
"packaging.version.Version",
"controller.array_action.errors.SpaceEfficiencyNotSupported",
"controller.array_action.errors.CredentialsError",
"controller.array_action.errors.NotEnoughSpaceInPool",
"controller.array_action.errors.PoolDoesNotExist"
] |
[((851, 870), 'controller.common.csi_logger.get_stdout_logger', 'get_stdout_logger', ([], {}), '()\n', (868, 870), False, 'from controller.common.csi_logger import get_stdout_logger\n'), ((20224, 20287), 'retry.retry', 'retry', (['svc_errors.StorageArrayClientException'], {'tries': '(5)', 'delay': '(1)'}), '(svc_errors.StorageArrayClientException, tries=5, delay=1)\n', (20229, 20287), False, 'from retry import retry\n'), ((30976, 31039), 'retry.retry', 'retry', (['svc_errors.StorageArrayClientException'], {'tries': '(5)', 'delay': '(1)'}), '(svc_errors.StorageArrayClientException, tries=5, delay=1)\n', (30981, 31039), False, 'from retry import retry\n'), ((32164, 32214), 'controller.array_action.errors.PoolDoesNotExist', 'array_errors.PoolDoesNotExist', (['pool', 'self.endpoint'], {}), '(pool, self.endpoint)\n', (32193, 32214), True, 'import controller.array_action.errors as array_errors\n'), ((41165, 41206), 'pysvc.unified.response.SVCResponse', 'SVCResponse', (['raw_response', "{'delim': ' '}"], {}), "(raw_response, {'delim': ' '})\n", (41176, 41206), False, 'from pysvc.unified.response import CLIFailureError, SVCResponse\n'), ((41312, 41322), 'io.StringIO', 'StringIO', ([], {}), '()\n', (41320, 41322), False, 'from io import StringIO\n'), ((42306, 42435), 'controller.array_action.array_action_types.Host', 'Host', ([], {'name': 'cli_host.name', 'connectivity_types': 'connectivity_types', 'nvme_nqns': 'nvme_nqns', 'fc_wwns': 'fc_wwns', 'iscsi_iqns': 'iscsi_iqns'}), '(name=cli_host.name, connectivity_types=connectivity_types, nvme_nqns=\n nvme_nqns, fc_wwns=fc_wwns, iscsi_iqns=iscsi_iqns)\n', (42310, 42435), False, 'from controller.array_action.array_action_types import Volume, Snapshot, Replication, Host\n'), ((49484, 49501), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (49495, 49501), False, 'from collections import defaultdict\n'), ((49924, 49941), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (49935, 49941), 
False, 'from collections import defaultdict\n'), ((50825, 50877), 'controller.array_action.errors.NoIscsiTargetsFoundError', 'array_errors.NoIscsiTargetsFoundError', (['self.endpoint'], {}), '(self.endpoint)\n', (50862, 50877), True, 'import controller.array_action.errors as array_errors\n'), ((53721, 53919), 'controller.array_action.array_action_types.Replication', 'Replication', ([], {'name': 'rcrelationship.name', 'volume_internal_id': 'volume_internal_id', 'other_volume_internal_id': 'other_volume_internal_id', 'copy_type': 'copy_type', 'is_ready': 'is_ready', 'is_primary': 'is_primary'}), '(name=rcrelationship.name, volume_internal_id=volume_internal_id,\n other_volume_internal_id=other_volume_internal_id, copy_type=copy_type,\n is_ready=is_ready, is_primary=is_primary)\n', (53732, 53919), False, 'from controller.array_action.array_action_types import Volume, Snapshot, Replication, Host\n'), ((6446, 6504), 'controller.array_action.errors.StorageManagementIPsNotSupportError', 'array_errors.StorageManagementIPsNotSupportError', (['endpoint'], {}), '(endpoint)\n', (6494, 6504), True, 'import controller.array_action.errors as array_errors\n'), ((6781, 6847), 'pysvc.unified.client.connect', 'connect', (['self.endpoint'], {'username': 'self.user', 'password': 'self.password'}), '(self.endpoint, username=self.user, password=self.password)\n', (6788, 6847), False, 'from pysvc.unified.client import connect\n'), ((9812, 9897), 'controller.array_action.errors.ExpectedSnapshotButFoundVolumeError', 'array_errors.ExpectedSnapshotButFoundVolumeError', (['cli_object.name', 'self.endpoint'], {}), '(cli_object.name, self.endpoint\n )\n', (9860, 9897), True, 'import controller.array_action.errors as array_errors\n'), ((10032, 10117), 'controller.array_action.errors.ExpectedSnapshotButFoundVolumeError', 'array_errors.ExpectedSnapshotButFoundVolumeError', (['cli_object.name', 'self.endpoint'], {}), '(cli_object.name, self.endpoint\n )\n', (10080, 10117), True, 'import 
controller.array_action.errors as array_errors\n'), ((11213, 11258), 'controller.array_action.errors.ObjectNotFoundError', 'array_errors.ObjectNotFoundError', (['volume_name'], {}), '(volume_name)\n', (11245, 11258), True, 'import controller.array_action.errors as array_errors\n'), ((16195, 16253), 'controller.array_action.errors.SpaceEfficiencyNotSupported', 'array_errors.SpaceEfficiencyNotSupported', (['space_efficiency'], {}), '(space_efficiency)\n', (16235, 16253), True, 'import controller.array_action.errors as array_errors\n'), ((17531, 17566), 'controller.array_action.utils.convert_scsi_id_to_nguid', 'convert_scsi_id_to_nguid', (['volume_id'], {}), '(volume_id)\n', (17555, 17566), False, 'from controller.array_action.utils import ClassProperty, convert_scsi_id_to_nguid\n'), ((17690, 17733), 'controller.array_action.errors.ObjectNotFoundError', 'array_errors.ObjectNotFoundError', (['volume_id'], {}), '(volume_id)\n', (17722, 17733), True, 'import controller.array_action.errors as array_errors\n'), ((18219, 18262), 'controller.array_action.errors.ObjectNotFoundError', 'array_errors.ObjectNotFoundError', (['volume_id'], {}), '(volume_id)\n', (18251, 18262), True, 'import controller.array_action.errors as array_errors\n'), ((23447, 23500), 'controller.array_action.errors.Flashcopy2NotSupportedMessage', 'array_errors.Flashcopy2NotSupportedMessage', (['volume_id'], {}), '(volume_id)\n', (23489, 23500), True, 'import controller.array_action.errors as array_errors\n'), ((29414, 29502), 'controller.array_action.errors.ObjectIsStillInUseError', 'array_errors.ObjectIsStillInUseError', ([], {'id_or_name': 'object_name', 'used_by': 'fcmaps_in_use'}), '(id_or_name=object_name, used_by=\n fcmaps_in_use)\n', (29450, 29502), True, 'import controller.array_action.errors as array_errors\n'), ((29934, 29979), 'controller.array_action.errors.ObjectNotFoundError', 'array_errors.ObjectNotFoundError', (['object_name'], {}), '(object_name)\n', (29966, 29979), True, 'import 
controller.array_action.errors as array_errors\n'), ((30121, 30166), 'controller.array_action.errors.ObjectNotFoundError', 'array_errors.ObjectNotFoundError', (['object_name'], {}), '(object_name)\n', (30153, 30166), True, 'import controller.array_action.errors as array_errors\n'), ((37332, 37374), 'controller.array_action.errors.HostNotFoundError', 'array_errors.HostNotFoundError', (['initiators'], {}), '(initiators)\n', (37362, 37374), True, 'import controller.array_action.errors as array_errors\n'), ((37500, 37557), 'controller.array_action.errors.MultipleHostsFoundError', 'array_errors.MultipleHostsFoundError', (['initiators', 'fc_host'], {}), '(initiators, fc_host)\n', (37536, 37557), True, 'import controller.array_action.errors as array_errors\n'), ((40364, 40424), 'controller.array_action.errors.MultipleHostsFoundError', 'array_errors.MultipleHostsFoundError', (['initiators', 'host_names'], {}), '(initiators, host_names)\n', (40400, 40424), True, 'import controller.array_action.errors as array_errors\n'), ((41630, 41672), 'controller.array_action.errors.HostNotFoundError', 'array_errors.HostNotFoundError', (['id_or_name'], {}), '(id_or_name)\n', (41660, 41672), True, 'import controller.array_action.errors as array_errors\n'), ((44844, 44873), 'random.choice', 'choice', (['free_luns_in_interval'], {}), '(free_luns_in_interval)\n', (44850, 44873), False, 'from random import choice\n'), ((44906, 44949), 'controller.array_action.errors.NoAvailableLunError', 'array_errors.NoAvailableLunError', (['host_name'], {}), '(host_name)\n', (44938, 44949), True, 'import controller.array_action.errors as array_errors\n'), ((54472, 54522), 'controller.array_action.errors.ObjectNotFoundError', 'array_errors.ObjectNotFoundError', (['replication_name'], {}), '(replication_name)\n', (54504, 54522), True, 'import controller.array_action.errors as array_errors\n'), ((68494, 68539), 'controller.array_action.errors.ObjectNotFoundError', 'array_errors.ObjectNotFoundError', 
(['snapshot_id'], {}), '(snapshot_id)\n', (68526, 68539), True, 'import controller.array_action.errors as array_errors\n'), ((6897, 6922), 'packaging.version.Version', 'Version', (['self._code_level'], {}), '(self._code_level)\n', (6904, 6922), False, 'from packaging.version import Version\n'), ((6925, 6960), 'packaging.version.Version', 'Version', (['self.MIN_SUPPORTED_VERSION'], {}), '(self.MIN_SUPPORTED_VERSION)\n', (6932, 6960), False, 'from packaging.version import Version\n'), ((6984, 7078), 'controller.array_action.errors.UnsupportedStorageVersionError', 'array_errors.UnsupportedStorageVersionError', (['self._code_level', 'self.MIN_SUPPORTED_VERSION'], {}), '(self._code_level, self.\n MIN_SUPPORTED_VERSION)\n', (7027, 7078), True, 'import controller.array_action.errors as array_errors\n'), ((7236, 7280), 'controller.array_action.errors.CredentialsError', 'array_errors.CredentialsError', (['self.endpoint'], {}), '(self.endpoint)\n', (7265, 7280), True, 'import controller.array_action.errors as array_errors\n'), ((34227, 34280), 'controller.array_action.errors.Flashcopy2NotSupportedMessage', 'array_errors.Flashcopy2NotSupportedMessage', (['volume_id'], {}), '(volume_id)\n', (34269, 34280), True, 'import controller.array_action.errors as array_errors\n'), ((42707, 42752), 'controller.array_action.errors.ObjectNotFoundError', 'array_errors.ObjectNotFoundError', (['volume_name'], {}), '(volume_name)\n', (42739, 42752), True, 'import controller.array_action.errors as array_errors\n'), ((43770, 43811), 'controller.array_action.errors.HostNotFoundError', 'array_errors.HostNotFoundError', (['host_name'], {}), '(host_name)\n', (43800, 43811), True, 'import controller.array_action.errors as array_errors\n'), ((49344, 49396), 'controller.array_action.errors.NoIscsiTargetsFoundError', 'array_errors.NoIscsiTargetsFoundError', (['self.endpoint'], {}), '(self.endpoint)\n', (49381, 49396), True, 'import controller.array_action.errors as array_errors\n'), ((17288, 17331), 
'controller.array_action.errors.IllegalObjectID', 'array_errors.IllegalObjectID', (['ex.my_message'], {}), '(ex.my_message)\n', (17316, 17331), True, 'import controller.array_action.errors as array_errors\n'), ((35046, 35100), 'controller.array_action.errors.ObjectNotFoundError', 'array_errors.ObjectNotFoundError', (['internal_snapshot_id'], {}), '(internal_snapshot_id)\n', (35078, 35100), True, 'import controller.array_action.errors as array_errors\n'), ((46803, 46856), 'controller.array_action.errors.MappingError', 'array_errors.MappingError', (['volume_name', 'host_name', 'ex'], {}), '(volume_name, host_name, ex)\n', (46828, 46856), True, 'import controller.array_action.errors as array_errors\n'), ((48350, 48405), 'controller.array_action.errors.UnmappingError', 'array_errors.UnmappingError', (['volume_name', 'host_name', 'ex'], {}), '(volume_name, host_name, ex)\n', (48377, 48405), True, 'import controller.array_action.errors as array_errors\n'), ((10750, 10795), 'controller.array_action.errors.ObjectNotFoundError', 'array_errors.ObjectNotFoundError', (['volume_name'], {}), '(volume_name)\n', (10782, 10795), True, 'import controller.array_action.errors as array_errors\n'), ((10913, 10958), 'controller.array_action.errors.IllegalObjectName', 'array_errors.IllegalObjectName', (['ex.my_message'], {}), '(ex.my_message)\n', (10943, 10958), True, 'import controller.array_action.errors as array_errors\n'), ((14070, 14115), 'controller.array_action.errors.ObjectNotFoundError', 'array_errors.ObjectNotFoundError', (['volume_name'], {}), '(volume_name)\n', (14102, 14115), True, 'import controller.array_action.errors as array_errors\n'), ((14213, 14284), 'controller.array_action.errors.NotEnoughSpaceInPool', 'array_errors.NotEnoughSpaceInPool', ([], {'id_or_name': 'cli_volume.mdisk_grp_name'}), '(id_or_name=cli_volume.mdisk_grp_name)\n', (14246, 14284), True, 'import controller.array_action.errors as array_errors\n'), ((19386, 19439), 
'controller.array_action.errors.VolumeAlreadyExists', 'array_errors.VolumeAlreadyExists', (['name', 'self.endpoint'], {}), '(name, self.endpoint)\n', (19418, 19439), True, 'import controller.array_action.errors as array_errors\n'), ((19532, 19580), 'controller.array_action.errors.InvalidArgumentError', 'array_errors.InvalidArgumentError', (['ex.my_message'], {}), '(ex.my_message)\n', (19565, 19580), True, 'import controller.array_action.errors as array_errors\n'), ((19719, 19791), 'controller.array_action.errors.PoolDoesNotMatchSpaceEfficiency', 'array_errors.PoolDoesNotMatchSpaceEfficiency', (['pool', 'space_efficiency', 'ex'], {}), '(pool, space_efficiency, ex)\n', (19763, 19791), True, 'import controller.array_action.errors as array_errors\n'), ((19889, 19939), 'controller.array_action.errors.NotEnoughSpaceInPool', 'array_errors.NotEnoughSpaceInPool', ([], {'id_or_name': 'pool'}), '(id_or_name=pool)\n', (19922, 19939), True, 'import controller.array_action.errors as array_errors\n'), ((20077, 20122), 'controller.array_action.errors.IllegalObjectName', 'array_errors.IllegalObjectName', (['ex.my_message'], {}), '(ex.my_message)\n', (20107, 20122), True, 'import controller.array_action.errors as array_errors\n'), ((22591, 22636), 'controller.array_action.errors.ObjectNotFoundError', 'array_errors.ObjectNotFoundError', (['volume_name'], {}), '(volume_name)\n', (22623, 22636), True, 'import controller.array_action.errors as array_errors\n'), ((46414, 46455), 'controller.array_action.errors.HostNotFoundError', 'array_errors.HostNotFoundError', (['host_name'], {}), '(host_name)\n', (46444, 46455), True, 'import controller.array_action.errors as array_errors\n'), ((46543, 46588), 'controller.array_action.errors.ObjectNotFoundError', 'array_errors.ObjectNotFoundError', (['volume_name'], {}), '(volume_name)\n', (46575, 46588), True, 'import controller.array_action.errors as array_errors\n'), ((46671, 46720), 'controller.array_action.errors.LunAlreadyInUseError', 
'array_errors.LunAlreadyInUseError', (['lun', 'host_name'], {}), '(lun, host_name)\n', (46704, 46720), True, 'import controller.array_action.errors as array_errors\n'), ((48001, 48042), 'controller.array_action.errors.HostNotFoundError', 'array_errors.HostNotFoundError', (['host_name'], {}), '(host_name)\n', (48031, 48042), True, 'import controller.array_action.errors as array_errors\n'), ((48120, 48165), 'controller.array_action.errors.ObjectNotFoundError', 'array_errors.ObjectNotFoundError', (['volume_name'], {}), '(volume_name)\n', (48152, 48165), True, 'import controller.array_action.errors as array_errors\n'), ((48250, 48302), 'controller.array_action.errors.VolumeAlreadyUnmappedError', 'array_errors.VolumeAlreadyUnmappedError', (['volume_name'], {}), '(volume_name)\n', (48289, 48302), True, 'import controller.array_action.errors as array_errors\n'), ((66377, 66432), 'controller.array_action.errors.SnapshotAlreadyExists', 'array_errors.SnapshotAlreadyExists', (['name', 'self.endpoint'], {}), '(name, self.endpoint)\n', (66411, 66432), True, 'import controller.array_action.errors as array_errors\n'), ((66560, 66610), 'controller.array_action.errors.PoolDoesNotExist', 'array_errors.PoolDoesNotExist', (['pool', 'self.endpoint'], {}), '(pool, self.endpoint)\n', (66589, 66610), True, 'import controller.array_action.errors as array_errors\n'), ((66708, 66758), 'controller.array_action.errors.NotEnoughSpaceInPool', 'array_errors.NotEnoughSpaceInPool', ([], {'id_or_name': 'pool'}), '(id_or_name=pool)\n', (66741, 66758), True, 'import controller.array_action.errors as array_errors\n'), ((66896, 66941), 'controller.array_action.errors.IllegalObjectName', 'array_errors.IllegalObjectName', (['ex.my_message'], {}), '(ex.my_message)\n', (66926, 66941), True, 'import controller.array_action.errors as array_errors\n'), ((67726, 67771), 'controller.array_action.errors.IllegalObjectName', 'array_errors.IllegalObjectName', (['ex.my_message'], {}), '(ex.my_message)\n', (67756, 
67771), True, 'import controller.array_action.errors as array_errors\n')]
|
# -*- coding: utf-8 -*-
"""
Created on Mon Sep 30 13:23:53 2019
@author: casimp
"""
import numpy as np
import csv
import matplotlib.pyplot as plt
from cpex.transformation import strain_transformation
class Extract():
    def __init__(self):
        # Intentionally empty: the data containers this class operates on
        # (e, s, elastic, b_stress, rot, t, dims, num_frames, lattice_* ...)
        # are expected to be attached by a loader/subclass before use.
        pass
def extract_grains(self, data='elastic', idx=1, grain_idx=None):
"""
Extracts data (stress, strain etc.) for either all grains, or a
specified grain at a given (orthogonal) orientation or component
(where data='time', 'frame' etc. indexing does not work.
Parameters
----------
data: str
The data label, either 'stress', 'strain', 'elastic' (strain),
'back stress', 'rot', 'time', 'frame'
idx: int
The orientation (referenced via an idx) of the defined data
e.g. data='stress', idx=1 => sigma_yy
grain_idx: int
The index of the grain (note GRAIN-1 => idx=0)
Returns
-------
order: array
Dataset
"""
if idx == None and grain_idx != None:
idx = np.s_[:, grain_idx]
elif idx == None and grain_idx == None:
idx = np.s_[:, :]
elif idx != None and grain_idx == None:
idx = np.s_[idx, :]
else:
idx = np.s_[idx, grain_idx]
d = {'strain':self.e,
'stress':self.s,
'elastic':self.elastic,
'back stress':self.b_stress,
'rot':self.rot - self.rot[:,:, 0][:, :, None],
'time':self.t,
'frame':np.arange(self.num_frames)}
if data not in ['time', 'frame', 'rot']:
ex = d[data][idx]
else:
ex = d[data]
return ex
def extract_neighbours_idx(self, grain_idx, frame=0):
"""
Extracts the indinces of all grains ordered with respect to position
away from a given grain (index).
Grains move a small amount during deformation, the frame can be defined
to explicity interrogtae neightbours at a given load level/time.
Parameters
----------
grain_idx: int
The index of the grain to search around
frame: int, None
The frame to reference (default = 0). If None extracts ordered
inidices for all frames.
Returns
-------
order: list
Grain indices ordered by euclidean distance from selected grain
"""
if frame == None:
frame = np.s_[:]
dims = self.dims[:, :, frame]
rel_dims = dims - dims[:, grain_idx, None] # Keeps correct dimensionality
euc_dist = np.sum(rel_dims ** 2, axis=0)**0.5
order = np.argsort(euc_dist, axis=0)
return order[1:]
    def extract_neighbours(self, grain_idx, data='strain', idx=1, frame=-1,
                            cmean=False, dimframe='simple'):
        """
        Extracts data (stress, strain etc.) for all grains, with data being
        ordered with respect to position away from a given grain (index).
        Calls the extract_grains and extract_neighbours_idx methods.

        Parameters
        ----------
        grain_idx: int
            The index of the grain to search around
        data: str
            The data label, either 'stress', 'strain', 'elastic' (strain),
            'back stress'
        idx: int
            The orientation (referenced via an idx) of the defined data
            e.g. data='stress', idx=1 => sigma_yy
        frame: int, None
            The frame to reference (default = -1). If None extracts ordered
            data for all frames.
        cmean: bool
            Compute a rolling, cumulative mean over the neighbour axis
        dimframe: str, int, None
            If frame is not None then the neighbour ordering is done on that
            same frame. If frame is None then the grain positions are taken
            from the final frame or a specified frame (int) unless
            dimframe is None, in which case neighbour ordering is
            recomputed for each frame. Warning: this is slow!

        Returns
        -------
        array
            Ordered (optionally cumulatively averaged) dataset
        """
        if frame==None:
            frame=np.s_[:]
        ex = self.extract_grains(data=data, idx=idx)
        if frame == np.s_[:] and dimframe !='simple':
            # Per-frame ordering: split data and orderings into one column
            # per frame and gather each frame's column with its own
            # neighbour ranking (slow - see warning above).
            order = self.extract_neighbours_idx(grain_idx, None)
            ex_ordered = np.column_stack([i[j] for i, j in zip(np.split(ex, ex.shape[1], axis=1),
                                                               np.split(order, order.shape[1], axis=1))]).squeeze()
        elif frame == np.s_[:] and isinstance(dimframe, int):
            # All frames requested, but the neighbour ordering is fixed at
            # the single frame given by dimframe.
            order = self.extract_neighbours_idx(grain_idx, dimframe)
            ex_ordered = ex[order]
        else:
            # Single-frame request: rank neighbours at that frame (or the
            # final frame as a fallback when frame is a full slice).
            dimframe = frame if frame != np.s_[:] else -1
            order = self.extract_neighbours_idx(grain_idx, dimframe)
            ex_ordered = ex[order]
        if cmean:
            # Cumulative mean along the neighbour axis: element n is the
            # mean over the n nearest grains.
            ex_csum = np.cumsum(ex_ordered, axis=0)
            ex_cmean = ex_csum / np.arange(1, ex_csum.shape[0] + 1)[:, None]
            return ex_cmean[..., frame]
        return ex_ordered[..., frame]
def plot_neighbours(self, grain_idx, data='plastic', idx=1, frame=-1,
cmean=True, ):
"""
Plots data (stress, strain etc.) for all n grains, with data being
ordered with respect to position away from a given grain (index).
Parameters
----------
grain_idx: int
The index of the grain to search around
data: str
The data to plot either 'stress', 'strain', 'elastic' (strain),
'back stress'
idx: int
The orientation (referenced via an idx) of the defined data
e.g. data='stress', idx=1 => sigma_yy
frame: int, None
The frame to reference (default = 0). If None extracts ordered
data for all frames.
cmean: bool
Compute a rolling, cumulative mean
"""
assert frame != None, "Can't study response across all frames."
ex_ordered = self.extract_neighbours(grain_idx, data=data,
idx=idx, frame=frame,
cmean=cmean)
# Tinkering with axis labels
x = 'nth nearest neighbour'
y = 'cumulative mean {} (window=n)'.format(data) if cmean else data
# Plotting
plt.plot(np.arange(1, np.size(ex_ordered) +1), ex_ordered, label=grain_idx)
plt.legend()
plt.ylabel(y)
plt.xlabel(x)
def extract_lattice(self, data='lattice', family='311',
grain_idx=None, plane_idx=None):
"""
Routine to extract information about some or all (default) grains for a
specified lattice plane.
Parameters:
-----------
data: str
Either 'lattice' or 'phi'
family: str
The lattice plane family to assess
grain_idx: int, [int,...], None
If None then all grains of this family to be extracted else
the individual grain (or list of grains)
plane_idx: int, [int,...], None
If None then all planes of this family/grain combination to be
extracted else the individual planes (or list of planes)
Returns:
--------
data: array
Lattice strains (or phi) for given family (and potentially
grain/plane specification)
"""
if plane_idx == None and grain_idx != None:
idx = np.s_[:, grain_idx]
elif plane_idx == None and grain_idx == None:
idx = np.s_[:, :]
elif plane_idx != None and grain_idx == None:
idx = np.s_[plane_idx, :]
else:
idx = np.s_[plane_idx, grain_idx]
lattice = self.lattice_strain[family][idx]
phi = self.lattice_phi[family]
d = {'phi':phi,'lattice':lattice}
return d[data]
def extract_phi_idx(self, family='311', phi=0, window=10, frame=0):
"""
Allows for selection of the index of lattice planes wityh a defined
orientation with resepect to the y axis (nominally the loading axis).
A 2D array of indices with be returned if a frame is specified, the
elemtns in the array will be structured:
[[grain_idx, plane_idx],
[grain_idx, plane_idx],
...]
If None is passed as the frame variable then the rotation of
the grain during loading/dwell etc. is being considered - a 2D array
is returned with each element being structured as follows:
[[grain_idx, frame_idx, plane_idx],
[grain_idx, frame_idx, plane_idx],
...]
** In addition to the list of indices an equivalent boolean array is
returned in each case. **
Parameters
----------
family: str
The index of the grain to search around
phi: float
The data to extractm either 'stress', 'strain', 'elastic' (strain),
'back stress'
window: float
The orientation (referenced via an idx) of the defined data
e.g. data='stress', idx=1 => sigma_yy
frame: int, None
The frame to reference (default = 0). If None extracts ordered
data for all frames.
Returns
-------
va: array (bool)
Boolean array of the same dimension as the lattice strain array -
elements are True if they are within the window, else False)
select: array (int)
A list of the grains/plane indices for all grains that lie within
specified orientation/window combination.
"""
if frame == None:
frame = np.s_[:]
phi_ = 180 * self.lattice_phi[family][:, frame] / np.pi
phi_ -= 90
phi -= 90
w = window / 2
p0, p1 = phi - w, phi + w
s0 = np.logical_and(phi_ > np.min(p0), phi_ < np.max(p1))
s1 = np.logical_and(-phi_ > np.min(p0), -phi_ < np.max(p1))
select = np.logical_or(s0, s1)
va = np.argwhere(select)
return va, select
def plot_phi(self, y='lattice', family='200', frame=-1, idx=0,
alpha=0.1, restrict_z=False, restrict_range = [70, 110]):
"""
For a given lattice family (and frame) plots the variation in the
*resolved* lattice strain (or back stress) with respect to the angle
the planes make to the loading axis (phi). Can be restricted across
a smaller z_rot if required. N.b. rotations of grains defined as
(x_rot, phi, z_rot).
Parameters
----------
y: str
The data to plot on the y axis. This is typically lattice strain
but it is also possible to plot wrt. back stress.
family: str
The lattice plane family to assess
frame: int
The frame to extract data from (default = 0).
idx: int
The compnent (referenced via an idx) of the defined data. Only
valid for back stress (for fcc, idx = 0-11)
alpha: float
Plotting data transparency
restrict_z: bool
Restrict data extraction/plotting across one angular range. Can be
used to normalise the amount of data wrt. phi
restrict_range: [float, float]
Range across which to limit z rotations.
"""
lattice = self.lattice_strain
y_ = {'lattice': lattice[family],
'back stress': self.b_stress[idx]}[y]
try:
y_tensor = self.lattice_tensor[family]
tens = True
except KeyError:
print('Tensor not available')
tens=False
if y == 'back stress':
x = self.rot[1]
else:
x = self.lattice_phi[family]
rot = self.lattice_rot[family]
if restrict_z == True and y == 'lattice':
r0, r1 = restrict_range
t_z = rot[:, :, 2]* 180 / np.pi
va = np.logical_and(t_z > r0, t_z < r1)
vaf = np.zeros_like(rot[:, :, 2], dtype='bool')
vaf[:, frame, :] += True
va = np.logical_and(va, vaf)
else:
va = np.s_[:, frame]
plt.plot(x[va].flatten(), y_[va].flatten(), '.', alpha=alpha)
if y == 'lattice' and tens:
st = strain_transformation(np.linspace(0, np.pi, 1001), *y_tensor[:, frame])
plt.plot(np.linspace(0, np.pi, 1001), st, 'r')
x = 'lattice rot (phi)' if y == 'lattice' else 'grain rot (phi)'
plt.xlabel(x)
plt.ylabel(y)
def plot_grains(self, y='elastic', x='stress', x_mean=True,
y_mean=False, x_idx=1, y_idx=1, grain_idx=None, alpha=0.2,
color='k', mcolor='r'):
"""
The plot_grain method is very general plotting routing
and any grain (not lattice) specific vaues can be plotted on
either axis.
- Define data to plot on either axis i.e. y='stress', x='strain'
- Specify whether the data on given axis is the mean response of all grains
- Where relevant, the index of that data must be specified
i.e. for y='stress', y_idx = 1 for sigma_yy
While general a limited number of x, y combinations will,
unsurprisingly, not work.
Parameters
----------
y, x: str, str
The data (label), either 'stress', 'strain', 'elastic' (strain),
'back stress', 'rot', 'time', 'frame' to plot on x/y axis
x_mean, y_mean: bool, bool
Whether to take the mean (across all grains) of the data on the
x/y axis
x_idx, y_idx: int, int
Component/orientation of the specified data to plot
e.g. x='stress', idx=1 => sigma_xx
grain_idx: [int, ...]
List on grains (indices) to plot (if None, all grains plotted)
alpha, color: float, str
Plotting options for the grain specific lines
mcolor:
The color of the grain average (across x and y) line
"""
# If necessary put grain_idx into list for fancy indexing
if isinstance(grain_idx, int):
grain_idx = [grain_idx,]
# Time and frame can't be averaged
if x in ['time', 'frame']:
x_mean = False
if y in ['time', 'frame']:
y_mean = False
# Data extraction
x_ = self.extract_grains(data=x, idx=x_idx, grain_idx=grain_idx)
y_ = self.extract_grains(data=y, idx=y_idx, grain_idx=grain_idx)
# Saving x, y locations (?)
csvfile = open('strain_grain.csv', 'w', newline='')
obj = csv.writer(csvfile)
for val in np.transpose(x_):
obj.writerow(val)
csvfile.close()
csvfile = open('stress_grain.csv', 'w', newline='')
obj = csv.writer(csvfile)
for val in np.transpose(y_):
obj.writerow(val)
csvfile.close()
# Calculate mean of arrays
xm = np.nanmean(x_, axis=0) if x not in ['time', 'frame'] else x_
ym = np.nanmean(y_, axis=0) if y not in ['time', 'frame'] else y_
x__ = xm if x_mean else x_.T
y__ = ym if y_mean else y_.T
# Tinkering with axis labels
x = '{} (idx={})'.format(x, x_idx) if x not in ['time', 'frame'] else x
y = '{} (idx={})'.format(y, y_idx) if y not in ['time', 'frame'] else y
x = 'mean {}'.format(x) if x_mean else x
y = 'mean {}'.format(y) if y_mean else y
# Plotting
plt.plot(np.squeeze(x__), np.squeeze(y__), color=color, alpha=alpha)
if (not y_mean or not x_mean) and (grain_idx == None or len(grain_idx) != 1):
plt.plot(xm, ym, color=mcolor, label='Mean response')
plt.legend()
plt.ylabel(y)
plt.xlabel(x)
    def plot_lattice(self, family='200', phi=0, window=10, lat_ax='x',
                     ax2='stress', ax2_idx=1, ax2_mean=True,
                     alpha=0.2, color='k', mcolor='r',
                     plot_select=True, phi_frame=0):
        """
        The lattice strains for a given family are plotted if they lie at (or
        close to) an angle, phi (with the loading axis). The angular tolerance
        / azimuthal window is defined by the user (window). For XRD, a window
        of 10deg is often used.

        Parameters:
        -----------
        family: str
            The lattice plane family to assess
        phi: float
            Angle at which to extract the lattice plane strains
        window: float
            Azimuthal tolerance (absolute window width) for lattice data
            extraction
        lat_ax: str
            Axis to plot the lattice data on, either 'x' or 'y'
        ax2: str
            The data to plot against the lattice strain. Either 'stress',
            'strain', 'elastic' (strain), 'back stress'
        ax2_idx: int
            Component/orientation of the specified second axis data to plot
            e.g. ax2='stress', ax2_idx=1 => sigma_xx
        ax2_mean: bool
            Whether to take the mean (across all grains) of the data on the
            second axis
        alpha, color: float, str
            Plotting options for the grain specific lines
        mcolor:
            The color of the grain average (across x and y) line
        plot_select: bool
            If plot_select is True the individual lattice planes will be
            plotted in addition to the mean result, when False just the mean
            response

        phi_frame: int
            The frame to define the grains that lie within the azimuthal
            window (default = 0).

        Raises
        ------
        AssertionError
            If no planes of this family fall inside the phi window (caught
            and reported by plot_lattice_all).
        """
        # time/frame have no grain axis, so a grain-mean is meaningless.
        ax2_mean = False if ax2 in ['time', 'frame'] else ax2_mean
        d = self.extract_grains(data=ax2, idx=ax2_idx, grain_idx=None)
        # valid: argwhere index rows of in-window planes; select: bool mask.
        valid, select = self.extract_phi_idx(family=family, phi=phi,window=window, frame=phi_frame)
        if ax2 in ['time', 'frame']:
            # Same 1D series is used for both per-plane and mean plots.
            d, dm = d, d
        else:
            # d: either the grain-mean series or the per-selected-grain
            # traces; dm: the mean curve plotted on top in both cases.
            d = np.nanmean(d, axis=0) if ax2_mean else d[valid[:,0]].T
            dm = d if ax2_mean else np.nanmean(d, axis=1)

        lattice = self.extract_lattice(family=family)
        # Fancy-index the selected (plane, grain) pairs; ':' keeps frames.
        lattice = lattice[valid[:,0], :, valid[:,1]].T

        x_ = lattice if lat_ax == 'x' else d
        y_ = lattice if lat_ax != 'x' else d

        assert np.sum(select) > 0, 'Phi window too small for {} - no grains/planes selected'.format(family)

        if plot_select:
            plt.plot(x_, y_, 'k', alpha=alpha)

        # Overlay the mean lattice response for the selected planes.
        x_ = np.nanmean(lattice, axis=1) if lat_ax == 'x' else dm
        y_ = np.nanmean(lattice, axis=1) if lat_ax != 'x' else dm
        plt.plot(x_, y_, label=family, color=mcolor)

        ax2 = '{} (idx={})'.format(ax2, ax2_idx) if ax2 not in ['time', 'frame'] else ax2
        ax2 = ax2 if not ax2_mean else 'mean {}'.format(ax2)
        xlabel = ax2 if lat_ax != 'x' else 'lattice'
        ylabel = ax2 if lat_ax == 'x' else 'lattice'
        plt.xlabel(xlabel)
        plt.ylabel(ylabel)
def extract_lattice_map(self, family='200', az_bins=19):
"""
Average the lattice strains data across a defined number of bins
(i.e. azimuthally integrate), return 2D array of lattice strains against frame
for the specified family.
Parameters:
-----------
family: str
The lattice plane family to assess
az_bins: int
Number of bins to extract lattice strains across
Returns:
--------
bins: list
List of the phi bins that data has been extracted at
data: array
Lattice strains for given family averaged across a user
defined (az_bins) number of azimuthally arrayed bins
"""
phi_steps = az_bins + 1
arr1 = np.moveaxis(self.lattice_strain[family], 1, 2)
arr1 = arr1.reshape((-1, arr1.shape[-1]))
arr2 = np.moveaxis(self.lattice_phi[family], 1, 2)
arr2 = arr2.reshape((-1, arr2.shape[-1]))
arr2[arr2 > np.pi/2] -= np.pi # -90 to 90
bins = np.linspace(-90, 90, phi_steps)
e_phi = np.nan * np.ones((phi_steps - 1, self.num_frames))
for idx, i in enumerate(bins[:-1]):
va = np.logical_and(arr2 < bins[idx + 1] * np.pi / 180, arr2 > bins[idx] * np.pi / 180)
try:
e_phi[idx] = np.sum(arr1 * va, axis=0) / np.nansum(va, axis=0)
except ZeroDivisionError:
pass
return (bins[:-1]+bins[1:])/2, e_phi
def plot_lattice_map(self, family='200', az_bins=19, ax2='time',
ax2_idx=1):
"""
Plot 2D map of the azimtuhally arrayed lattice strains as a function of
second variable such as time or frame. Also works with macro stress
or strain although obvious issues may arise if there is creep dwells.
Parameters:
-----------
family: str
The lattice plane family to assess
az_bins: int
Number of bins to extract lattice strains across
ax2: str
The data to plot against the lattice strain. Either 'stress',
'strain', 'elastic' (strain), 'back stress'
ax2_idx: int
Component/orientation of the specified second axis data to plot
e.g. ax2='stress', ax2_idx=1 => sigma_xx
Returns:
--------
bins: list
List of the phi bins that data has been extracted at
data: array
Lattice strains for given family averaged across a user
defined (az_bins) number of azimuthally arrayed bins
"""
bin_c, e_phi = self.extract_lattice_map(family=family, az_bins=az_bins)
d = self.extract_grains(data=ax2, idx=ax2_idx, grain_idx=None)
ax2_mean = False if ax2 in ['time', 'frame'] else True
if ax2_mean:
d = np.nanmean(d, axis=0)
time, phi = np.meshgrid(d, bin_c)
plt.contourf(time, phi, e_phi)
plt.colorbar()
ax2 = 'mean {} (idx={})'.format(ax2, ax2_idx) if ax2 not in ['time', 'frame'] else ax2
plt.xlabel(ax2)
plt.ylabel('phi (reflected at 0$^o$)')
def plot_lattice_all(self, phi=0, window=10, lat_ax='x', ax2='stress',
ax2_idx=1, ax2_mean=True, phi_frame=0):
"""
The lattice strains for a ALL families are plotted if they lie at (or
close to) an angle, phi (with the loading axis). The angular tolerance
/ azimuthal window is defined by the user (window). For XRD, a window
of 10deg is often used.
Parameters:
-----------
phi: float
Angle at which to extract the lattice plane strains
window: float
Azimuthal tolerance (absolute window width) for lattice data
extraction
lat_ax: str
Axis to plot the lattice data on, either 'x' or 'y'
ax2: str
The data to plot against the lattice strain. Either 'stress',
'strain', 'elastic' (strain), 'back stress'
ax2_idx: int
Component/orientation of the specified second axis data to plot
e.g. ax2='stress', ax2_idx=1 => sigma_xx
ax2_mean: bool
Whether to take the mean (across all grains) of the data on the
second axis
phi_frame: int
The frame to define the grains that lie within the aimuthal
window (default = 0).
"""
for family in self.lattice_list:
try:
self.plot_lattice(family=family, lat_ax=lat_ax, ax2=ax2, ax2_idx=ax2_idx, phi=phi,
window=window, phi_frame=phi_frame, plot_select=False, mcolor=None, ax2_mean=ax2_mean)
except AssertionError:
print('Phi window too small for {} - no grains/planes selected'.format(family))
plt.legend(self.lattice_list)
def plot_back_lattice(self, family='200', phi=0, window=10,
back_ax='y', b_idx=1, ax2='stress', ax2_idx=1,
alpha=0.2, color='k', mcolor='r',
plot_select=True, phi_frame=0):
"""
Plot a component of back stress for a specified family of lattice
planes at a defined azimuthal angle. Plot against any other extracted
stress, strain, time etc. component.
Parameters:
-----------
family: str
The lattice plane family to assess
phi: float
Angle at which to extract the lattice plane strains
window: float
Azimuthal tolerance (absolute window width) for lattice data
extraction
back_ax: str
Axis to plot the lattice data on, either 'x' or 'y'
back_idx: int
Component of the back stress to plot (for fcc 0-11)
ax2: str
The data to plot against the lattice strain. Either 'stress',
'strain', 'elastic' (strain), 'back stress'
ax2_idx: int
Component/orientation of the specified second axis data to plot
e.g. ax2='stress', ax2_idx=1 => sigma_xx
alpha, color: float, str
Plotting options for the grain specific lines
mcolor:
The color of the grain average (across x and y) line
plot_select: bool
If plot_select is True the individual lattice planes will be
plotted in addition to the mean result, when False just the mean
response
phi_frame: int
The frame to define the grains that lie within the aimuthal
window (default = 0).
"""
back = self.extract_grains(data='back stress', idx=b_idx, grain_idx=None)
d = self.extract_grains(data=ax2, idx=ax2_idx, grain_idx=None)
d = d if ax2 in ['time', 'frame'] else np.nanmean(d, axis=0)
valid, select = self.extract_phi_idx(family=family, phi=phi,window=window, frame=phi_frame)
# back = back[valid[:,0], :, valid[:,1]].T
v = np.unique(valid[:,0])
back = back[v, :].T
x_ = back if back_ax == 'x' else d
y_ = back if back_ax != 'x' else d
assert np.sum(select) > 0, 'Phi window too small for {} - no grains/planes selected'.format(family)
if plot_select:
plt.plot(x_, y_, 'k', alpha=alpha)
ax2 = 'mean {} (idx={})'.format(ax2, ax2_idx) if ax2 not in ['time', 'frame'] else ax2
xlabel = ax2 if back_ax != 'x' else 'back stress'
ylabel = ax2 if back_ax == 'x' else 'back stress'
plt.xlabel(xlabel)
plt.ylabel(ylabel)
def plot_active_slip(self, family='200', phi=0, window=10,
back_ax='y', b_active=2, ax2='stress', ax2_idx=1,
alpha=0.2, color='k', mcolor='r',
plot_select=True, phi_frame=0):
"""
Plot the number of active slip systems for every plane for a specified
family of lattice planes at a defined azimuthal angle (angle wrt y axis).
Plotting is a function of time, frame, stress strain etc. The
activation of a slip system is taken to occur when the absolute back
stress associated with that system (i.e. back stress component) rises
above a user define value
Parameters:
-----------
family: str
The lattice plane family to assess
phi: float
Angle at which to extract the lattice plane strains
window: float
Azimuthal tolerance (absolute window width) for lattice data
extraction
back_ax: str
Axis to plot the lattice data on, either 'x' or 'y'
b_active: int
Component of the back stress to plot (for fcc 0-11)
ax2: str
The data to plot against the lattice strain. Either 'stress',
'strain', 'elastic' (strain), 'back stress'
ax2_idx: int
Component/orientation of the specified second axis data to plot
e.g. ax2='stress', ax2_idx=1 => sigma_xx
alpha, color: float, str
Plotting options for the grain specific lines
mcolor:
The color of the grain average (across x and y) line
plot_select: bool
If plot_select is True the individual lattice planes will be
plotted in addition to the mean result, when False just the mean
response
phi_frame: int
The frame to define the grains that lie within the aimuthal
window (default = 0).
"""
back = self.extract_grains(data='back stress', idx=None, grain_idx=None)
back_bool = np.abs(back) > b_active
d = self.extract_grains(data=ax2, idx=ax2_idx, grain_idx=None)
d = d if ax2 in ['time', 'frame'] else np.nanmean(d, axis=0)
valid, select = self.extract_phi_idx(family=family, phi=phi,window=window, frame=phi_frame)
# back = back[valid[:,0], :, valid[:,1]].T
v = np.unique(valid[:,0])
back_active = np.sum(back_bool, axis=0)[v, :].T
x_ = back_active if back_ax == 'x' else d
y_ = back_active if back_ax != 'x' else d
assert np.sum(select) > 0, 'Phi window too small for {} - no grains/planes selected'.format(family)
if plot_select:
plt.plot(x_, y_, 'k', alpha=alpha)
x_ = np.nanmean(back_active, axis=1) if back_ax == 'x' else d
y_ = np.nanmean(back_active, axis=1) if back_ax != 'x' else d
plt.plot(x_, y_, label=family, color=mcolor)
ax2 = 'mean {} (idx={})'.format(ax2, ax2_idx) if ax2 not in ['time', 'frame'] else ax2
xlabel = ax2 if back_ax != 'x' else 'Active slip systems'
ylabel = ax2 if back_ax == 'x' else 'Active slip systems'
plt.xlabel(xlabel)
plt.ylabel(ylabel)
def plot_active_slip_all(self, phi=0, window=10, back_ax='y', b_active = 2,
ax2='stress', ax2_idx=1, phi_frame=0):
"""
Plot the plane averaged number of active slip systems for all families
of lattice planes at a defined azimuthal angle (angle wrt y axis).
Plotting is a function of time, frame, stress strain etc. The
activation of a slip system is taken to occur when the absolute back
stress associated with that system (i.e. back stress component) rises
above a user define value
Parameters:
-----------
phi: float
Angle at which to extract the lattice plane strains
window: float
Azimuthal tolerance (absolute window width) for lattice data
extraction
back_ax: str
Axis to plot the lattice data on, either 'x' or 'y'
b_active: int
Component of the back stress to plot (for fcc 0-11)
ax2: str
The data to plot against the lattice strain. Either 'stress',
'strain', 'elastic' (strain), 'back stress'
ax2_idx: int
Component/orientation of the specified second axis data to plot
e.g. ax2='stress', ax2_idx=1 => sigma_xx
phi_frame: int
The frame to define the grains that lie within the aimuthal
window (default = 0).
"""
for family in self.lattice_list:
try:
self.plot_active_slip(family=family, back_ax=back_ax, ax2=ax2, ax2_idx=ax2_idx, phi=phi,
window=window, frame=phi_frame, plot_select=False, mcolor=None)
except AssertionError:
print('Phi window too small for {} - no grains/planes selected'.format(family))
plt.legend(self.lattice_list)
|
[
"numpy.moveaxis",
"numpy.sum",
"numpy.abs",
"numpy.ones",
"numpy.argsort",
"matplotlib.pyplot.contourf",
"numpy.arange",
"numpy.unique",
"numpy.nanmean",
"numpy.meshgrid",
"numpy.zeros_like",
"numpy.transpose",
"matplotlib.pyplot.colorbar",
"numpy.cumsum",
"numpy.max",
"numpy.linspace",
"numpy.size",
"numpy.nansum",
"csv.writer",
"matplotlib.pyplot.legend",
"numpy.min",
"numpy.argwhere",
"matplotlib.pyplot.ylabel",
"numpy.squeeze",
"matplotlib.pyplot.plot",
"numpy.logical_and",
"numpy.split",
"numpy.logical_or",
"matplotlib.pyplot.xlabel"
] |
[((2844, 2872), 'numpy.argsort', 'np.argsort', (['euc_dist'], {'axis': '(0)'}), '(euc_dist, axis=0)\n', (2854, 2872), True, 'import numpy as np\n'), ((6900, 6912), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (6910, 6912), True, 'import matplotlib.pyplot as plt\n'), ((6921, 6934), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['y'], {}), '(y)\n', (6931, 6934), True, 'import matplotlib.pyplot as plt\n'), ((6943, 6956), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['x'], {}), '(x)\n', (6953, 6956), True, 'import matplotlib.pyplot as plt\n'), ((10705, 10726), 'numpy.logical_or', 'np.logical_or', (['s0', 's1'], {}), '(s0, s1)\n', (10718, 10726), True, 'import numpy as np\n'), ((10749, 10768), 'numpy.argwhere', 'np.argwhere', (['select'], {}), '(select)\n', (10760, 10768), True, 'import numpy as np\n'), ((13337, 13350), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['x'], {}), '(x)\n', (13347, 13350), True, 'import matplotlib.pyplot as plt\n'), ((13359, 13372), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['y'], {}), '(y)\n', (13369, 13372), True, 'import matplotlib.pyplot as plt\n'), ((15506, 15525), 'csv.writer', 'csv.writer', (['csvfile'], {}), '(csvfile)\n', (15516, 15525), False, 'import csv\n'), ((15545, 15561), 'numpy.transpose', 'np.transpose', (['x_'], {}), '(x_)\n', (15557, 15561), True, 'import numpy as np\n'), ((15700, 15719), 'csv.writer', 'csv.writer', (['csvfile'], {}), '(csvfile)\n', (15710, 15719), False, 'import csv\n'), ((15739, 15755), 'numpy.transpose', 'np.transpose', (['y_'], {}), '(y_)\n', (15751, 15755), True, 'import numpy as np\n'), ((16666, 16679), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['y'], {}), '(y)\n', (16676, 16679), True, 'import matplotlib.pyplot as plt\n'), ((16688, 16701), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['x'], {}), '(x)\n', (16698, 16701), True, 'import matplotlib.pyplot as plt\n'), ((19650, 19694), 'matplotlib.pyplot.plot', 'plt.plot', (['x_', 'y_'], {'label': 'family', 'color': 'mcolor'}), '(x_, y_, 
label=family, color=mcolor)\n', (19658, 19694), True, 'import matplotlib.pyplot as plt\n'), ((19981, 19999), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['xlabel'], {}), '(xlabel)\n', (19991, 19999), True, 'import matplotlib.pyplot as plt\n'), ((20008, 20026), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['ylabel'], {}), '(ylabel)\n', (20018, 20026), True, 'import matplotlib.pyplot as plt\n'), ((20849, 20895), 'numpy.moveaxis', 'np.moveaxis', (['self.lattice_strain[family]', '(1)', '(2)'], {}), '(self.lattice_strain[family], 1, 2)\n', (20860, 20895), True, 'import numpy as np\n'), ((20970, 21013), 'numpy.moveaxis', 'np.moveaxis', (['self.lattice_phi[family]', '(1)', '(2)'], {}), '(self.lattice_phi[family], 1, 2)\n', (20981, 21013), True, 'import numpy as np\n'), ((21138, 21169), 'numpy.linspace', 'np.linspace', (['(-90)', '(90)', 'phi_steps'], {}), '(-90, 90, phi_steps)\n', (21149, 21169), True, 'import numpy as np\n'), ((23080, 23101), 'numpy.meshgrid', 'np.meshgrid', (['d', 'bin_c'], {}), '(d, bin_c)\n', (23091, 23101), True, 'import numpy as np\n'), ((23110, 23140), 'matplotlib.pyplot.contourf', 'plt.contourf', (['time', 'phi', 'e_phi'], {}), '(time, phi, e_phi)\n', (23122, 23140), True, 'import matplotlib.pyplot as plt\n'), ((23149, 23163), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {}), '()\n', (23161, 23163), True, 'import matplotlib.pyplot as plt\n'), ((23269, 23284), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['ax2'], {}), '(ax2)\n', (23279, 23284), True, 'import matplotlib.pyplot as plt\n'), ((23293, 23331), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""phi (reflected at 0$^o$)"""'], {}), "('phi (reflected at 0$^o$)')\n", (23303, 23331), True, 'import matplotlib.pyplot as plt\n'), ((25071, 25100), 'matplotlib.pyplot.legend', 'plt.legend', (['self.lattice_list'], {}), '(self.lattice_list)\n', (25081, 25100), True, 'import matplotlib.pyplot as plt\n'), ((27315, 27337), 'numpy.unique', 'np.unique', (['valid[:, 0]'], {}), '(valid[:, 0])\n', (27324, 
27337), True, 'import numpy as np\n'), ((27892, 27910), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['xlabel'], {}), '(xlabel)\n', (27902, 27910), True, 'import matplotlib.pyplot as plt\n'), ((27919, 27937), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['ylabel'], {}), '(ylabel)\n', (27929, 27937), True, 'import matplotlib.pyplot as plt\n'), ((30419, 30441), 'numpy.unique', 'np.unique', (['valid[:, 0]'], {}), '(valid[:, 0])\n', (30428, 30441), True, 'import numpy as np\n'), ((30956, 31000), 'matplotlib.pyplot.plot', 'plt.plot', (['x_', 'y_'], {'label': 'family', 'color': 'mcolor'}), '(x_, y_, label=family, color=mcolor)\n', (30964, 31000), True, 'import matplotlib.pyplot as plt\n'), ((31248, 31266), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['xlabel'], {}), '(xlabel)\n', (31258, 31266), True, 'import matplotlib.pyplot as plt\n'), ((31275, 31293), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['ylabel'], {}), '(ylabel)\n', (31285, 31293), True, 'import matplotlib.pyplot as plt\n'), ((33134, 33163), 'matplotlib.pyplot.legend', 'plt.legend', (['self.lattice_list'], {}), '(self.lattice_list)\n', (33144, 33163), True, 'import matplotlib.pyplot as plt\n'), ((1625, 1651), 'numpy.arange', 'np.arange', (['self.num_frames'], {}), '(self.num_frames)\n', (1634, 1651), True, 'import numpy as np\n'), ((2784, 2813), 'numpy.sum', 'np.sum', (['(rel_dims ** 2)'], {'axis': '(0)'}), '(rel_dims ** 2, axis=0)\n', (2790, 2813), True, 'import numpy as np\n'), ((5251, 5280), 'numpy.cumsum', 'np.cumsum', (['ex_ordered'], {'axis': '(0)'}), '(ex_ordered, axis=0)\n', (5260, 5280), True, 'import numpy as np\n'), ((12781, 12815), 'numpy.logical_and', 'np.logical_and', (['(t_z > r0)', '(t_z < r1)'], {}), '(t_z > r0, t_z < r1)\n', (12795, 12815), True, 'import numpy as np\n'), ((12834, 12875), 'numpy.zeros_like', 'np.zeros_like', (['rot[:, :, 2]'], {'dtype': '"""bool"""'}), "(rot[:, :, 2], dtype='bool')\n", (12847, 12875), True, 'import numpy as np\n'), ((12930, 12953), 'numpy.logical_and', 
'np.logical_and', (['va', 'vaf'], {}), '(va, vaf)\n', (12944, 12953), True, 'import numpy as np\n'), ((15862, 15884), 'numpy.nanmean', 'np.nanmean', (['x_'], {'axis': '(0)'}), '(x_, axis=0)\n', (15872, 15884), True, 'import numpy as np\n'), ((15936, 15958), 'numpy.nanmean', 'np.nanmean', (['y_'], {'axis': '(0)'}), '(y_, axis=0)\n', (15946, 15958), True, 'import numpy as np\n'), ((16421, 16436), 'numpy.squeeze', 'np.squeeze', (['x__'], {}), '(x__)\n', (16431, 16436), True, 'import numpy as np\n'), ((16438, 16453), 'numpy.squeeze', 'np.squeeze', (['y__'], {}), '(y__)\n', (16448, 16453), True, 'import numpy as np\n'), ((16579, 16632), 'matplotlib.pyplot.plot', 'plt.plot', (['xm', 'ym'], {'color': 'mcolor', 'label': '"""Mean response"""'}), "(xm, ym, color=mcolor, label='Mean response')\n", (16587, 16632), True, 'import matplotlib.pyplot as plt\n'), ((16645, 16657), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (16655, 16657), True, 'import matplotlib.pyplot as plt\n'), ((19332, 19346), 'numpy.sum', 'np.sum', (['select'], {}), '(select)\n', (19338, 19346), True, 'import numpy as np\n'), ((19461, 19495), 'matplotlib.pyplot.plot', 'plt.plot', (['x_', 'y_', '"""k"""'], {'alpha': 'alpha'}), "(x_, y_, 'k', alpha=alpha)\n", (19469, 19495), True, 'import matplotlib.pyplot as plt\n'), ((19522, 19549), 'numpy.nanmean', 'np.nanmean', (['lattice'], {'axis': '(1)'}), '(lattice, axis=1)\n', (19532, 19549), True, 'import numpy as np\n'), ((19588, 19615), 'numpy.nanmean', 'np.nanmean', (['lattice'], {'axis': '(1)'}), '(lattice, axis=1)\n', (19598, 19615), True, 'import numpy as np\n'), ((21195, 21236), 'numpy.ones', 'np.ones', (['(phi_steps - 1, self.num_frames)'], {}), '((phi_steps - 1, self.num_frames))\n', (21202, 21236), True, 'import numpy as np\n'), ((21307, 21393), 'numpy.logical_and', 'np.logical_and', (['(arr2 < bins[idx + 1] * np.pi / 180)', '(arr2 > bins[idx] * np.pi / 180)'], {}), '(arr2 < bins[idx + 1] * np.pi / 180, arr2 > bins[idx] * np.pi /\n 180)\n', 
(21321, 21393), True, 'import numpy as np\n'), ((23029, 23050), 'numpy.nanmean', 'np.nanmean', (['d'], {'axis': '(0)'}), '(d, axis=0)\n', (23039, 23050), True, 'import numpy as np\n'), ((27111, 27132), 'numpy.nanmean', 'np.nanmean', (['d'], {'axis': '(0)'}), '(d, axis=0)\n', (27121, 27132), True, 'import numpy as np\n'), ((27484, 27498), 'numpy.sum', 'np.sum', (['select'], {}), '(select)\n', (27490, 27498), True, 'import numpy as np\n'), ((27613, 27647), 'matplotlib.pyplot.plot', 'plt.plot', (['x_', 'y_', '"""k"""'], {'alpha': 'alpha'}), "(x_, y_, 'k', alpha=alpha)\n", (27621, 27647), True, 'import matplotlib.pyplot as plt\n'), ((30064, 30076), 'numpy.abs', 'np.abs', (['back'], {}), '(back)\n', (30070, 30076), True, 'import numpy as np\n'), ((30215, 30236), 'numpy.nanmean', 'np.nanmean', (['d'], {'axis': '(0)'}), '(d, axis=0)\n', (30225, 30236), True, 'import numpy as np\n'), ((30630, 30644), 'numpy.sum', 'np.sum', (['select'], {}), '(select)\n', (30636, 30644), True, 'import numpy as np\n'), ((30759, 30793), 'matplotlib.pyplot.plot', 'plt.plot', (['x_', 'y_', '"""k"""'], {'alpha': 'alpha'}), "(x_, y_, 'k', alpha=alpha)\n", (30767, 30793), True, 'import matplotlib.pyplot as plt\n'), ((30820, 30851), 'numpy.nanmean', 'np.nanmean', (['back_active'], {'axis': '(1)'}), '(back_active, axis=1)\n', (30830, 30851), True, 'import numpy as np\n'), ((30890, 30921), 'numpy.nanmean', 'np.nanmean', (['back_active'], {'axis': '(1)'}), '(back_active, axis=1)\n', (30900, 30921), True, 'import numpy as np\n'), ((10589, 10599), 'numpy.min', 'np.min', (['p0'], {}), '(p0)\n', (10595, 10599), True, 'import numpy as np\n'), ((10608, 10618), 'numpy.max', 'np.max', (['p1'], {}), '(p1)\n', (10614, 10618), True, 'import numpy as np\n'), ((10656, 10666), 'numpy.min', 'np.min', (['p0'], {}), '(p0)\n', (10662, 10666), True, 'import numpy as np\n'), ((10676, 10686), 'numpy.max', 'np.max', (['p1'], {}), '(p1)\n', (10682, 10686), True, 'import numpy as np\n'), ((13147, 13174), 'numpy.linspace', 
'np.linspace', (['(0)', 'np.pi', '(1001)'], {}), '(0, np.pi, 1001)\n', (13158, 13174), True, 'import numpy as np\n'), ((13218, 13245), 'numpy.linspace', 'np.linspace', (['(0)', 'np.pi', '(1001)'], {}), '(0, np.pi, 1001)\n', (13229, 13245), True, 'import numpy as np\n'), ((18964, 18985), 'numpy.nanmean', 'np.nanmean', (['d'], {'axis': '(0)'}), '(d, axis=0)\n', (18974, 18985), True, 'import numpy as np\n'), ((19055, 19076), 'numpy.nanmean', 'np.nanmean', (['d'], {'axis': '(1)'}), '(d, axis=1)\n', (19065, 19076), True, 'import numpy as np\n'), ((30463, 30488), 'numpy.sum', 'np.sum', (['back_bool'], {'axis': '(0)'}), '(back_bool, axis=0)\n', (30469, 30488), True, 'import numpy as np\n'), ((5315, 5349), 'numpy.arange', 'np.arange', (['(1)', '(ex_csum.shape[0] + 1)'], {}), '(1, ex_csum.shape[0] + 1)\n', (5324, 5349), True, 'import numpy as np\n'), ((6838, 6857), 'numpy.size', 'np.size', (['ex_ordered'], {}), '(ex_ordered)\n', (6845, 6857), True, 'import numpy as np\n'), ((21436, 21461), 'numpy.sum', 'np.sum', (['(arr1 * va)'], {'axis': '(0)'}), '(arr1 * va, axis=0)\n', (21442, 21461), True, 'import numpy as np\n'), ((21464, 21485), 'numpy.nansum', 'np.nansum', (['va'], {'axis': '(0)'}), '(va, axis=0)\n', (21473, 21485), True, 'import numpy as np\n'), ((4680, 4713), 'numpy.split', 'np.split', (['ex', 'ex.shape[1]'], {'axis': '(1)'}), '(ex, ex.shape[1], axis=1)\n', (4688, 4713), True, 'import numpy as np\n'), ((4758, 4797), 'numpy.split', 'np.split', (['order', 'order.shape[1]'], {'axis': '(1)'}), '(order, order.shape[1], axis=1)\n', (4766, 4797), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
"""
Модуль с основной логикой для декоратора.
Для общения с нодой(Chrome/Node.js) используется Chrome DevTools Protocol:
https://chromedevtools.github.io/devtools-protocol/
"""
import inspect
import pathlib
import shutil
import xml.etree.ElementTree as xml
from functools import wraps
from time import sleep, time
from sealant.cdp import DevToolsProtocolConnection
from sealant.config import SeaLantConfig
from sealant.errors import LeakError
from sealant.heapfile_processing import HeapObject, check_leak_with_timeline
from sealant.heapfile_processing import check_leak_with_snapshots
from sealant.logger import log, set_logger
conf = SeaLantConfig()
def sealant(timeline=True, host='', port='', ws='',
wait_func=True):
"""
Декорируемый объект может быть классом или методом.
В случае класса устанавливаются параметры подключения к ноде для всех
тестируемых методов декорируемого класса.
В случае метода устанавливаются параметры подключения к ноде для текущего
метода, происходит подключение к ноде, действия по нахождению
утечки и отключение от ноды.
Заданные хост/порт или адрес вебсокета имеют следующий приоритет в порядке
убывания:
1. В декораторе метода
2. В декораторе класса
3. В config.py
Можно задать только хост/порт или только адрес ws. Заданный адрес ws
имеет приоритет над хостом/портом.
:param timeline: если False - проверка с помощью снэпшотов
:param host: хост для подключения к ноде
:param port: порт для подключения к ноде
:param ws: адрес ws:// для подключения к ноде
:param wait_func: активировать возможность использования метода cdp.wait_full_load
"""
def wrapper(obj):
if inspect.isclass(obj):
return _wrapper_for_class(obj, host=host, port=port, ws=ws)
elif inspect.isfunction(obj):
@wraps(obj)
def test(*args, **kwargs):
_wrapper_for_test(obj, timeline, host, port, ws,
wait_func, *args, **kwargs)
return test
return wrapper
def _wrapper_for_class(obj, host, port, ws):
"""
Функция обработки класса в декораторе.
:param obj: декорируемый класс
:param host: хост для подключения к ноде
:param port: порт для подключения к ноде
:param ws: адрес ws:// для подключения к ноде
:return: декорируемый класс с привязанным экземпляром класса
"""
set_logger()
obj.cdp = conf.cdp = DevToolsProtocolConnection(host=host, port=port, ws=ws)
conf.clear_conf_cdp = False
return obj
def _wrapper_for_test(obj, timeline, host, port, ws, wait_func,
*args, **kwargs):
"""
Функция обработки теста в декораторе.
:param obj: декорируемый тест
:param timeline: если False - проверка с помощью снэпшотов
:param host: хост для подключения к ноде
:param port: порт для подключения к ноде
:param ws: адрес ws:// для подключения к ноде
:param wait_func: активировать возможность использования метода cdp.wait_full_load
"""
if conf.clear_conf_cdp:
set_logger()
conf.cdp = DevToolsProtocolConnection(host=host, port=port,
ws=ws)
cdp = obj.cdp = conf.cdp
cdp.name = obj.__name__
host_name = host or cdp.class_host or conf.host
port_num = port or cdp.class_port or conf.port
websocket_url = ws or cdp.class_ws or conf.websocket_url or ''
cdp.connect_to_node(host_name, port_num, websocket_url)
cdp.tab.HeapProfiler.enable()
if wait_func:
cdp.activate_wait_func()
measure_repeat = conf.measure_repeat + 1
step_repeat = conf.number_of_test_repeats
heap_type = 'heaptimeline' if timeline else 'heapsnapshot'
for i in range(measure_repeat):
log('Количество повторов: {0}/{1} '
'Шагов: {2} '
'Heap файл: {3}'.format(i + 1, measure_repeat,
step_repeat, heap_type))
result_metric = []
dif_result_metrics = []
result_metric.append(cdp.get_metrics())
if timeline:
result = _meas_timeline(obj, step_repeat, wait_func,
*args, **kwargs)
else:
result = _meas_snapshot(obj, step_repeat,
*args, **kwargs)
leaksize, leak = result
log('Leak is {:.2f} KB'.format(leaksize))
if result_metric[0]:
result_metric.append(cdp.get_metrics())
for j in range(len(result_metric)):
dif = (result_metric[1][j] - result_metric[0][j]) / step_repeat
if dif:
dif_result_metrics.append([dif, conf.metrics[j][1]])
log("Добавлено {}/шаг: {}".format(conf.metrics[j][1], dif))
if not leak:
break
step_repeat += 2
cdp.disconnect_from_node()
if leak:
need_zip = False
if conf.get_xml_table:
_create_xml_report(cdp, leaksize, dif_result_metrics, heap_type)
need_zip = True
if conf.save_leaked_heapfile:
pathlib.Path('leaks').mkdir(parents=True, exist_ok=True)
path = "{0}s/{1}".format(heap_type, cdp.name)
heap_file_location = '{0}leaks/{1}'.format(conf.path_to_save,
cdp.name)
need_zip = True
if need_zip:
shutil.make_archive(heap_file_location, format='zip', root_dir=path)
shutil.rmtree('{}s'.format(heap_type))
raise LeakError("В тесте есть утечка")
shutil.rmtree('{}s'.format(heap_type))
return True
def _meas_timeline(decorated_function, step_repeat, wait_func,
*args, **kwargs):
"""
Замер утечки с использованием таймлайна.
Количество повторов тестируемой функции увеличивается на 2:
добавляются прогревочный и завершающий шаги
:param decorated_function: тестируемая функция
:param step_repeat: количество повторов тестируемой функции
:param args: аргументы тестируемой функции
:param kwargs: аргументы тестируемой функции
:return: (размер утечки в шаге в КБ, наличие утечки boolean)
"""
cdp = conf.cdp
cdp.tab.HeapProfiler.startTrackingHeapObjects()
time_of_steps = []
for i in range(step_repeat):
start_step = time()
sleep(0.1)
decorated_function(*args, **kwargs)
if conf.default_wait_full_load and wait_func:
cdp.wait_full_load()
cdp.twice_collect_garbage()
time_of_steps.append(time() - start_step)
heap_file = cdp.get_heap_file(timeline=True)
heap_calc = HeapObject(heapfile=heap_file)
heap_calc.parsing_heap_file()
result = heap_calc.get_leak_size(period_dur=time_of_steps)
return check_leak_with_timeline(result=result,
leak_size_limit=conf.leak_size_limit)
def _meas_snapshot(decorated_function, step_repeat, *args, **kwargs):
"""
Замер утечки с использованием снэпшота.
Перед тестом два прогревочных повторая
:param decorated_function: тестируемая функция
:param step_repeat: количество повторов тестируемой функции
:param args: аргументы тестируемой функции
:param kwargs: аргументы тестируемой функции
:return: (размер утечки в шаге в КБ, наличие утечки boolean)
"""
cdp = conf.cdp
heap_files = []
results = []
for i in range(step_repeat):
decorated_function(*args, **kwargs)
cdp.twice_collect_garbage()
heap_files.append(cdp.get_heap_file(timeline=False))
for heap_file in heap_files:
heap_calc = HeapObject(heapfile=heap_file)
heap_calc.parsing_heap_file()
results.append(heap_calc.get_leak_size())
result = check_leak_with_snapshots(result=results,
leak_size_limit=conf.leak_size_limit)
return result
def _create_xml_report(cdp, leaksize, dif_result_metrics, heap_type):
root = xml.Element("root")
main_report = xml.Element("LeakReport")
root.append(main_report)
name_report = xml.SubElement(main_report, "TestName")
name_report.text = cdp.name
leak_report = xml.SubElement(main_report, "LeakSize")
leak_report.text = 'Утечка за шаг: {:.2f} KB'.format(leaksize)
if dif_result_metrics:
metric_report = []
for i, dif in enumerate(dif_result_metrics):
metric_report.append(
xml.SubElement(main_report, 'Metric_{}'.format(i + 1)))
metric_report[i].text = "Добавлено {0}/шаг: {1}".format(dif[1],
dif[0])
heap_file_report = xml.SubElement(main_report, 'HeapFile')
heap_file_report.text = "Cохранение heapfile: {}".format(
conf.save_leaked_heapfile)
tree = xml.ElementTree(root)
with open('{0}s/{1}/report.xml'.format(heap_type, cdp.name), 'wb') as fh:
tree.write(fh, xml_declaration=True, encoding='utf-8')
|
[
"shutil.make_archive",
"sealant.logger.set_logger",
"xml.etree.ElementTree.ElementTree",
"inspect.isclass",
"xml.etree.ElementTree.Element",
"inspect.isfunction",
"sealant.heapfile_processing.check_leak_with_timeline",
"time.time",
"sealant.cdp.DevToolsProtocolConnection",
"time.sleep",
"pathlib.Path",
"functools.wraps",
"xml.etree.ElementTree.SubElement",
"sealant.heapfile_processing.HeapObject",
"sealant.errors.LeakError",
"sealant.heapfile_processing.check_leak_with_snapshots",
"sealant.config.SeaLantConfig"
] |
[((663, 678), 'sealant.config.SeaLantConfig', 'SeaLantConfig', ([], {}), '()\n', (676, 678), False, 'from sealant.config import SeaLantConfig\n'), ((2447, 2459), 'sealant.logger.set_logger', 'set_logger', ([], {}), '()\n', (2457, 2459), False, 'from sealant.logger import log, set_logger\n'), ((2485, 2540), 'sealant.cdp.DevToolsProtocolConnection', 'DevToolsProtocolConnection', ([], {'host': 'host', 'port': 'port', 'ws': 'ws'}), '(host=host, port=port, ws=ws)\n', (2511, 2540), False, 'from sealant.cdp import DevToolsProtocolConnection\n'), ((6696, 6726), 'sealant.heapfile_processing.HeapObject', 'HeapObject', ([], {'heapfile': 'heap_file'}), '(heapfile=heap_file)\n', (6706, 6726), False, 'from sealant.heapfile_processing import HeapObject, check_leak_with_timeline\n'), ((6835, 6912), 'sealant.heapfile_processing.check_leak_with_timeline', 'check_leak_with_timeline', ([], {'result': 'result', 'leak_size_limit': 'conf.leak_size_limit'}), '(result=result, leak_size_limit=conf.leak_size_limit)\n', (6859, 6912), False, 'from sealant.heapfile_processing import HeapObject, check_leak_with_timeline\n'), ((7815, 7894), 'sealant.heapfile_processing.check_leak_with_snapshots', 'check_leak_with_snapshots', ([], {'result': 'results', 'leak_size_limit': 'conf.leak_size_limit'}), '(result=results, leak_size_limit=conf.leak_size_limit)\n', (7840, 7894), False, 'from sealant.heapfile_processing import check_leak_with_snapshots\n'), ((8035, 8054), 'xml.etree.ElementTree.Element', 'xml.Element', (['"""root"""'], {}), "('root')\n", (8046, 8054), True, 'import xml.etree.ElementTree as xml\n'), ((8073, 8098), 'xml.etree.ElementTree.Element', 'xml.Element', (['"""LeakReport"""'], {}), "('LeakReport')\n", (8084, 8098), True, 'import xml.etree.ElementTree as xml\n'), ((8146, 8185), 'xml.etree.ElementTree.SubElement', 'xml.SubElement', (['main_report', '"""TestName"""'], {}), "(main_report, 'TestName')\n", (8160, 8185), True, 'import xml.etree.ElementTree as xml\n'), ((8236, 8275), 
'xml.etree.ElementTree.SubElement', 'xml.SubElement', (['main_report', '"""LeakSize"""'], {}), "(main_report, 'LeakSize')\n", (8250, 8275), True, 'import xml.etree.ElementTree as xml\n'), ((8731, 8770), 'xml.etree.ElementTree.SubElement', 'xml.SubElement', (['main_report', '"""HeapFile"""'], {}), "(main_report, 'HeapFile')\n", (8745, 8770), True, 'import xml.etree.ElementTree as xml\n'), ((8879, 8900), 'xml.etree.ElementTree.ElementTree', 'xml.ElementTree', (['root'], {}), '(root)\n', (8894, 8900), True, 'import xml.etree.ElementTree as xml\n'), ((1732, 1752), 'inspect.isclass', 'inspect.isclass', (['obj'], {}), '(obj)\n', (1747, 1752), False, 'import inspect\n'), ((3112, 3124), 'sealant.logger.set_logger', 'set_logger', ([], {}), '()\n', (3122, 3124), False, 'from sealant.logger import log, set_logger\n'), ((3144, 3199), 'sealant.cdp.DevToolsProtocolConnection', 'DevToolsProtocolConnection', ([], {'host': 'host', 'port': 'port', 'ws': 'ws'}), '(host=host, port=port, ws=ws)\n', (3170, 3199), False, 'from sealant.cdp import DevToolsProtocolConnection\n'), ((5597, 5629), 'sealant.errors.LeakError', 'LeakError', (['"""В тесте есть утечка"""'], {}), "('В тесте есть утечка')\n", (5606, 5629), False, 'from sealant.errors import LeakError\n'), ((6388, 6394), 'time.time', 'time', ([], {}), '()\n', (6392, 6394), False, 'from time import sleep, time\n'), ((6403, 6413), 'time.sleep', 'sleep', (['(0.1)'], {}), '(0.1)\n', (6408, 6413), False, 'from time import sleep, time\n'), ((7683, 7713), 'sealant.heapfile_processing.HeapObject', 'HeapObject', ([], {'heapfile': 'heap_file'}), '(heapfile=heap_file)\n', (7693, 7713), False, 'from sealant.heapfile_processing import HeapObject, check_leak_with_timeline\n'), ((1839, 1862), 'inspect.isfunction', 'inspect.isfunction', (['obj'], {}), '(obj)\n', (1857, 1862), False, 'import inspect\n'), ((5467, 5535), 'shutil.make_archive', 'shutil.make_archive', (['heap_file_location'], {'format': '"""zip"""', 'root_dir': 'path'}), 
"(heap_file_location, format='zip', root_dir=path)\n", (5486, 5535), False, 'import shutil\n'), ((1877, 1887), 'functools.wraps', 'wraps', (['obj'], {}), '(obj)\n', (1882, 1887), False, 'from functools import wraps\n'), ((6610, 6616), 'time.time', 'time', ([], {}), '()\n', (6614, 6616), False, 'from time import sleep, time\n'), ((5152, 5173), 'pathlib.Path', 'pathlib.Path', (['"""leaks"""'], {}), "('leaks')\n", (5164, 5173), False, 'import pathlib\n')]
|
from sklearn.decomposition import PCA
import pandas as pd
import matplotlib.pyplot as plt
from brightics.common.report import ReportBuilder, strip_margin, pandasDF2MD, plt2MD, dict2MD
from brightics.function.utils import _model_dict
from brightics.common.groupby import _function_by_group
from brightics.common.utils import check_required_parameters
def pca(table, group_by=None, **params):
    """Run PCA on *table*, optionally split by group.

    :param table: input pandas DataFrame.
    :param group_by: optional column(s); when given, PCA is fitted
        independently on each partition of the table.
    :param params: keyword arguments forwarded to ``_pca``.
    :return: dict produced by ``_pca`` (or its grouped equivalent).
    """
    check_required_parameters(_pca, params, ['table'])
    if group_by is None:
        return _pca(table, **params)
    return _function_by_group(_pca, table, group_by=group_by, **params)
def _pca(table, input_cols, new_column_name='projected_', n_components=None, copy=True, whiten=False, svd_solver='auto',
         tol=0.0, iterated_power='auto', random_state=None):
    """Fit a scikit-learn PCA on *input_cols* of *table* and build a report.

    :param table: input pandas DataFrame.
    :param input_cols: names of the feature columns the PCA is fitted on.
    :param new_column_name: prefix for the projected output columns
        (``projected_0`` ... ``projected_{k-1}``).
    :param n_components: number of components; defaults to ``len(input_cols)``.
    :return: dict with ``out_table`` (input plus projected columns) and
        ``model`` (fitted-model attributes plus a markdown report).

    The remaining keyword arguments are passed positionally, in order, to
    ``sklearn.decomposition.PCA``.
    """
    num_feature_cols = len(input_cols)
    if n_components is None:
        n_components = num_feature_cols
    pca = PCA(n_components, copy, whiten, svd_solver, tol, iterated_power, random_state)
    pca_model = pca.fit(table[input_cols])
    # Output column names: prefix + component index.
    column_names = []
    for i in range(0, n_components):
        column_names.append(new_column_name + str(i))
    pca_result = pca_model.transform(table[input_cols])
    # NOTE(review): wrapping column_names in another list produces MultiIndex
    # columns; harmless here because out_df's columns are reassigned below.
    out_df = pd.DataFrame(data=pca_result, columns=[column_names])
    # Capture the fitted model's attributes for the result dict and report.
    res_components = pca_model.components_
    res_components_df = pd.DataFrame(data=res_components, columns=[input_cols])
    res_explained_variance = pca_model.explained_variance_
    res_explained_variance_ratio = pca_model.explained_variance_ratio_
    res_singular_values = pca_model.singular_values_
    res_mean = pca_model.mean_
    res_n_components = pca_model.n_components_
    res_noise_variance = pca_model.noise_variance_
    res_get_param = pca_model.get_params()
    res_get_covariance = pca_model.get_covariance()
    res_get_precision = pca_model.get_precision()
    # Visualization: scatter of the first two projected components; with a
    # single component the same axis is plotted against itself.
    plt.figure()
    if res_n_components == 1:
        plt.scatter(pca_result[:, 0], pca_result[:, 0])
    else:
        plt.scatter(pca_result[:, 0], pca_result[:, 1])
    plt_two = plt2MD(plt)
    plt.clf()
    rb = ReportBuilder()
    rb.addMD(strip_margin("""
|
| ### Plot
| The x-axis and y-axis of the following plot is projected0 and projected1, respectively.
| {image1}
|
| ### Result
| {table1}
| only showing top 20 rows
|
| ### Parameters
| {parameter1}
|
| ### Components
| {table2}
|
| ### Mean
| {array1}
|
| ### Explained Variance
| {array2}
|
""".format(table1=pandasDF2MD(out_df, 20),
           image1=plt_two,
           parameter1=dict2MD(res_get_param),
           table2=pandasDF2MD(res_components_df),
           array1=res_mean,
           array2=res_explained_variance
           )))
    # Assemble the model dict consumed by pca_with_model / downstream nodes.
    model = _model_dict('pca')
    model['components'] = res_components
    model['explained_variance'] = res_explained_variance
    model['explained_variance_ratio'] = res_explained_variance_ratio
    model['singular_values'] = res_singular_values
    model['mean'] = res_mean
    model['n_components'] = res_n_components
    model['noise_variance'] = res_noise_variance
    model['parameters'] = res_get_param
    model['covariance'] = res_get_covariance
    model['precision'] = res_get_precision
    model['report'] = rb.get()
    model['pca_model'] = pca_model
    model['input_cols'] = input_cols
    # Append the projected columns to the original table and flatten the
    # column index back to plain strings.
    out_df = pd.concat([table.reset_index(drop=True), out_df], axis=1)
    out_df.columns = table.columns.values.tolist() + column_names
    return {'out_table': out_df, 'model' : model}
def pca_with_model(table, model, group_by=None, **params):
    """Project *table* with a previously fitted PCA model.

    :param table: input pandas DataFrame.
    :param model: model dict produced by ``pca``.
    :param group_by: optional column(s); when given, the projection is
        applied independently per partition.
    :param params: keyword arguments forwarded to ``_pca_with_model``.
    :return: dict produced by ``_pca_with_model`` (or grouped equivalent).
    """
    check_required_parameters(_pca_with_model, params, ['table', 'model'])
    if group_by is None:
        return _pca_with_model(table, model, **params)
    return _function_by_group(_pca_with_model, table, model, group_by=group_by, **params)
def _pca_with_model(table, model, new_column_name = 'projected_'):
new_col_names = []
for i in range(0, model['n_components']):
new_col_names.append(new_column_name + str(i))
pca_result = model['pca_model'].transform(table[model['input_cols']])
out_table = pd.concat([table.reset_index(drop=True), pd.DataFrame(data=pca_result, columns=[new_col_names])], axis=1)
out_table.columns = table.columns.values.tolist() + new_col_names
return {'out_table' : out_table}
|
[
"pandas.DataFrame",
"matplotlib.pyplot.clf",
"matplotlib.pyplot.scatter",
"brightics.function.utils._model_dict",
"matplotlib.pyplot.figure",
"sklearn.decomposition.PCA",
"brightics.common.report.plt2MD",
"brightics.common.report.ReportBuilder",
"brightics.common.utils.check_required_parameters",
"brightics.common.report.pandasDF2MD",
"brightics.common.report.dict2MD",
"brightics.common.groupby._function_by_group"
] |
[((407, 457), 'brightics.common.utils.check_required_parameters', 'check_required_parameters', (['_pca', 'params', "['table']"], {}), "(_pca, params, ['table'])\n", (432, 457), False, 'from brightics.common.utils import check_required_parameters\n'), ((967, 1045), 'sklearn.decomposition.PCA', 'PCA', (['n_components', 'copy', 'whiten', 'svd_solver', 'tol', 'iterated_power', 'random_state'], {}), '(n_components, copy, whiten, svd_solver, tol, iterated_power, random_state)\n', (970, 1045), False, 'from sklearn.decomposition import PCA\n'), ((1316, 1369), 'pandas.DataFrame', 'pd.DataFrame', ([], {'data': 'pca_result', 'columns': '[column_names]'}), '(data=pca_result, columns=[column_names])\n', (1328, 1369), True, 'import pandas as pd\n'), ((1449, 1504), 'pandas.DataFrame', 'pd.DataFrame', ([], {'data': 'res_components', 'columns': '[input_cols]'}), '(data=res_components, columns=[input_cols])\n', (1461, 1504), True, 'import pandas as pd\n'), ((2021, 2033), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2031, 2033), True, 'import matplotlib.pyplot as plt\n'), ((2274, 2285), 'brightics.common.report.plt2MD', 'plt2MD', (['plt'], {}), '(plt)\n', (2280, 2285), False, 'from brightics.common.report import ReportBuilder, strip_margin, pandasDF2MD, plt2MD, dict2MD\n'), ((2291, 2300), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (2298, 2300), True, 'import matplotlib.pyplot as plt\n'), ((2329, 2344), 'brightics.common.report.ReportBuilder', 'ReportBuilder', ([], {}), '()\n', (2342, 2344), False, 'from brightics.common.report import ReportBuilder, strip_margin, pandasDF2MD, plt2MD, dict2MD\n'), ((3108, 3126), 'brightics.function.utils._model_dict', '_model_dict', (['"""pca"""'], {}), "('pca')\n", (3119, 3126), False, 'from brightics.function.utils import _model_dict\n'), ((3987, 4057), 'brightics.common.utils.check_required_parameters', 'check_required_parameters', (['_pca_with_model', 'params', "['table', 'model']"], {}), "(_pca_with_model, params, 
['table', 'model'])\n", (4012, 4057), False, 'from brightics.common.utils import check_required_parameters\n'), ((504, 564), 'brightics.common.groupby._function_by_group', '_function_by_group', (['_pca', 'table'], {'group_by': 'group_by'}), '(_pca, table, group_by=group_by, **params)\n', (522, 564), False, 'from brightics.common.groupby import _function_by_group\n'), ((2074, 2121), 'matplotlib.pyplot.scatter', 'plt.scatter', (['pca_result[:, 0]', 'pca_result[:, 0]'], {}), '(pca_result[:, 0], pca_result[:, 0])\n', (2085, 2121), True, 'import matplotlib.pyplot as plt\n'), ((2142, 2189), 'matplotlib.pyplot.scatter', 'plt.scatter', (['pca_result[:, 0]', 'pca_result[:, 1]'], {}), '(pca_result[:, 0], pca_result[:, 1])\n', (2153, 2189), True, 'import matplotlib.pyplot as plt\n'), ((4104, 4182), 'brightics.common.groupby._function_by_group', '_function_by_group', (['_pca_with_model', 'table', 'model'], {'group_by': 'group_by'}), '(_pca_with_model, table, model, group_by=group_by, **params)\n', (4122, 4182), False, 'from brightics.common.groupby import _function_by_group\n'), ((4586, 4640), 'pandas.DataFrame', 'pd.DataFrame', ([], {'data': 'pca_result', 'columns': '[new_col_names]'}), '(data=pca_result, columns=[new_col_names])\n', (4598, 4640), True, 'import pandas as pd\n'), ((2807, 2830), 'brightics.common.report.pandasDF2MD', 'pandasDF2MD', (['out_df', '(20)'], {}), '(out_df, 20)\n', (2818, 2830), False, 'from brightics.common.report import ReportBuilder, strip_margin, pandasDF2MD, plt2MD, dict2MD\n'), ((2891, 2913), 'brightics.common.report.dict2MD', 'dict2MD', (['res_get_param'], {}), '(res_get_param)\n', (2898, 2913), False, 'from brightics.common.report import ReportBuilder, strip_margin, pandasDF2MD, plt2MD, dict2MD\n'), ((2938, 2968), 'brightics.common.report.pandasDF2MD', 'pandasDF2MD', (['res_components_df'], {}), '(res_components_df)\n', (2949, 2968), False, 'from brightics.common.report import ReportBuilder, strip_margin, pandasDF2MD, plt2MD, dict2MD\n')]
|
from __future__ import print_function, absolute_import, division
import numpy as np
from poseutils.logger import log
from poseutils.datasets.unprocessed.Dataset import Dataset
class TDPWDataset(Dataset):
    """Loader for the 3DPW dataset stored as a pickled npz archive.

    :param path: path to the npz file holding a ``data`` dictionary with
        ``train`` and ``test`` splits
    :type path: str
    """

    def __init__(self, path):
        super(TDPWDataset, self).__init__('3dpw')
        self.load_data(path)

    def load_data(self, path):
        """Read the train/test splits from *path* into the base-class buffers."""
        archive = np.load(path, allow_pickle=True, encoding='latin1')
        data = archive['data'].item()
        # x1000 on the camera-space 3D joints — presumably metres to
        # millimetres; confirm the upstream unit convention.
        for split_name, target in (('train', self._data_train),
                                   ('test', self._data_valid)):
            split = data[split_name]
            target['2d'] = split["combined_2d"]
            target['3d'] = split["combined_3d_cam"] * 1000
        log("Loaded raw data")
|
[
"numpy.load",
"poseutils.logger.log"
] |
[((885, 907), 'poseutils.logger.log', 'log', (['"""Loaded raw data"""'], {}), "('Loaded raw data')\n", (888, 907), False, 'from poseutils.logger import log\n'), ((483, 534), 'numpy.load', 'np.load', (['path'], {'allow_pickle': '(True)', 'encoding': '"""latin1"""'}), "(path, allow_pickle=True, encoding='latin1')\n", (490, 534), True, 'import numpy as np\n')]
|
# Copyright 2020 Makani Technologies LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""GS GPS parameters."""
from makani.config import mconfig
from makani.control import system_types
import numpy as np
@mconfig.Config(deps={
    'gs_model': 'base_station.gs_model',
    'test_site': 'common.test_site',
})
def MakeParams(params):
  """Make ground station gps parameters."""
  gs_model = params['gs_model']
  if gs_model == system_types.kGroundStationModelTopHat:
    primary_dir = [0.0, 0.0, -1.0]
    primary_pos = [1.418, -1.657, -2.417]
    # The TopHat carries no secondary GPS; mirror the primary antenna.
    secondary_dir = primary_dir
    secondary_pos = primary_pos
    # Angle [rad] from the GPS compass baseline to the zero-azimuth reference
    # of the perch frame.  The TopHat has no GPS compass; the value is kept
    # for historical consistency.
    compass_to_perch_azi = -2.440
  elif gs_model == system_types.kGroundStationModelGSv1:
    primary_dir = [0.0, 0.0, -1.0]
    # Position measured on 2015-06-15.
    primary_pos = [0.0, 0.0, -2.94]
    # GSv1 has no secondary GPS either; mirror the primary antenna.
    secondary_dir = primary_dir
    secondary_pos = primary_pos
    # Angle [rad] from the GPS compass baseline to the zero-azimuth reference
    # of the perch frame.
    compass_to_perch_azi = -2.440
  elif gs_model == system_types.kGroundStationModelGSv2:
    primary_dir = [0.0, 0.0, -1.0]
    secondary_dir = [0.0, 0.0, -1.0]
    primary_pos = [-0.002, 0.011, -6.7]
    if params['test_site'] == system_types.kTestSiteParkerRanch:
      # See b/137283974 for details.
      secondary_pos = [-2.450, -0.428, -6.827]
    elif params['test_site'] == system_types.kTestSiteNorway:
      # See b/137660975 for details.
      secondary_pos = [-2.450, -0.428, -6.757]
    else:
      assert False, 'Unsupported test site.'
    # Angle [rad] from the GPS compass baseline to the zero-azimuth reference
    # of the platform frame.  See b/118710931.
    compass_to_perch_azi = np.deg2rad(169.84)
  else:
    assert False, 'Unsupported ground station model.'

  return {
      # Position [m] of the GS GPS antennae in the platform frame.
      # NOTE: The direction of the antennae is currently not used.
      'primary_antenna_p': {
          'antenna_dir': primary_dir,
          'pos': primary_pos,
      },
      'secondary_antenna_p': {
          'antenna_dir': secondary_dir,
          'pos': secondary_pos,
      },
      # Calibration for the ground station compass ([#], [rad], [#]).  The
      # bias accounts for the angle between the perch frame and the NovAtel
      # differential GPS receiver.
      # TODO: Remove this parameter once the computation of compass heading
      # from the primary and secondary antennae is implemented.
      'heading_cal': {
          'scale': 1.0, 'bias': compass_to_perch_azi, 'bias_count': 0}
  }
|
[
"makani.config.mconfig.Config",
"numpy.deg2rad"
] |
[((711, 806), 'makani.config.mconfig.Config', 'mconfig.Config', ([], {'deps': "{'gs_model': 'base_station.gs_model', 'test_site': 'common.test_site'}"}), "(deps={'gs_model': 'base_station.gs_model', 'test_site':\n 'common.test_site'})\n", (725, 806), False, 'from makani.config import mconfig\n'), ((2688, 2706), 'numpy.deg2rad', 'np.deg2rad', (['(169.84)'], {}), '(169.84)\n', (2698, 2706), True, 'import numpy as np\n')]
|
import sys
# Make the project root importable when the tests are run from this directory.
sys.path.append('./')
from unittest import TestCase
from SumpOverflowAlert.Calibration.Calibrator import Calibrator
from SumpOverflowAlert import config
class TestCalibrator(TestCase):
    """Unit tests for :class:`Calibrator` observation tracking.

    Fixes: replaced the deprecated ``assertEquals`` alias (removed in
    Python 3.12) with ``assertEqual`` and dropped the no-op ``__init__``
    override that only delegated to the base class.
    """

    def setUp(self):
        """Pin the configured trigger distance and build a fresh calibrator."""
        config.trigger_distance = 2
        self.initial_config_value = config.trigger_distance
        self.calibrator = Calibrator()

    def testShouldSetLevelToConfigTriggerDistance(self):
        """A new calibrator copies the config value and has no observations."""
        self.assertEqual(self.calibrator.trigger_distance, config.trigger_distance)
        self.assertFalse(hasattr(self.calibrator, 'near_distance'))
        self.assertFalse(hasattr(self.calibrator, 'far_distance'))

    def testShouldSetNearAndFarToFirstValue(self):
        """The first observation seeds both the near and far bounds."""
        self.calibrator.record_observation(10)
        self.assertEqual(10, self.calibrator.near_distance)
        self.assertEqual(10, self.calibrator.far_distance)
        self.assertConfigWasNotChanged()

    def testShouldRecordNewNearDistance(self):
        """A closer observation tightens the near bound only."""
        self.calibrator.record_observation(5)
        self.calibrator.record_observation(4)
        self.assertEqual(self.calibrator.near_distance, 4)
        self.assertEqual(self.calibrator.far_distance, 5)
        self.assertConfigWasNotChanged()

    def testShouldNotChangeNearWhenRecordingFartherValue(self):
        """A farther observation widens the far bound only."""
        self.calibrator.record_observation(6)
        self.calibrator.record_observation(7)
        self.assertEqual(self.calibrator.near_distance, 6)
        self.assertEqual(self.calibrator.far_distance, 7)
        self.assertEqual(self.initial_config_value, self.calibrator.trigger_distance)
        self.assertConfigWasNotChanged()

    def testShouldUpdateTriggerDistanceWhenCloserToFarThanNear(self):
        """Observations nearer the far bound raise the trigger distance."""
        self.calibrator.record_observation(3)
        self.calibrator.record_observation(10)
        self.calibrator.record_observation(7)
        self.assertGreater(self.calibrator.trigger_distance, self.initial_config_value)
        self.assertConfigWasNotChanged()

    def testShouldNotReducePastTriggerDistance(self):
        """The near bound never drops below the trigger distance."""
        self.calibrator.record_observation(3)
        self.calibrator.record_observation(10)
        self.calibrator.record_observation(1)
        self.assertGreaterEqual(self.calibrator.trigger_distance, self.initial_config_value)
        self.assertGreaterEqual(self.calibrator.near_distance, self.calibrator.trigger_distance)
        self.assertConfigWasNotChanged()

    def testGetTriggerDistanceShouldReturnTriggerDistance(self):
        """The accessor mirrors the configured trigger distance."""
        trigger_distance = self.calibrator.get_trigger_distance()
        self.assertEqual(config.trigger_distance, trigger_distance)

    def assertConfigWasNotChanged(self):
        """Helper: the calibrator must never mutate the global config value."""
        self.assertEqual(self.initial_config_value, config.trigger_distance)
|
[
"sys.path.append",
"SumpOverflowAlert.Calibration.Calibrator.Calibrator"
] |
[((12, 33), 'sys.path.append', 'sys.path.append', (['"""./"""'], {}), "('./')\n", (27, 33), False, 'import sys\n'), ((429, 441), 'SumpOverflowAlert.Calibration.Calibrator.Calibrator', 'Calibrator', ([], {}), '()\n', (439, 441), False, 'from SumpOverflowAlert.Calibration.Calibrator import Calibrator\n')]
|
from lex import *
import sys
def main():
    """Lex the source file named in argv[1] and print every token."""
    print("Mini Java Compiler - Lexer Test")
    if len(sys.argv) != 2:
        sys.exit("Error: Compiler needs source file as argument.")
    with open(sys.argv[1], 'r') as source:
        contents = source.read()
    lexer = Lexer(contents)
    filler = ""
    # Section header for the token-stream portion of the output.
    print(f"{filler:-<50}\nToken Stream Test")
    # Drain the token generator; iteration ends on StopIteration.
    for token in lexer.tokens(ignore=True):
        print(token)
if __name__ == '__main__':
    # Hard-code argv so the script can be launched without command-line
    # arguments (e.g. from an IDE); main() still sees exactly two entries.
    sys.argv = ["./lexerTest.py", "../test/case1/Main.java"]
    main()
|
[
"sys.exit"
] |
[((124, 182), 'sys.exit', 'sys.exit', (['"""Error: Compiler needs source file as argument."""'], {}), "('Error: Compiler needs source file as argument.')\n", (132, 182), False, 'import sys\n')]
|
from ticTacToe import *
from neuralNetwork import *
from math import sqrt
from math import floor
class AITrainer:
    """Evolves a population of tic-tac-toe neural networks by self-play.

    Networks are compared in head-to-head games (``findWinner``); winners
    are swapped toward the front of ``AIList`` and then bred pairwise to
    form the next generation (``crank``).
    """

    def __init__(self, numberOfAIs):
        # numberOfAIs: total population size; sqrt of it survive each round.
        self.AIList = []
        self.numberOfAIs = numberOfAIs
        self.numberOfSurvivingAIs = floor(sqrt(numberOfAIs))
        self.trainingStarted=0
        # Seed the population with fresh 9-input/9-output networks.
        for i in range (0,self.numberOfAIs):
            self.AIList.append(neuralNetwork(9, 5, 9, 9, []))

    def train(self):
        """Run generations until stopTraining() clears the flag.

        NOTE(review): the flag is only checked between cranks, so this
        blocks the calling thread; stopTraining must be invoked from the
        loop itself or another thread.
        """
        self.trainingStarted=1
        while self.trainingStarted:
            self.crank()

    def stopTraining(self):
        """Request that train() stop after the current generation."""
        self.trainingStarted=0

    def crank(self):
        """Run one generation: tournament selection then pairwise breeding."""
        newAIList = []
        winner = 0
        # Repeated passes of pairwise matches; when the challenger (index j)
        # wins, it is swapped toward the front so survivors accumulate in
        # the first numberOfSurvivingAIs slots.
        # NOTE(review): range stops at len(self.AIList)-1, so the last AI
        # never plays as challenger — possibly an off-by-one; confirm intent.
        for k in range (0, floor(sqrt(len(self.AIList)))):
            for i in range (0, self.numberOfSurvivingAIs):
                for j in range (i+1, len(self.AIList)-1):
                    winner = max(self.findWinner(self.AIList[i],self.AIList[j]),self.findWinner(self.AIList[j],self.AIList[i]))
                    if winner==2:
                        self.AIList[i],self.AIList[j] = self.AIList[j],self.AIList[i]
        # Breed every ordered pair of survivors (including self-pairing).
        for i in range (0, self.numberOfSurvivingAIs):
            for j in range (i, self.numberOfSurvivingAIs):
                newAI=neuralNetwork(9, 5, 9, 9, [])
                newAI.generateWeightsMatrixFromParents(self.AIList[i],self.AIList[j])
                newAIList.append(newAI)
        # Top up with random networks to keep the population size constant.
        while len(newAIList) < len(self.AIList):
            newAIList.append(neuralNetwork(9, 5, 9, 9, []))
        self.AIList = newAIList
        self.numberOfSurvivingAIs = floor(sqrt(len(self.AIList)))

    def initializeAI(self):
        """Return a fresh, untrained network with the standard topology."""
        newAI = neuralNetwork(9, 5, 9, 9, [])
        return newAI

    def findWinner(self, ai1, ai2):
        """Play one game between ai1 (player 1) and ai2 (player 2).

        Returns 1 or 2 for the winning player.  A tie counts as a win for
        player 2, and an invalid move forfeits the game to the opponent.
        """
        gameWinner = 0
        turn = 1
        answer = 0
        gameBoardReturnString = ""
        gameBoard = TicTacToeBoard()
        while not gameWinner:
            aiInput = gameBoard.returnInputForAi()
            # NOTE(review): transformInput's return value is discarded —
            # presumably it mutates aiInput in place; confirm.
            transformInput(aiInput)
            if turn==1:
                answer=ai1.answer(aiInput)
            elif turn==2:
                answer=ai2.answer(aiInput)
            # answer is a cell index 0..8; convert to 1-based row/column.
            gameBoardReturnString=gameBoard.move(floor(answer/3)+1,answer%3+1)
            if gameBoardReturnString == "Player 1 won":
                gameWinner=1
            elif gameBoardReturnString == "Player 2 won":
                gameWinner=2
            elif gameBoardReturnString == "Tie":
                gameWinner=2
            elif gameBoardReturnString == "Invalid move":
                if turn == 1:
                    gameWinner = 2
                elif turn == 2:
                    gameWinner = 1
            # XOR with 3 toggles the turn between 1 and 2.
            turn=turn^3
        # Debug aid: dump the board when the loser made the final move
        # (turn has already been toggled past the winner at this point).
        if gameWinner != turn:
            print(gameBoard.returnInputForAi())
        return gameWinner
|
[
"math.floor",
"math.sqrt"
] |
[((258, 275), 'math.sqrt', 'sqrt', (['numberOfAIs'], {}), '(numberOfAIs)\n', (262, 275), False, 'from math import sqrt\n'), ((2144, 2161), 'math.floor', 'floor', (['(answer / 3)'], {}), '(answer / 3)\n', (2149, 2161), False, 'from math import floor\n')]
|
import struct
from typing import BinaryIO
from PIL import Image
from PIL.ImageFile import PyDecoder
class TileDecoder(PyDecoder):
    """PIL decoder for 1-bit-per-pixel tile data.

    Each input byte holds one column of an 8-pixel-tall row band; bit
    ``2**r`` of the byte selects row ``r`` within the band.  A set bit
    decodes to luminance 0 (black), a clear bit to 0xff (white).
    """

    def decode(self, b: bytes):
        if len(b) % 8 != 0:
            raise Exception("tile too smol")
        width = self.state.xsize
        if width % 8 != 0:
            raise Exception("canvas too smol")
        # One output byte per pixel ("L" mode raster).
        raw = bytearray(width * self.state.ysize)
        # Consume the input one row band (width bytes) at a time.
        for i in range(0, len(b), width):
            data = b[i:i + width]
            # Zero-pad a short final band so every band covers full width.
            plop_blanks = width - len(data)
            if plop_blanks > 0:
                data += b"\0" * plop_blanks
            # Band index * 8 rows * width = offset of this band in raw.
            i *= 8
            # m walks the bit within each byte (row inside the band);
            # x is the raster offset of that row's first pixel.
            for m, x in zip((1, 2, 4, 8, 0x10, 0x20, 0x40, 0x80), range(i, i + width * 8, width)):
                for d in data:
                    raw[x] = 0 if d & m else 0xff
                    x += 1
        self.set_as_raw(bytes(raw))
        # (-1, 0): all data consumed, no error.
        return -1, 0

    @staticmethod
    def from_stream(f: BinaryIO, width: int = 16, height: int = 16) -> Image.Image:
        """Read width*height*8 bytes from *f* and decode them as one tile image.

        width/height are measured in 8x8-pixel tiles, not pixels.
        """
        return Image.frombytes("L", (width * 8, height * 8), f.read(width * height * 8), "tile")
# Register the 1bpp "tile" format so Image.frombytes can dispatch to TileDecoder.
Image.register_decoder("tile", TileDecoder)
def bigQ_into_bytearray(tile: int, mask: int, im: "Image.Image", start_x: int, start_y: int):
    """Paint one 8x8 cell of (luma, alpha) pixels from a 64-bit tile/mask pair.

    Each byte of *tile* / *mask* (most significant byte first) holds one
    column; bit ``2**r`` of that byte selects the pixel at row
    ``start_y + r``.  A set tile bit paints luminance 0 (black), a clear
    bit 0xff; *mask* is treated the same way for the alpha channel.
    """
    # Column bytes left-to-right: shift 56 (MSB) down to 0 (LSB).
    column_bytes = [((tile >> shift) & 0xff, (mask >> shift) & 0xff)
                    for shift in range(56, -1, -8)]
    for row in range(8):
        bit = 1 << row
        y = start_y + row
        for col, (tile_byte, mask_byte) in enumerate(column_bytes):
            luma = 0 if tile_byte & bit else 0xff
            alpha = 0 if mask_byte & bit else 0xff
            im.putpixel((start_x + col, y), (luma, alpha))
class SpriteDecoder(PyDecoder):
    """PIL decoder for masked 16x16 sprites ("LA" mode output).

    Each 64-byte sprite is eight big-endian u64 quadrants interleaved as
    mask/data pairs: m1, m2, d1, d2, m3, m4, d3, d4 — upper-left,
    lower-left, upper-right, lower-right.  Data bits are cleared where the
    mask is set before painting (``d & ~m``).
    """

    def decode(self, b: bytes):
        if len(b) % 64 != 0:
            raise Exception("sprite too smol")
        width = self.state.xsize
        if width % 16 != 0:
            raise Exception("canvas too smol")
        x, y = 0, 0
        # Bytes per row of sprites: (width/16 sprites) * 64 bytes each.
        bwidth = width // 16 * 64
        for i in range(0, len(b), bwidth):
            data = b[i:i + bwidth]
            for j in range(0, len(data), 64):
                m1, m2, d1, d2, m3, m4, d3, d4 = struct.unpack(">8Q", data[j:j + 64])
                # Mask bits punch transparent holes in the data bits.
                ul_data, bl_data, ur_data, br_data = d1 & ~m1, d2 & ~m2, d3 & ~m3, d4 & ~m4
                bigQ_into_bytearray(ul_data, m1, self.im, x, y)
                bigQ_into_bytearray(ur_data, m3, self.im, x + 8, y)
                bigQ_into_bytearray(bl_data, m2, self.im, x, y + 8)
                bigQ_into_bytearray(br_data, m4, self.im, x + 8, y + 8)
                x += 16
            x = 0
            y += 16
        # (-1, 0): all data consumed, no error.
        return -1, 0

    @staticmethod
    def from_stream(f: BinaryIO, width: int = 16, height: int = 16) -> Image.Image:
        """Read width*height*64 bytes from *f* and decode them as sprites.

        width/height are measured in 16x16-pixel sprites, not pixels.
        """
        return Image.frombytes(
            "LA", (width * 16, height * 16), f.read(width * height * 64), "sprite"
        )
# Register the masked "sprite" format so Image.frombytes can dispatch to SpriteDecoder.
Image.register_decoder("sprite", SpriteDecoder)
|
[
"PIL.Image.register_decoder",
"struct.unpack"
] |
[((909, 952), 'PIL.Image.register_decoder', 'Image.register_decoder', (['"""tile"""', 'TileDecoder'], {}), "('tile', TileDecoder)\n", (931, 952), False, 'from PIL import Image\n'), ((2328, 2375), 'PIL.Image.register_decoder', 'Image.register_decoder', (['"""sprite"""', 'SpriteDecoder'], {}), "('sprite', SpriteDecoder)\n", (2350, 2375), False, 'from PIL import Image\n'), ((1734, 1770), 'struct.unpack', 'struct.unpack', (['""">8Q"""', 'data[j:j + 64]'], {}), "('>8Q', data[j:j + 64])\n", (1747, 1770), False, 'import struct\n')]
|
from django.db import models
from django.contrib.auth.models import User
from django.contrib.postgres.search import TrigramSimilarity
from django.core.exceptions import ObjectDoesNotExist
from django.conf import settings
from StreamServerApp.subtitles import get_subtitles
from StreamServerApp.media_processing import convert_subtitles_to_webvtt
from StreamServerApp.media_management.fileinfo import createfileinfo, readfileinfo
import os
import subprocess
class SearchManager(models.Manager):
    """Manager adding fuzzy (trigram) search over a single text field."""

    def search_trigramm(self, model_field, query):
        """Return rows whose *model_field* loosely matches *query*.

        Rows are annotated with a ``similarity`` score, filtered to those
        scoring at least 0.01, and ordered best match first.
        """
        annotated = self.annotate(
            similarity=TrigramSimilarity(model_field, query))
        return annotated.filter(similarity__gte=0.01).order_by('-similarity')
class CommonInfo(models.Model):
    """Abstract base giving titled models timestamps and trigram search."""

    # Human-readable title; also the target of __str__.
    title = models.CharField(max_length=300)
    created_at = models.DateTimeField(auto_now_add=True)
    objects = SearchManager()

    class Meta:
        # Abstract: no table is created for this model itself.
        abstract = True

    def __str__(self):
        return self.title
class Movie(CommonInfo):
    """A standalone film; inherits title, timestamps and search from CommonInfo."""
    pass
class Series(CommonInfo):
    """A TV series grouping Video rows via their ``series`` foreign key."""

    # Path/URL of the series cover image.
    thumbnail = models.CharField(max_length=300, default="")

    @property
    def season_list(self):
        """Return the distinct season numbers among this series' videos."""
        return list(set(self.video_set.values_list('season', flat=True)))

    def return_season_episodes(self, season):
        """Return this series' videos for *season*, ordered by episode."""
        return self.video_set.filter(season=season).order_by('episode')
class Video(models.Model):
    """One playable video file plus its codec metadata and relations."""

    name = models.CharField(max_length=200)
    video_codec = models.CharField(max_length=100, default="")
    height = models.IntegerField(default=0)
    width = models.IntegerField(default=0)
    audio_codec = models.CharField(max_length=100, default="")
    metadata = models.CharField(max_length=100, blank=True, default="")
    video_url = models.CharField(max_length=300, default="")
    video_folder = models.CharField(max_length=300, default="")
    thumbnail = models.CharField(max_length=300, default="")
    # Relations to series and movies.
    # on_delete=SET_NULL keeps videos indexed if the series or movie they
    # belong to is removed.
    series = models.ForeignKey(Series, blank=True, null=True, on_delete=models.SET_NULL)
    movie = models.ForeignKey(Movie, blank=True, null=True, on_delete=models.SET_NULL)
    # Episode/season numbering for series entries (null for plain movies).
    episode = models.PositiveSmallIntegerField(default=None, blank=True, null=True, db_index=True)
    season = models.PositiveSmallIntegerField(default=None, blank=True, null=True, db_index=True)
    history = models.ManyToManyField(User, through='UserVideoHistory')
    objects = SearchManager()

    @property
    def next_episode(self):
        """Return the id of the next episode in this series, if any.

        Tries episode+1 in the same season, then episode 1 of the next
        season; returns None when neither exists or there is no series.
        """
        if self.series:
            try:
                return self.series.video_set.get(episode=self.episode+1, season=self.season).id
            except ObjectDoesNotExist:
                try:
                    return self.series.video_set.get(episode=1, season=self.season+1).id
                except ObjectDoesNotExist:
                    return None

    def return_user_time_history(self, user):
        """Return *user*'s last playback position in seconds (0 if never played)."""
        video_history = self.uservideohistory_set.filter(user=user)
        if video_history.count() > 0:
            return video_history.first().time
        else:
            return 0

    def get_subtitles(self, video_path, remote_url):
        """Extract subtitles for this video and save one Subtitle row per language.

        Reads ``fileinfo.json`` next to the video folder to locate the full
        video path, runs subtitle extraction, and stores the resulting
        srt/vtt paths plus a remote URL built from *remote_url*.

        Args:
            video_path: local root used to compute the vtt's relative path.
            remote_url: base URL prepended to that relative path.

        Returns:
            0 on early exit (missing or empty fileinfo); None otherwise.
        """
        video_infos = []
        fileinfos_path = "{}/fileinfo.json".format(
            os.path.split(self.video_folder)[0])
        if os.path.isfile(fileinfos_path):
            video_infos = readfileinfo(fileinfos_path)
            if not video_infos:
                print("video infos are empty, don't add subs")
                return 0
        else:
            print("{} is not a file ".format(fileinfos_path))
            return 0
        print("get sub for {}".format(video_infos["video_full_path"]))
        # get_subtitles returns (vtt paths by language, srt paths by language).
        subtitles_list = get_subtitles(video_infos["video_full_path"])
        webvtt_subtitles_full_path = subtitles_list[0]
        srt_subtitles_full_path = subtitles_list[1]
        # NOTE(review): this dict is populated but never read — dead code?
        webvtt_subtitles_remote_path = {}
        for language_str, srt_subtitle_url in webvtt_subtitles_full_path.items():
            webvtt_subtitles_remote_path[language_str] = ''
            vtt_subtitle_url = webvtt_subtitles_full_path[language_str]
            if srt_subtitle_url and vtt_subtitle_url:
                webvtt_subtitles_relative_path = os.path.relpath(
                    vtt_subtitle_url, video_path)
                newsub = Subtitle()
                newsub.video_id = self
                newsub.vtt_path = vtt_subtitle_url
                if srt_subtitles_full_path.get(language_str):
                    newsub.srt_path = srt_subtitles_full_path[language_str]
                newsub.webvtt_subtitle_url = os.path.join(
                    remote_url, webvtt_subtitles_relative_path)
                newsub.language = language_str
                newsub.save()

    def __str__(self):
        return '{}'.format(self.name)
class UserVideoHistory(models.Model):
    """Through-model tracking a user's last playback position per video."""

    user = models.ForeignKey(User, on_delete=models.CASCADE)
    video = models.ForeignKey(Video, on_delete=models.CASCADE)
    time = models.IntegerField()  # time in sec
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True, db_index=True)
class Subtitle(models.Model):
    """One subtitle track (srt + vtt variants) attached to a Video."""

    webvtt_subtitle_url = models.CharField(max_length=300, default="")
    webvtt_sync_url = models.CharField(max_length=300, default="")
    srt_path = models.CharField(max_length=300, default="")
    srt_sync_path = models.CharField(max_length=300, default="")
    vtt_path = models.CharField(max_length=300, default="")
    vtt_sync_path = models.CharField(max_length=300, default="")
    # ISO-639-2-style language codes plus an "original version" marker.
    FRENCH = 'fra'
    ENGLISH = 'eng'
    OV = 'OV'
    LANGUAGE_CHOICES = [
        (FRENCH, 'French'),
        (ENGLISH, 'English'),
        (OV, 'Original Version'),
    ]
    language = models.CharField(
        max_length=3,
        choices=LANGUAGE_CHOICES,
        default=ENGLISH,
    )
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True, db_index=True)
    video_id = models.ForeignKey(Video, related_name='subtitles', on_delete=models.CASCADE)
    uploaded_data = models.FileField(upload_to='uploads/', default='')

    def resync(self):
        """Re-synchronize this subtitle against its video and save the result.

        Runs the external ``ffs`` tool to align the srt with the video,
        converts the aligned srt to WebVTT, then stores the ``*_sync``
        paths and the public sync URL on this instance.
        """
        video_path = self.video_id.video_folder
        subtitle_path = self.srt_path
        webvtt_path = self.vtt_path.replace('.vtt', '_sync.vtt')
        sync_subtitle_path = subtitle_path.replace('.srt', '_sync.srt')
        # ffs = ffsubsync; writes the time-shifted srt to sync_subtitle_path.
        subprocess.run(["ffs", video_path, "-i", subtitle_path, "-o", sync_subtitle_path])
        convert_subtitles_to_webvtt(sync_subtitle_path, webvtt_path)
        self.srt_sync_path = sync_subtitle_path
        self.vtt_sync_path = webvtt_path
        # Public URL: VIDEO_URL + the webvtt path relative to VIDEO_ROOT.
        self.webvtt_sync_url = os.path.join(settings.VIDEO_URL, webvtt_path.split(settings.VIDEO_ROOT)[1])
        self.save()
|
[
"django.db.models.FileField",
"subprocess.run",
"django.contrib.postgres.search.TrigramSimilarity",
"django.db.models.ManyToManyField",
"StreamServerApp.media_management.fileinfo.readfileinfo",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.models.PositiveSmallIntegerField",
"django.db.models.IntegerField",
"os.path.isfile",
"StreamServerApp.media_processing.convert_subtitles_to_webvtt",
"os.path.relpath",
"django.db.models.DateTimeField",
"os.path.split",
"os.path.join",
"StreamServerApp.subtitles.get_subtitles"
] |
[((783, 815), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (799, 815), False, 'from django.db import models\n'), ((833, 872), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (853, 872), False, 'from django.db import models\n'), ((1074, 1118), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'default': '""""""'}), "(max_length=300, default='')\n", (1090, 1118), False, 'from django.db import models\n'), ((1395, 1427), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (1411, 1427), False, 'from django.db import models\n'), ((1446, 1490), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'default': '""""""'}), "(max_length=100, default='')\n", (1462, 1490), False, 'from django.db import models\n'), ((1504, 1534), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (1523, 1534), False, 'from django.db import models\n'), ((1547, 1577), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (1566, 1577), False, 'from django.db import models\n'), ((1596, 1640), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'default': '""""""'}), "(max_length=100, default='')\n", (1612, 1640), False, 'from django.db import models\n'), ((1656, 1712), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'blank': '(True)', 'default': '""""""'}), "(max_length=100, blank=True, default='')\n", (1672, 1712), False, 'from django.db import models\n'), ((1729, 1773), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'default': '""""""'}), "(max_length=300, default='')\n", (1745, 1773), False, 'from django.db import models\n'), ((1793, 1837), 'django.db.models.CharField', 'models.CharField', 
([], {'max_length': '(300)', 'default': '""""""'}), "(max_length=300, default='')\n", (1809, 1837), False, 'from django.db import models\n'), ((1854, 1898), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'default': '""""""'}), "(max_length=300, default='')\n", (1870, 1898), False, 'from django.db import models\n'), ((2042, 2117), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Series'], {'blank': '(True)', 'null': '(True)', 'on_delete': 'models.SET_NULL'}), '(Series, blank=True, null=True, on_delete=models.SET_NULL)\n', (2059, 2117), False, 'from django.db import models\n'), ((2130, 2204), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Movie'], {'blank': '(True)', 'null': '(True)', 'on_delete': 'models.SET_NULL'}), '(Movie, blank=True, null=True, on_delete=models.SET_NULL)\n', (2147, 2204), False, 'from django.db import models\n'), ((2273, 2361), 'django.db.models.PositiveSmallIntegerField', 'models.PositiveSmallIntegerField', ([], {'default': 'None', 'blank': '(True)', 'null': '(True)', 'db_index': '(True)'}), '(default=None, blank=True, null=True,\n db_index=True)\n', (2305, 2361), False, 'from django.db import models\n'), ((2372, 2460), 'django.db.models.PositiveSmallIntegerField', 'models.PositiveSmallIntegerField', ([], {'default': 'None', 'blank': '(True)', 'null': '(True)', 'db_index': '(True)'}), '(default=None, blank=True, null=True,\n db_index=True)\n', (2404, 2460), False, 'from django.db import models\n'), ((2472, 2528), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['User'], {'through': '"""UserVideoHistory"""'}), "(User, through='UserVideoHistory')\n", (2494, 2528), False, 'from django.db import models\n'), ((5123, 5172), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.CASCADE'}), '(User, on_delete=models.CASCADE)\n', (5140, 5172), False, 'from django.db import models\n'), ((5185, 5235), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Video'], 
{'on_delete': 'models.CASCADE'}), '(Video, on_delete=models.CASCADE)\n', (5202, 5235), False, 'from django.db import models\n'), ((5247, 5268), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (5266, 5268), False, 'from django.db import models\n'), ((5302, 5341), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (5322, 5341), False, 'from django.db import models\n'), ((5359, 5409), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'db_index': '(True)'}), '(auto_now=True, db_index=True)\n', (5379, 5409), False, 'from django.db import models\n'), ((5468, 5512), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'default': '""""""'}), "(max_length=300, default='')\n", (5484, 5512), False, 'from django.db import models\n'), ((5535, 5579), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'default': '""""""'}), "(max_length=300, default='')\n", (5551, 5579), False, 'from django.db import models\n'), ((5595, 5639), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'default': '""""""'}), "(max_length=300, default='')\n", (5611, 5639), False, 'from django.db import models\n'), ((5660, 5704), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'default': '""""""'}), "(max_length=300, default='')\n", (5676, 5704), False, 'from django.db import models\n'), ((5720, 5764), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'default': '""""""'}), "(max_length=300, default='')\n", (5736, 5764), False, 'from django.db import models\n'), ((5785, 5829), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'default': '""""""'}), "(max_length=300, default='')\n", (5801, 5829), False, 'from django.db import models\n'), ((6021, 6094), 'django.db.models.CharField', 'models.CharField', ([], 
{'max_length': '(3)', 'choices': 'LANGUAGE_CHOICES', 'default': 'ENGLISH'}), '(max_length=3, choices=LANGUAGE_CHOICES, default=ENGLISH)\n', (6037, 6094), False, 'from django.db import models\n'), ((6143, 6182), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (6163, 6182), False, 'from django.db import models\n'), ((6200, 6250), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'db_index': '(True)'}), '(auto_now=True, db_index=True)\n', (6220, 6250), False, 'from django.db import models\n'), ((6266, 6342), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Video'], {'related_name': '"""subtitles"""', 'on_delete': 'models.CASCADE'}), "(Video, related_name='subtitles', on_delete=models.CASCADE)\n", (6283, 6342), False, 'from django.db import models\n'), ((6363, 6413), 'django.db.models.FileField', 'models.FileField', ([], {'upload_to': '"""uploads/"""', 'default': '""""""'}), "(upload_to='uploads/', default='')\n", (6379, 6413), False, 'from django.db import models\n'), ((3565, 3595), 'os.path.isfile', 'os.path.isfile', (['fileinfos_path'], {}), '(fileinfos_path)\n', (3579, 3595), False, 'import os\n'), ((3966, 4011), 'StreamServerApp.subtitles.get_subtitles', 'get_subtitles', (["video_infos['video_full_path']"], {}), "(video_infos['video_full_path'])\n", (3979, 4011), False, 'from StreamServerApp.subtitles import get_subtitles\n'), ((6781, 6867), 'subprocess.run', 'subprocess.run', (["['ffs', video_path, '-i', subtitle_path, '-o', sync_subtitle_path]"], {}), "(['ffs', video_path, '-i', subtitle_path, '-o',\n sync_subtitle_path])\n", (6795, 6867), False, 'import subprocess\n'), ((6872, 6932), 'StreamServerApp.media_processing.convert_subtitles_to_webvtt', 'convert_subtitles_to_webvtt', (['sync_subtitle_path', 'webvtt_path'], {}), '(sync_subtitle_path, webvtt_path)\n', (6899, 6932), False, 'from StreamServerApp.media_processing import 
convert_subtitles_to_webvtt\n'), ((3623, 3651), 'StreamServerApp.media_management.fileinfo.readfileinfo', 'readfileinfo', (['fileinfos_path'], {}), '(fileinfos_path)\n', (3635, 3651), False, 'from StreamServerApp.media_management.fileinfo import createfileinfo, readfileinfo\n'), ((3517, 3549), 'os.path.split', 'os.path.split', (['self.video_folder'], {}), '(self.video_folder)\n', (3530, 3549), False, 'import os\n'), ((4479, 4524), 'os.path.relpath', 'os.path.relpath', (['vtt_subtitle_url', 'video_path'], {}), '(vtt_subtitle_url, video_path)\n', (4494, 4524), False, 'import os\n'), ((4855, 4911), 'os.path.join', 'os.path.join', (['remote_url', 'webvtt_subtitles_relative_path'], {}), '(remote_url, webvtt_subtitles_relative_path)\n', (4867, 4911), False, 'import os\n'), ((591, 628), 'django.contrib.postgres.search.TrigramSimilarity', 'TrigramSimilarity', (['model_field', 'query'], {}), '(model_field, query)\n', (608, 628), False, 'from django.contrib.postgres.search import TrigramSimilarity\n')]
|
import asyncio
import json
from pymongo import ReadPreference
from werkzeug.datastructures import MultiDict
from analyzers import get_analyzer
from api import APIError, APIHandler
from blueprints.assets.models import Asset
from workers.tasks import AnalyzeTask
from .document import BaseCollectionHandler, BaseDocumentHandler
__all__ = [
'AnalyzeHandler',
'AnalyzeManyHandler'
]
# Handlers
class BaseAnalyzeHandler:
def validate_analyzers(self, asset_type, analyzers):
"""
Validate the given list of analyzers (if valid the analyzers are
returned.
"""
# Check the structure of the analyzers is valid
if not isinstance(analyzers, list):
raise APIError(
'invalid_request',
hint='Request body JSON must be a list.'
)
if len(analyzers) == 0:
raise APIError(
'invalid_request',
hint='At least one analyzer is required.'
)
# Check each analyzer is valid
for analyzer in analyzers:
# Check structure
if not (
len(analyzer) == 2
and isinstance(analyzer[0], str)
and isinstance(analyzer[1], dict)
):
raise APIError(
'invalid_request',
hint=f'Invalid analyzer structure: {analyzer}'
)
# Check the analyzer exists
analyzer_cls = get_analyzer(asset_type, analyzer[0])
if not analyzer_cls:
raise APIError(
'invalid_request',
hint=f'Unknown analyzer: {asset_type}:{analyzer[0]}.'
)
# Check the settings for the analyzer are correct
settings_form = analyzer_cls.get_settings_form_cls()(
MultiDict({
k: v for k, v in analyzer[1].items()
if v is not None
})
)
if not settings_form.validate():
raise APIError(
'invalid_request',
hint=(
'Invalid settings for analyzer: '
f'{asset_type}:{analyzer[0]}.'
),
arg_errors=settings_form.errors
)
return analyzers
class AnalyzeHandler(BaseDocumentHandler, BaseAnalyzeHandler):
async def post(self, uid):
"""Analyze the asset for additional meta data"""
asset = self.get_asset(
uid,
projection={
'_id': True,
'type': True,
'expires': True
}
)
# Extract the analyzers from the request body
try:
raw_analyzers = json.loads(self.get_body_argument('analyzers'))
except:
raise APIError(
'invalid_request',
hint='Analyzers argument is not valid JSON.'
)
analyzers = self.validate_analyzers(asset.type, raw_analyzers)
# Add a task to perform the asset analysis
notification_url = self.get_body_argument('notification_url', None)
task = AnalyzeTask(
self.account._id,
asset._id,
analyzers,
notification_url
)
if notification_url:
# Fire and forget
await self.add_task_and_forget(task)
self.finish()
else:
# Wait for response
event = await self.add_task_and_wait(task)
if not event:
raise APIError('error', 'Connection lost')
elif event.type == 'task_error':
raise APIError('error', event.reason)
# Fetch the asset again now the analysis is complete
with Asset.with_options(read_preference=ReadPreference.PRIMARY):
asset = self.get_asset(
uid,
projection={
'uid': True,
'expires': True,
'ext': True,
'meta': True,
'name': True
}
)
# Handle image expiry
if not asset:
raise APIError(
'not_found',
hint='Asset expired whilst being analyzed'
)
json_type = asset.to_json_type()
self.write({
'uid': json_type['uid'],
'meta': json_type['meta']
})
class AnalyzeManyHandler(BaseCollectionHandler, BaseAnalyzeHandler):
async def post(self):
"""Analyze one for more asset for additional meta data"""
assets = self.get_assets(
projection={
'_id': True,
'type': True,
'expires': True,
'uid': True
}
)
# Extract the analyzers from the request body
try:
raw_analyzers = json.loads(self.get_body_argument('analyzers'))
except:
raise APIError(
'invalid_request',
hint='Analyzers argument is not valid JSON.'
)
# Peek to determine if the user wants the variations applied globally
# or locally.
if self.get_body_argument('local', False):
# Variations must be defined for each uid
uids = set(self.get_body_arguments('uids'))
analyzer_keys = set(list(raw_analyzers.keys()))
if uids != analyzer_keys:
raise APIError(
'invalid_request',
hint='Each uid must be assigned a list of analyzers.'
)
analyzers = {
a.uid: self.validate_analyzers(a.type, raw_analyzers[a.uid])
for a in assets
}
else:
# Global application
# Ensure all assets are the same type / base type (file)
asset_types = set([a.type for a in assets if a.type != 'file'])
if len(asset_types) > 1:
raise APIError(
'invalid_request',
hint=(
'All assets must be of the same type / base type '
'(file)'
)
)
local_analyzers = self.validate_analyzers(
asset_types.pop(),
raw_analyzers
)
analyzers = {a.uid: local_analyzers for a in assets}
# Add a set of tasks to generate the asset variations
notification_url = self.get_body_argument('notification_url', None)
tasks = []
task_names = []
for asset in assets:
task = AnalyzeTask(
self.account._id,
asset._id,
analyzers[asset.uid],
notification_url
)
if notification_url:
tasks.append(self.add_task_and_forget(task))
else:
tasks.append(self.add_task_and_wait(task))
task_names.append(asset.uid)
if notification_url:
# Fire and forget
await self.add_task_and_forget(task)
self.finish()
else:
# Wait for response
events = await asyncio.gather(*tasks)
# Collect any errors
errors = {}
for i, event in enumerate(events):
if not event:
errors[task_names[i]] = ['Connection lost']
elif event.type == 'task_error':
errors[task_names[i]] = [event.reason]
if errors:
raise APIError('error', arg_errors=errors)
# Fetch the assets again now the analysis is complete
with Asset.with_options(read_preference=ReadPreference.PRIMARY):
assets = self.get_assets(
projection={
'uid': True,
'expires': True,
'ext': True,
'meta': True,
'name': True
}
)
# Handle image expiry
if not asset:
raise APIError(
'not_found',
hint='Asset expired whilst being analyzed'
)
results = [a.to_json_type() for a in assets]
self.write({
'results': [
{
'uid': r['uid'],
'meta': r['meta']
}
for r in results
]
})
|
[
"asyncio.gather",
"blueprints.assets.models.Asset.with_options",
"workers.tasks.AnalyzeTask",
"analyzers.get_analyzer",
"api.APIError"
] |
[((3262, 3331), 'workers.tasks.AnalyzeTask', 'AnalyzeTask', (['self.account._id', 'asset._id', 'analyzers', 'notification_url'], {}), '(self.account._id, asset._id, analyzers, notification_url)\n', (3273, 3331), False, 'from workers.tasks import AnalyzeTask\n'), ((723, 792), 'api.APIError', 'APIError', (['"""invalid_request"""'], {'hint': '"""Request body JSON must be a list."""'}), "('invalid_request', hint='Request body JSON must be a list.')\n", (731, 792), False, 'from api import APIError, APIHandler\n'), ((890, 960), 'api.APIError', 'APIError', (['"""invalid_request"""'], {'hint': '"""At least one analyzer is required."""'}), "('invalid_request', hint='At least one analyzer is required.')\n", (898, 960), False, 'from api import APIError, APIHandler\n'), ((1507, 1544), 'analyzers.get_analyzer', 'get_analyzer', (['asset_type', 'analyzer[0]'], {}), '(asset_type, analyzer[0])\n', (1519, 1544), False, 'from analyzers import get_analyzer\n'), ((6914, 6999), 'workers.tasks.AnalyzeTask', 'AnalyzeTask', (['self.account._id', 'asset._id', 'analyzers[asset.uid]', 'notification_url'], {}), '(self.account._id, asset._id, analyzers[asset.uid], notification_url\n )\n', (6925, 6999), False, 'from workers.tasks import AnalyzeTask\n'), ((1305, 1380), 'api.APIError', 'APIError', (['"""invalid_request"""'], {'hint': 'f"""Invalid analyzer structure: {analyzer}"""'}), "('invalid_request', hint=f'Invalid analyzer structure: {analyzer}')\n", (1313, 1380), False, 'from api import APIError, APIHandler\n'), ((1600, 1687), 'api.APIError', 'APIError', (['"""invalid_request"""'], {'hint': 'f"""Unknown analyzer: {asset_type}:{analyzer[0]}."""'}), "('invalid_request', hint=\n f'Unknown analyzer: {asset_type}:{analyzer[0]}.')\n", (1608, 1687), False, 'from api import APIError, APIHandler\n'), ((2096, 2233), 'api.APIError', 'APIError', (['"""invalid_request"""'], {'hint': 'f"""Invalid settings for analyzer: {asset_type}:{analyzer[0]}."""', 'arg_errors': 'settings_form.errors'}), 
"('invalid_request', hint=\n f'Invalid settings for analyzer: {asset_type}:{analyzer[0]}.',\n arg_errors=settings_form.errors)\n", (2104, 2233), False, 'from api import APIError, APIHandler\n'), ((2926, 2999), 'api.APIError', 'APIError', (['"""invalid_request"""'], {'hint': '"""Analyzers argument is not valid JSON."""'}), "('invalid_request', hint='Analyzers argument is not valid JSON.')\n", (2934, 2999), False, 'from api import APIError, APIHandler\n'), ((3678, 3714), 'api.APIError', 'APIError', (['"""error"""', '"""Connection lost"""'], {}), "('error', 'Connection lost')\n", (3686, 3714), False, 'from api import APIError, APIHandler\n'), ((3898, 3956), 'blueprints.assets.models.Asset.with_options', 'Asset.with_options', ([], {'read_preference': 'ReadPreference.PRIMARY'}), '(read_preference=ReadPreference.PRIMARY)\n', (3916, 3956), False, 'from blueprints.assets.models import Asset\n'), ((4369, 4434), 'api.APIError', 'APIError', (['"""not_found"""'], {'hint': '"""Asset expired whilst being analyzed"""'}), "('not_found', hint='Asset expired whilst being analyzed')\n", (4377, 4434), False, 'from api import APIError, APIHandler\n'), ((5209, 5282), 'api.APIError', 'APIError', (['"""invalid_request"""'], {'hint': '"""Analyzers argument is not valid JSON."""'}), "('invalid_request', hint='Analyzers argument is not valid JSON.')\n", (5217, 5282), False, 'from api import APIError, APIHandler\n'), ((5713, 5800), 'api.APIError', 'APIError', (['"""invalid_request"""'], {'hint': '"""Each uid must be assigned a list of analyzers."""'}), "('invalid_request', hint=\n 'Each uid must be assigned a list of analyzers.')\n", (5721, 5800), False, 'from api import APIError, APIHandler\n'), ((6257, 6352), 'api.APIError', 'APIError', (['"""invalid_request"""'], {'hint': '"""All assets must be of the same type / base type (file)"""'}), "('invalid_request', hint=\n 'All assets must be of the same type / base type (file)')\n", (6265, 6352), False, 'from api import APIError, APIHandler\n'), 
((7498, 7520), 'asyncio.gather', 'asyncio.gather', (['*tasks'], {}), '(*tasks)\n', (7512, 7520), False, 'import asyncio\n'), ((7875, 7911), 'api.APIError', 'APIError', (['"""error"""'], {'arg_errors': 'errors'}), "('error', arg_errors=errors)\n", (7883, 7911), False, 'from api import APIError, APIHandler\n'), ((7996, 8054), 'blueprints.assets.models.Asset.with_options', 'Asset.with_options', ([], {'read_preference': 'ReadPreference.PRIMARY'}), '(read_preference=ReadPreference.PRIMARY)\n', (8014, 8054), False, 'from blueprints.assets.models import Asset\n'), ((8444, 8509), 'api.APIError', 'APIError', (['"""not_found"""'], {'hint': '"""Asset expired whilst being analyzed"""'}), "('not_found', hint='Asset expired whilst being analyzed')\n", (8452, 8509), False, 'from api import APIError, APIHandler\n'), ((3783, 3814), 'api.APIError', 'APIError', (['"""error"""', 'event.reason'], {}), "('error', event.reason)\n", (3791, 3814), False, 'from api import APIError, APIHandler\n')]
|
#--------------------------------------- RE par el lexer---------------------------------------
import re
import ply.lex as lex # Scanner
import ply.yacc as yacc
import math as math
import lexico
AuxList = ['temp', 'tempo']
Cuartetos = []
Temporales = []
Saltos = []
Scope = ['GLOBAL']
parametros = {}
sizeVar = 1
contVarLocal = [0]*9
LocationTemp = 'class'
Location = 'class'
paramChecktype = []
global lastVar
global DeclVar
global Funcion
global FuncionDeclarada
global cont
global claseDeclarada
global atributos
cont = 0
Memoria = []
#--------------------------------------- importar cuboSemantico---------------------------------------
from cuboSemantico import cuboSemantico
#cuboSemantico tiene todas las consideraciones semanticas
#-Generacion de codigo de expresiones aritmeticas
from stack import Stack
popper = Stack()
values = Stack()
tipos = Stack()
funct = Stack()
TemporalesFor = Stack()
#--------------------------------------- Variables ncesarias para usar yacc, lista de tokens y lexer---------------------------------------
tokens = lexico.tokens
lexer = lexico.lexer
#-------------- Directorio de Clases y Funciones, Tablas de Variables ---------
from directory import Directory
Tabla = Directory({}, {}, {}, {})
#--------- Memoria va asignando los espacios de memoria a las variables ----------#
from asignadorMemoria import AsignadorMemoria
memoria = AsignadorMemoria()
#--------- Constantes lleva el control de la tabla de constantes ----------#
from tablaConstantes import TablaConstantes
Constantes = TablaConstantes()
from tablaOperaciones import TablaOperaciones
Operadores = TablaOperaciones()
#-------------- principal---------------
#program creates the end quadruple for the VM
def p_programa(p):
'''
programa : PROGRAMA ID SEMICOLON scopeClases declaracion_clases scopeFunction declaracion_funciones scopeMain principal
| PROGRAMA ID SEMICOLON
'''
CrearCuadruplo('END','_','_','_')
#addScope(p[2])
p[0] = None
#neuralgic point to set the class scope and the location, also created the first quadruple to goto the main
def p_scopeClases(p):
'''
scopeClases : empty
'''
CrearCuadruplo('GOTO','_','_','_')
Tabla.SetScope('class')
Location = 'class'
Scope[0] = 'LOCAL'
p[0] = None
#neuralgic point to set the function scope
def p_scopeFunction(p):
'''
scopeFunction : empty
'''
Tabla.SetScope('function')
Location = 'function'
Scope[0] = 'LOCAL'
p[0] = None
#neuralgic point to set the main scope
def p_scopeMain(p):
'''
scopeMain : empty
'''
Tabla.SetScope('main')
Location = 'main'
Scope[0] = 'GLOBAL'
p[0] = None
#main sctructure of our language
def p_principal(p):
'''
principal : MAIN mainFin L_PARENTHESIS R_PARENTHESIS L_BRACKET cuerpo R_BRACKET
'''
p[0] = None
#neuralgic point to fill the first GOTO with the jump quadruple address
def p_mainFin(p):
'''
mainFin : empty
'''
Fill(0,cont)
p[0] = None
#cuerpo main estructure
def p_cuerpo(p) :
'''
cuerpo : cuerpo_aux cuerpo
|
'''
p[0] = None
#cuerpo options, you can used any of this structure in the body of our language
def p_cuerpo_aux(p) :
'''
cuerpo_aux : estatutos_repeticion
| estatutos_funciones
| declaracion_var
| instancear_objetos
| regreso
'''
p[0] = None
#-------------- estatutos---------------
#All the options our language have to choose between function statements
def p_estatutos_funciones(p):
'''
estatutos_funciones : input
| escribe
| llamada
| asignacion
| condicion
| listas
'''
#listas declaraction main structure
def p_listas(p):
'''
listas : ID METOD ID L_PARENTHESIS expresion R_PARENTHESIS
| ID METOD ID L_PARENTHESIS R_PARENTHESIS
'''
#crete the address and type structure
address = -1
tipo = ''
#Check if the list name exist in any other variable, if they exist add the type and address to their respective variables
if Tabla.CheckIfVariableExists(p[1],Location):
address = Tabla.GetAttribute(p[1],'Address',Location)
tipo = Tabla.GetAttribute(p[1],'Type',Location)
else:
#check if the list name exists in any other scope as a variable or atribute
if Tabla.CheckIfFunctExistInAtribute(p[1],Location):
address = Tabla.GetAttributeForParameters(p[1],'Address',Location)
tipo = Tabla.GetAttribute(p[1],'Type',Location)
else :
raise ErrorMsg('No existe la variable ' + p[1])
#------------------Append---------------------#
#take the type from the temporal and add list_ to the type, so you can have it in the list format
#that why you can check if they are of the same type
if p[3] == 'append':
if(len(p) <= 6):
raise ErrorMsg(p[3] + 'debe tener un argumento')
typeCheckTemp = tipos.pop() #check if the type of the list and the expression are the same
typeCheckAns = 'list_' + typeCheckTemp
if typeCheckAns != tipo :
raise ErrorMsg(p[3] + ' el argumento debe ser un ' + tipo + ' se dio un tipo: ' + typeCheckAns)
CrearCuadruplo('APPEND', values.pop(), '_', address) #created the quadruple
#------------------POP---------------------#
elif p[3] == 'pop':
if(len(p) > 6):
raise ErrorMsg(p[3] + ' no debe tener argumentos')
CrearCuadruplo('POP',address,'_','_') #create the pop quadruple
#------------------Sort---------------------#
elif p[3] == 'sort':
if(len(p) > 6):
raise ErrorMsg(p[3] + ' no debe tener argumentos')
CrearCuadruplo('SORT',address,'_','_') #create teh sort quadruple
#------------------find---------------------#
#take the type from the temporal and add list_ to the type, so you can have it in the list format
#that why you can check if they are of the same type
elif p[3] == 'find':
if(len(p) <= 6):
raise ErrorMsg(p[3] + 'debe tener un argumento')
typeCheckTemp = tipos.pop()
typeCheckAns = 'list_' + typeCheckTemp
if typeCheckAns != tipo :
raise ErrorMsg(p[3] + ' el argumento debe ser: ' + tipo + ' y se dio un tipo: ' + typeCheckAns)
#set the type to the same type of the list
val = values.pop()
if(tipo == 'list_int'):
tipo ='int'
if(tipo == 'list_bool'):
tipo ='bool'
if(tipo == 'list_float'):
tipo ='float'
#created a temporal with the same type has the list type
addressTemp = GenerarNuevoTemporal(tipo)
values.push(addressTemp)
CrearCuadruplo('FIND',address,val,addressTemp)#created the quadruple
#------------------Head---------------------#
elif p[3] == 'head':
if(len(p) > 6):
raise ErrorMsg(p[3] + ' no debe tener argumetos')
#set the type to the same type of the list
if(tipo == 'list_int'):
tipo ='int'
if(tipo == 'list_bool'):
tipo ='bool'
if(tipo == 'list_float'):
tipo ='float'
#created a temporal with the same type has the list type
addressTemp = GenerarNuevoTemporal(tipo)
values.push(addressTemp)
CrearCuadruplo('HEAD',address,'_',addressTemp)
#------------------Tail---------------------#
elif p[3] == 'tail':
if(len(p) > 6):
raise ErrorMsg(p[3] + ' no debe tener argumetos')
#set the type to the same type of the list
if(tipo == 'list_int'):
tipo ='int'
if(tipo == 'list_bool'):
tipo ='bool'
if(tipo == 'list_float'):
tipo ='float'
#created a temporal with the same type has the list type
addressTemp = GenerarNuevoTemporal(tipo)
values.push(addressTemp)
CrearCuadruplo('TAIL',address,'_',addressTemp) #created the quadruple
#------------------Key---------------------#
#
elif p[3] == 'key':
if(len(p) <= 6):
raise ErrorMsg(p[3] + 'debe tener 1 argumento')
typeCheckTemp = tipos.pop()
typeCheckAns = typeCheckTemp
if typeCheckAns != 'int' :
raise ErrorMsg(p[3] + ' el argmunto debe ser un ' + 'int' + ' se dio un tipo: ' + typeCheckAns)
#set the type to the same type of the list
val = values.pop()
if(tipo == 'list_int'):
tipo ='int'
if(tipo == 'list_bool'):
tipo ='bool'
if(tipo == 'list_float'):
tipo ='float'
#created a temporal with the same type has the list type
addressTemp = GenerarNuevoTemporal(tipo)
values.push(addressTemp)
CrearCuadruplo('KEY',address,val,addressTemp)#created the key quadruple
else:
raise ErrorMsg('No existe el metodo para lista ' + p[3])
p[0] = None
#estatutos repeticion
#loop-statements basic structure
def p_estatutos_repeticion(p):
'''
estatutos_repeticion : estatutos_repeticion_aux
|
'''
p[0]= None
#structure so the statement can be repeat
def p_estatutos_repeticion_aux(p):
'''
estatutos_repeticion_aux : estatutos_repeticion_aux2 estatutos_repeticion
'''
p[0]= None
#structure to select between non conditional and condicional statements
def p_estatutos_repeticion_aux2(p):
'''
estatutos_repeticion_aux2 : repeticion_condicional
| repeticion_no_condicional
'''
p[0]= None
#For non condiciontal loop statement
#basic structure for(exp to exp,exp){body}
def p_repeticion_no_condicional(p):
'''
repeticion_no_condicional : FOR L_PARENTHESIS for_inicio m_exp for_temp TO m_exp for_revision COMMA m_exp for_suma R_PARENTHESIS L_BRACKET cuerpo for_final R_BRACKET
'''
p[0]= None
#neuralgic point to save the quadruple position before starting the for inside the quadruples
def p_for_inicio(p):
'''
for_inicio : empty
'''
Saltos.append(cont)
p[0]= None
#NP to check the first expression and created a temporal local address for that expression
def p_for_temp(p):
'''
for_temp : empty
'''
iz = values.pop()
GenerarNuevoTemporal(tipos.pop())
values.push(Temporales[-1])
if tipos.top() != 'int': #check if the expression is an int
raise ErrorMsg('se esperaba un tipo int or float en la expresion del for')
res = values.top()
TemporalesFor.push(res)
CrearCuadruplo('=', iz, '_', res) #assign the value of the expression to the temporal we created earlier
p[0]= None
#NP to check the second expression
def p_for_revision(p):
'''
for_revision : empty
'''
if tipos.top() != 'int': # check if the expression is an int type result
raise ErrorMsg('se esperaba un tipo int or float en la expresion del for')
popper.push('<=')#push the operation
GenerarCuadruploDeOperador(popper,values,tipos)#generate the condicional quadruple
Saltos.append(cont)#append the jump before the expression, the one that has the result of expression, this is so the VM can now when to stop the for loop
CrearCuadruplo('GOTOF',values.pop(),'_','_') #created the GOTOF with the address of the expression
p[0]= None
def p_for_suma (p):
'''
for_suma : empty
'''
#check if the third expression is of int type
if tipos.top() != 'int':
raise ErrorMsg('se esperaba un tipo int or float en la expresion del for')
p[0]=None
#final NP that add the step to the temporal created
def p_for_final(p):
'''
for_final : empty
'''
#pop out the temporal and its type
variableFor = TemporalesFor.pop()
tipo = tipos.pop()
#created the sum between the temporal and the step
CrearCuadruplo('+',values.pop(),variableFor,variableFor)
global cont
#save the false jump
falseJump = Saltos[-1]
Saltos.pop()#pop out the false jump
Ret = Saltos[-1]#save the starting jump
Saltos.pop()
CrearCuadruplo('GOTO','_','_',Ret+1)#created the queadruple you need to return to the start of for
Fill(falseJump,cont)#fill the quadruple of GOTOF with the position of the exiting quadrupl, so the VM can exit the for when teh condition is fake
p[0] = None
#--------------------------While--------------------
#while loop statement
def p_repeticion_condicional(p):
'''
repeticion_condicional : WHILE startWhile L_PARENTHESIS expresion R_PARENTHESIS checkCond L_BRACKET cuerpo R_BRACKET finalWhile
'''
p[0]= None
#np to save the starting point of the while
def p_startWhile(p):
'''
startWhile : empty
'''
Saltos.append(cont)
p[0] = None
#NP to check the expression inside the while is a bool and save the jumping of the GOTOF and created the quadruple
def p_checkCond(p):
'''
checkCond : empty
'''
global cont
cond = values.pop()
tCond = tipos.top()
if tCond != 'bool':
raise ErrorMsg('Se esperaba un tipo bool en el while')
Saltos.append(cont)
CrearCuadruplo('GOTOF',cond,'_','_')
p[0] = None
#fill the gotof with the exiting number of the queadruple and created the GOTO, so the VM can return to the condiont and check it again
def p_finalWhile(p):
'''
finalWhile : empty
'''
global cont
falseJump = Saltos[-1]
Saltos.pop()
Ret = Saltos[-1]
Saltos.pop()
CrearCuadruplo('GOTO','_','_',Ret)
Fill(falseJump,cont)
p[0] = None
#estatutos funcionales
#input basic structure
def p_input(p):
'''
input : INPUT L_PARENTHESIS input_aux R_PARENTHESIS
'''
p[0] = None
#structure so you can have multiple input values
def p_input_aux(p):
'''
input_aux : input_aux2 leeInput COMMA input_aux
| input_aux2 leeInput
'''
#NP to created the Input quadruple
def p_leeInput(p):
'''
leeInput : empty
'''
res = values.pop()
tipo = tipos.pop()
CrearCuadruplo('INPUT',res,tipo,'_')
p[0] = None
#you can have variables, arrays and object atributes has a input
def p_input_aux2(p):
'''
input_aux2 : variable
| arreglo
| atributo
'''
p[0] = None
#basic structure of a function call
def p_llamada(p):
'''
llamada : llamadaID startCall L_PARENTHESIS llamada_aux2 R_PARENTHESIS endCall
'''
p[0]=None
#check the id of the call
def p_llamadaID(p):
'''
llamadaID : ID
| ID PERIOD ID
'''
popper.push('(') #put a fake in the popper, so you separed teh call operations with the actual expression
global paramChecktype
paramChecktype = []
memoria.ResetLocalMemory()#reset the variable local memory
if(len(p) > 2):
funct.push(p[3]) #push the id
funct.push(p[2])#push the period
funct.push(p[1])#push the actual id of the call
p[0] = None
def p_startCall(p):
'''
startCall : empty
'''
#created the era based for the starting part of the function
Funcion = funct.pop()
if funct.top() == '.':
funct.pop()
objeto = funct.pop()
temp = objeto
objeto = Funcion
Funcion = temp
funct.push(objeto)
funct.push('.')
CrearCuadruplo('ERA',Funcion,'_', objeto)# push the actual object if the function is in a object class
else:
CrearCuadruplo('ERA',Funcion,'_', '_') #Created the era with the function ID
funct.push(Funcion)
p[0]=None
def p_endCall(p):
'''
endCall : empty
'''
global DeclVar
Funcion = funct.pop()
objeto = None
popper.pop()
#check if the function is part of a class
if funct.top() == '.':
funct.pop()
objeto = funct.pop()
tempScope = Tabla.Scope
Tabla.SetScope('class')
#search if the object exist
if not Tabla.CheckIfObjectExists(objeto):
raise ErrorMsg('no existe el objeto')
clase = Tabla.GetObjectAtr(objeto,'Clase')
Tabla.SetClass(clase)
padre = Tabla.GetClassAtribute(clase,'Padre')
#check if the object father class exist
if not Tabla.CheckIfObjectExists(objeto):
raise ErrorMsg('no existe el objeto')
#check if the function exist inside the father
if not Tabla.CheckIfFunctionExists(Funcion) :
if padre == None:
raise ErrorMsg('no existe la llamada')
else:
Tabla.SetClass(padre)
if not Tabla.CheckIfFunctionExists(Funcion):
raise ErrorMsg('no existe la llamada')
#save the parameters of the function
parametrosFunct = Tabla.GetFunctionAttribute(Funcion, 'Parametros')
#see if you have the same lenght of parameters
if len(paramChecktype) != len(parametrosFunct):
raise ErrorMsg('Incorrecto numero de parametros')
#see if the parameters types match the ones with the call
for k in parametrosFunct.values() :
tipo = k.get('Type')
for i in paramChecktype:
if k.get('Type') != i:
raise ErrorMsg('parametros no son del mismo tipo que el instanceado en ' + Funcion + ' se dio un ' + i + ' se esperaba un ' + tipo)
Type = Tabla.GetFunctionAttribute(Funcion, 'Type')
start = Tabla.GetFunctionAttribute(Funcion, 'Start')
CrearCuadruplo('GOSUB',Funcion,objeto,start)#created the quadruple
#see if the function return something
if Type != 'void':
#if it does save the address of the funciton
AddressA = Tabla.GetFunctionAttribute(Funcion, 'Address')
#created a temporal to store the value of the call
GenerarNuevoTemporal(Type)
Resultado = Temporales[-1]
values.push(Resultado)
#save the address of the object
AddressB = Tabla.GetObjectAtr(objeto,'Address')
#make the address of the call be the address of the object . the address of the object ex 1000.1 where 1000 is teh object and 1 the function inside
Address = str(AddressB) + '.'+ str(AddressA)
#created the cuadruple with the address of the object and the new temporal we created
CrearCuadruplo('=',Address,'_',Resultado)
Tabla.SetScope(tempScope)
else:
parametrosFunct = Tabla.GetFunctionAttribute(Funcion, 'Parametros')
#see if you have the same lenght of parameters
if len(paramChecktype) != len(parametrosFunct):
raise ErrorMsg('Incorrecto numero de parametros')
listaTipos = []
#see if the parameters types match the ones with the call
for k in parametrosFunct.values() :
tipo = k.get('Type')
for i in paramChecktype:
if k.get('Type') != i:
raise ErrorMsg('parametros no son del mismo tipo que el instanceado en ' + Funcion + ' se dio un ' + i + ' se esperaba un ' + tipo)
#save the type of the function
start = Tabla.GetFunctionAttribute(Funcion, 'Start')
CrearCuadruplo('GOSUB',Funcion,'_',start)
Type = Tabla.GetFunctionAttribute(Funcion, 'Type')
if Type != 'void':
#if the function has a return created a temporal to store the value
#then created teh quadruple to assing the return value to rhe temporal
Address = Tabla.GetFunctionAttribute(Funcion, 'Address')
GenerarNuevoTemporal(Type)
Resultado = Temporales[-1]
values.push(Resultado)
CrearCuadruplo('=', Address, '_' ,Resultado)
p[0]= None
# Grammar: argument list of a call — zero or more `parametros` items.
def p_llamada_aux2(p):
    '''
    llamada_aux2 : parametros llamada_aux3
                 |
    '''
    p[0]=None
# Grammar: comma separator between call arguments.
def p_llamada_aux3(p):
    '''
    llamada_aux3 : COMMA llamada_aux2
                 |
    '''
    p[0]=None
#-------------- parametros---------------
# One call argument: record its type, copy its value into a fresh LOCAL
# address and emit the PARAMETRO quadruple.
def p_parametros(p):
    '''
    parametros : expresion
    '''
    global paramChecktype
    # remember the argument type so the call site can check it against the signature
    paramChecktype.append(tipos.top())
    address = memoria.AssignMemoryAddress(tipos.pop(),'LOCAL',Location)
    CrearCuadruplo('PARAMETRO', values.pop(),'_',address)
    p[0] = None
# Grammar: print statement, e.g. PRINT( print_var ).
def p_print(p):
    '''
    escribe : PRINT L_PARENTHESIS print_var R_PARENTHESIS
    '''
    p[0] = None
# Grammar: comma-separated list of printable items; `finalVar` emits one
# PRINT quadruple per item.
def p_print_var(p):
    '''
    print_var : print_var_aux2 finalVar COMMA print_var
              | print_var_aux2 finalVar
    '''
    p[0] = None
# Neuralgic point: pop the evaluated item and emit its PRINT quadruple.
def p_finalVar(p):
    '''
    finalVar : empty
    '''
    res = values.pop()
    tipos.pop()
    CrearCuadruplo('PRINT',res,'_','_')
    p[0] = None
# Grammar: kinds of values that print accepts.
def p_print_var_aux2(p):
    '''
    print_var_aux2 : llamada
                   | expresion
                   | atributo
                   | listas
    '''
    p[0] = None
# Grammar: the three assignment forms (plain variable, array cell, object attribute).
def p_asignacion(p):
    '''
    asignacion : igualdadVar
               | igualdadArr
               | igualdadAtr
    '''
    p[0] = None
# Assignment to an object attribute: type-check both sides and emit the
# '=' quadruple (value `iz` stored into attribute address `res`).
def p_igualdadAtr(p):
    '''
    igualdadAtr : atributo EQUALS asignacion_aux
    '''
    iz = values.pop()
    res = values.pop()
    tipo1 = tipos.pop()
    tipo2 = tipos.pop()
    if tipo1 != tipo2:
        raise ErrorMsg('Error: No se pueden asignar '+ tipo1 + ' a un tipo ' + tipo2)
    CrearCuadruplo('=', iz, '_', res)
    p[0] = None
# Attribute access `objeto.atributo`: resolve the attribute in the object's
# class (or its parent class) and push the combined address
# "<objectAddr>.<attrOffset>" plus the attribute type.
def p_atributo(p):
    '''
    atributo : ID PERIOD ID
    '''
    objeto = p[1]
    atributo = p[3]
    # the object must have been instantiated
    if not Tabla.CheckIfObjectExists(objeto):
        raise ErrorMsg('El objeto ' + p[1] + ' no existe')
    else:
        addressA = Tabla.GetObjectAtr(objeto,'Address')
        clase = Tabla.GetObjectAtr(objeto,'Clase')
        # temporarily switch scope to the object's class to look up the attribute
        TempScope = Tabla.Scope
        Tabla.SetScope('class')
        Tabla.SetClass(clase)
        padre = Tabla.GetClassAtribute(clase,'Padre')
        if Tabla.CheckIfVariableExists(atributo,'class'):
            # attribute found in the class itself
            addressB = Tabla.GetAttribute(atributo,'Address','class')
            addressFinal = str(addressA)+'.' + str(addressB)
            tipo = Tabla.GetAttribute(atributo,'Type','class')
            values.push(addressFinal)
            tipos.push(tipo)
        elif padre != None:
            # not found locally: retry in the parent class
            Tabla.SetScope('class')
            Tabla.SetClass(padre)
            if Tabla.CheckIfVariableExists(atributo,'class'):
                addressB = Tabla.GetAttribute(atributo,'Address','class')
                addressFinal = str(addressA)+'.' + str(addressB)
                tipo = Tabla.GetAttribute(atributo,'Type','class')
                values.push(addressFinal)
                tipos.push(tipo)
            else:
                raise ErrorMsg('El objeto ' + p[1] + ' no contiene '+p[3])
        else:
            raise ErrorMsg('El objeto ' + p[1] + ' no contiene '+p[3])
        # restore the scope that was active before the lookup
        Tabla.SetScope(TempScope)
    p[0] = None
# Assignment to an array cell: type-check and emit the '=' quadruple using the
# (computed) cell address popped from the value stack.
def p_igualdadArr(p):
    '''
    igualdadArr : arreglo EQUALS asignacion_aux
    '''
    iz = values.pop()
    res = values.pop()
    tipo1 = tipos.pop()
    tipo2 = tipos.pop()
    if tipo1 != tipo2:
        raise ErrorMsg('Error: No se pueden asignar '+ tipo1 + ' a un tipo ' + tipo2)
    CrearCuadruplo(p[2], iz, '_', res)
    p[0] = None
# Assignment to a plain variable: look it up in the current scope (or, failing
# that, among the current function's parameters/class attributes), type-check
# the right-hand side and emit the '=' quadruple.
def p_igualdadVar(p):
    '''
    igualdadVar : ID EQUALS asignacion_aux
    '''
    if Tabla.CheckIfVariableExists(p[1],Location) :
        iz = values.pop()
        tipo = tipos.pop()
        # RHS type must match the declared type of the variable
        if tipo != Tabla.GetAttribute(p[1],'Type',Location):
            raise ErrorMsg('Error: No se pueden asignar a: ' + p[1] + ' el tipo ' + tipo + ' ya que es de tipo ' + Tabla.GetAttribute(p[1],'Type',Location) )
        address = Tabla.GetAttribute(p[1],'Address',Location)
        CrearCuadruplo(p[2], iz, '_',address )
    else:
        # maybe the name is a function parameter / class attribute
        if Tabla.CheckIfFunctExistInAtribute(p[1],Location):
            iz = values.pop()
            tipo = tipos.pop()
            if tipo != Tabla.GetAttributeForParameters(p[1],'Type',Location):
                raise ErrorMsg('Error: No se pueden asignar a ' + p[1] + ' el tipo ' + tipo + ' ya que es de tipo ' + Tabla.GetAttributeForParameters(p[1],'Type',Location) )
            address = Tabla.GetAttributeForParameters(p[1],'Address',Location)
            CrearCuadruplo(p[2], iz, '_',address )
        else:
            raise ErrorMsg('La variable ' + p[1] + ' no existe')
    p[0]= None
# Grammar: legal right-hand sides of an assignment.
def p_asignacion_aux(p):
    '''
    asignacion_aux : expresion
                   | arreglo
                   | estatutos_funciones
                   | atributo
    '''
    p[0] = None
# Epsilon production, used as an anchor for neuralgic points.
def p_empty(p):
    'empty :'
    pass
# Grammar: if / optional-else statement; `rp_seen` and `else_after` are the
# neuralgic points that emit and patch the jumps.
def p_condicion(p):
    '''
    condicion : IF L_PARENTHESIS expresion R_PARENTHESIS rp_seen L_BRACKET cuerpo R_BRACKET condicion_aux else_after
    '''
    p[0] = None
# Neuralgic point after the if-condition: require a bool expression, emit a
# GOTOF on its temporal, and remember the GOTOF position for back-patching.
def p_rp_seen(p):
    '''
    rp_seen : empty
    '''
    result = Temporales[-1]
    if tipos.top() != 'bool':
        raise ErrorMsg('Se esperaba un tipo bool en el if')
    CrearCuadruplo('GOTOF',result,'_','_')
    Saltos.append(cont-1)
    p[0] = None
# Neuralgic point at the end of the if: back-patch the pending jump with the
# current quadruple counter so control can leave the statement.
def p_else_after(p):
    '''
    else_after : empty
    '''
    end = Saltos.pop()
    Fill(end,cont)
    p[0] = None
# Grammar: optional else branch.
def p_condicion_aux(p):
    '''
    condicion_aux : ELSE else_seen L_BRACKET cuerpo R_BRACKET
                  |
    '''
    p[0] = None
# Neuralgic point at ELSE: emit GOTO to skip the else body after the true
# branch, back-patch the GOTOF to the instruction after the GOTO, and push the
# GOTO position for `else_after` to fill.
def p_else_seen(p):
    '''
    else_seen : empty
    '''
    CrearCuadruplo('GOTO','_','_','_')
    falseJ = Saltos.pop()
    Saltos.append(cont-1)
    Fill(falseJ,cont)
    p[0] = None
#-------------- declaraciones---------------
# Grammar: formal parameter list of a function declaration (may be empty).
def p_declaracion_parametros(p):
    '''
    declaracion_parametros : startDParam declaracion_parametros_aux
                           |
    '''
    p[0] = None
# Neuralgic point: clear the accumulated parameter dict before parsing a new
# parameter list.
def p_startDParam(p):
    '''
    startDParam : empty
    '''
    global parametros
    parametros.clear()
    p[0]= None
# One formal parameter: allocate a memory address for it and record its
# name/type/address in the `parametros` dict.
def p_declaracion_parametros_aux(p):
    '''
    declaracion_parametros_aux : tipo_retorno ID declaracion_parametros_aux2
                               |
    '''
    if len(p ) > 1:
        tipo = AuxList[1]
        agregarContVarFunciones(tipo,'NORMAL')
        name = p[2]
        address = memoria.AssignMemoryAddress(tipo,Scope[0],'NORMAL')
        parametros[name] = { 'Type' :tipo,'Address':address}
    p[0] = None
# Grammar: comma separator between formal parameters.
def p_declaracion_parametros_aux2(p):
    '''
    declaracion_parametros_aux2 : COMMA declaracion_parametros_aux
                                |
    '''
    p[0] = None
# Grammar: zero or more class declarations (PEQUE introduces a class).
def p_declaracion_clases(p):
    '''
    declaracion_clases : PEQUE guardar_nombre_clase declaracion_clases_aux end_class declaracion_clases
                       |
    '''
    p[0] = None
# Neuralgic point marking the end of a class declaration (no action needed).
def p_end_class(p):
    '''
    end_class : empty
    '''
    p[0]= None
# Record the class name, reject duplicates, register it in the directory and
# make it the current class; resets the attribute counter.
def p_guardar_nombre_clase(p):
    '''
    guardar_nombre_clase : ID
    '''
    global claseDeclarada
    claseDeclarada = p[1]
    global atributos
    atributos = 0
    if Tabla.CheckIfClassExists(claseDeclarada):
        raise ErrorMsg('La clase ' + claseDeclarada + ' ya habia sido declarada previamente')
    else:
        Tabla.AddClase(claseDeclarada)
        Tabla.SetClass(claseDeclarada)
    p[0] = None
# Grammar: class body, optionally preceded by an inheritance clause.
def p_declaracion_clases_aux(p):
    '''
    declaracion_clases_aux : L_BRACKET declaracion_var declaracion_funciones R_BRACKET
                           | herencia L_BRACKET declaracion_var declaracion_funciones R_BRACKET
    '''
    p[0] = None
# Inheritance clause: the parent class must exist and differ from the child;
# the child starts its attribute offsets after the parent's space.
def p_herencia(p):
    '''
    herencia : AGRANDA ID
    '''
    global atributos
    if claseDeclarada == p[2]:
        raise ErrorMsg('No puede haber herencia entre si mismo: ' + p[2])
    if Tabla.CheckIfClassExists(p[2]):
        Tabla.updateHerencia(claseDeclarada,p[2])
        size = Tabla.ClassAtribute(p[2],'Space')
        atributos = size
    else:
        raise ErrorMsg('La clase ' + p[2] + ' no existe')
    p[0] = None
# Grammar: zero or more function declarations.
def p_declaracion_funciones(p):
    '''
    declaracion_funciones : declaracion_funciones_aux funciones_end declaracion_funciones
                          |
    '''
    p[0] = None
# Neuralgic point: entering a function body switches Location to 'function'.
def p_startF(p):
    '''
    startF : empty
    '''
    global Location
    Location = 'function'
# Neuralgic point at the end of a function: sync the class space with the
# attribute count (when declared inside a class), emit END PROC and restore
# Location to the directory's current scope.
def p_funciones_end(p):
    '''
    funciones_end : empty
    '''
    if Tabla.Scope == 'class':
        Tabla.updateClassAtribute(claseDeclarada,'Space',atributos)
    CrearCuadruplo('END PROC','_','_','_')
    global Location
    Location = Tabla.Scope
    p[0]= None
# Grammar: full function declaration; saves the previous Location and switches
# into function scope while the body is parsed.
def p_declaracion_funciones_aux(p):
    '''
    declaracion_funciones_aux : startF MINI declaracion_funciones_aux2 guardar_nombre_funcion L_PARENTHESIS declaracion_parametros R_PARENTHESIS L_BRACKET cuerpo R_BRACKET save_variables
                              |
    '''
    global Location
    global LocationTemp
    LocationTemp = Location
    Location = 'function'
    p[0] = None
# Neuralgic point: persist the local-variable counters and parameter dict of
# the just-declared function into the directory (copies, so later resets do
# not clobber them).
def p_save_variables(p):
    '''
    save_variables : empty
    '''
    global FuncionDeclarada
    Tabla.updateFunctionAttribute(FuncionDeclarada,'Space',contVarLocal.copy())
    Tabla.updateFunctionAttribute(FuncionDeclarada,'Parametros',parametros.copy())
    p[0]= None
# Record a function name: reset per-function counters and local memory, emit
# START PROC, reject duplicates, then register the function either as a class
# method (address = attribute offset) or as a global function (real address).
def p_guardar_nombre_funcion(p):
    '''
    guardar_nombre_funcion : ID
    '''
    global contVarLocal
    resetConVarFunciones()
    memoria.ResetLocalMemory()
    global FuncionDeclarada
    FuncionDeclarada = p[1]
    CrearCuadruplo('START PROC','_','_',FuncionDeclarada)
    if Tabla.CheckIfFunctionExists(FuncionDeclarada):
        raise ErrorMsg('La funcion ' + FuncionDeclarada + ' ya habia sido declarada previamente')
    else:
        if Tabla.Scope == 'class':
            # class method: its "address" is the next attribute slot
            global atributos
            atributos = atributos + 1
            Tabla.AddFunction(FuncionDeclarada, AuxList[1], atributos,parametros,cont-1)
            Tabla.SetCurrentFunction(FuncionDeclarada)
        else:
            # free function: allocate a global address for its return value
            address = memoria.AssignMemoryAddress(AuxList[1], 'GLOBAL', 'NORMAL')
            Tabla.AddFunction(FuncionDeclarada, AuxList[1], address,parametros,cont-1)
            Tabla.SetCurrentFunction(FuncionDeclarada)
# Grammar: function return type; VOID is stored explicitly in AuxList[1]
# (value types are stored by the `tipo_retorno` rule itself).
def p_declaracion_funciones_aux2(p):
    '''
    declaracion_funciones_aux2 : VOID
                               | tipo_retorno
    '''
    if p[1] == 'void' :
        AuxList[1] = p[1]
    p[0] = None
# Return statement: void functions must not return a value, non-void
# functions must; emits the RETURN quadruple with the popped result.
def p_regreso(p):
    '''
    regreso : RETURN expresion
            |
    '''
    global FuncionDeclarada
    tipo = Tabla.GetFunctionAttribute(FuncionDeclarada, 'Type')
    if len(p) > 1:
        if tipo == 'void':
            raise ErrorMsg('Las funciones void (' + FuncionDeclarada + ') no deben tener un return')
        else:
            CrearCuadruplo('RETURN', '_', '_', values.pop())
    else:
        if tipo != 'void':
            raise ErrorMsg('Las funciones de tipo ' + tipo + '(' + FuncionDeclarada + ') deben tener un return')
    p[0] = None
# Variable declaration section; inside a class, sync the class space with the
# attribute counter after the declarations.
def p_declaracion_var(p):
    '''
    declaracion_var : declaracion_var_aux
    '''
    if Tabla.Scope == 'class':
        Tabla.updateClassAtribute(claseDeclarada,'Space',atributos)
    p[0] = None
# Grammar: zero or more PETITE variable declarations.
def p_declaracion_var_aux(p):
    '''
    declaracion_var_aux : PETITE declaracion_var_aux2 assignAddress declaracion_var
                        |
    '''
    p[0] = None
# Neuralgic point: allocate the remaining memory cells of an array/matrix
# (the first cell was reserved when the variable itself was declared).
# Class attributes use offsets instead of real addresses, hence the guard.
def p_assignAddress(p):
    '''
    assignAddress : empty
    '''
    global sizeVar
    if Tabla.Scope != 'class':
        for i in range(1, sizeVar):
            agregarContVarFunciones(AuxList[1],'NORMAL')
            address = memoria.AssignMemoryAddress(AuxList[1], Scope[0], 'NORMAL')
    p[0] = None
# Grammar: a declared variable is either a scalar/array of a return type or a
# special (list) type.
def p_declaracion_var_aux2(p):
    '''
    declaracion_var_aux2 : tipo_retorno idChecker declaracion_var_aux3
                         | tipo_retorno idChecker declaracion_var_aux5
                         | tipo_especial idChecker
    '''
    p[0] = None
# Validate and register a newly declared variable name. Inside a class, list
# types get a real memory address while scalar attributes get an increasing
# attribute offset (accessed later as "objectAddr.offset"); elsewhere a normal
# address is assigned.
def p_idChecker(p):
    '''
    idChecker : ID
    '''
    global DeclVar
    global sizeVar
    global atributos
    sizeVar = 1
    if Tabla.CheckIfVariableExists(p[1],Location):
        raise ErrorMsg('La variable ' + p[1] + ' ya habia sido declarada previamente')
    else:
        if Tabla.Scope == 'class':
            if(AuxList[1] == 'list_int' or AuxList[1] == 'list_bool' or AuxList[1] == 'list_float'):
                # list attribute: real memory address
                DeclVar = p[1]
                address = memoria.AssignMemoryAddress(AuxList[1], Scope[0], 'NORMAL')
                agregarContVarFunciones(AuxList[1],'NORMAL',sizeVar)
                Tabla.AddVariable(DeclVar, AuxList[1], address, sizeVar,Location)
            else:
                # scalar attribute: address is the attribute offset within the object
                DeclVar = p[1]
                atributos = atributos + 1
                address = atributos
                Tabla.AddVariable(DeclVar, AuxList[1], address, sizeVar,Location)
        else:
            # ordinary (non-class) variable: real memory address
            DeclVar = p[1]
            address = memoria.AssignMemoryAddress(AuxList[1], Scope[0], 'NORMAL')
            agregarContVarFunciones(AuxList[1],'NORMAL',sizeVar)
            Tabla.AddVariable(DeclVar, AuxList[1], address, sizeVar,Location)
    p[0] = None
# Grammar: additional comma-separated names in the same declaration.
def p_declaracion_var_aux3(p):
    '''
    declaracion_var_aux3 : COMMA idChecker declaracion_var_aux3
                         |
    '''
    p[0] = None
# Grammar: optional array dimension(s); arrays are not allowed as object
# attributes.
def p_declaracion_var_aux5(p):
    '''
    declaracion_var_aux5 : L_CORCHETE save_size R_CORCHETE declaracion_var_aux7
                         |
    '''
    if Tabla.Scope == 'class' and Location == 'class':
        raise ErrorMsg('no se pueden declarar arreglos como atributos de objetos')
    p[0] = None
# First array dimension: must be a positive integer constant; updates the
# variable's total size and upper access limit (size - 1).
def p_save_size(p):
    '''
    save_size : CTEI
    '''
    if p[1] > 0:
        global sizeVar
        sizeVar *= p[1]
        Tabla.UpdateSize(DeclVar,sizeVar,Location)
        Tabla.UpdateArrayLimit(DeclVar, p[1] - 1,Location)
    else:
        raise ErrorMsg('No se puede declarar el tamaño de un array como menor que 1')
    p[0] = None
# Grammar: optional second dimension (matrix declaration).
def p_declaracion_var_aux7(p):
    '''
    declaracion_var_aux7 : L_CORCHETE last_size R_CORCHETE
                         |
    '''
    p[0] = None
# Second (matrix) dimension: must be positive; multiplies the stored size so
# the full row*column space is reserved.
def p_last_size(p):
    '''
    last_size : CTEI
    '''
    if p[1] > 0:
        global sizeVar
        # NOTE(review): `tipo` and `currentSize` are computed but never used here
        tipo = Tabla.GetAttribute(DeclVar,'Type',Location)
        currentSize = sizeVar
        sizeVar *= p[1]
        Tabla.UpdateSize(DeclVar, sizeVar,Location)
    else:
        raise ErrorMsg('No se puede declarar el tamaño de una matriz como menor que 1')
    p[0] = None
# Object instantiation `obj = new Clase`: forbidden inside classes, the class
# must exist, the object name must be fresh and differ from the class name;
# allocates an object address and registers it with the class's space.
def p_instancear_objetos(p):
    '''
    instancear_objetos : ID EQUALS NEW ID
    '''
    if Tabla.Scope == 'class':
        raise ErrorMsg('No se puede declarar objetos dentro de funciones en clases')
    if p[1] == p[4]:
        raise ErrorMsg('El objeto no puede tener el mismo nombre que una clase')
    clase = p[4]
    objeto = p[1]
    if not Tabla.CheckIfClassExists(clase):
        raise ErrorMsg('La clase ' + clase + ' no existe')
    else:
        if Tabla.CheckIfObjectExists(objeto):
            raise ErrorMsg('El Objeto ' + objeto + ' ya existe')
        else:
            address = memoria.AssignMemoryAddressObject()
            size = Tabla.ClassAtribute(clase,'Space')
            Tabla.AddObject(objeto,clase,size,address)
    p[0] = None
#-------------- Variables---------------
# Grammar: a variable reference, optionally followed by `.ID`.
def p_variable(p):
    '''
    variable : variable_aux2 variable_aux
    '''
    p[0] = None
# Resolve a variable reference: look it up in the current scope, then among
# the current function's parameters/attributes, then in the parent class;
# pushes its address and type onto the operand stacks.
def p_variable_aux2(p):
    '''
    variable_aux2 : ID empty
    '''
    if Tabla.CheckIfVariableExists(p[1],Location):
        address = Tabla.GetAttribute(p[1],'Address',Location)
        values.push(address)
        tipos.push(Tabla.GetAttribute(p[1], 'Type',Location))
    else:
        # parameter or attribute of the current function/class
        if Tabla.CheckIfFunctExistInAtribute(p[1],Location):
            address = Tabla.GetAttributeForParameters(p[1],'Address',Location)
            values.push(address)
            tipos.push(Tabla.GetAttributeForParameters(p[1], 'Type',Location))
        # NOTE(review): the `not` below looks inverted (fetches from the father
        # when the attribute supposedly does NOT exist there) — confirm the
        # semantics of CheckIfAtributeExistsInFather before changing it.
        elif not Tabla.CheckIfAtributeExistsInFather(p[1],Location):
            clasePadre = Tabla.GetClassAtribute(Tabla.CurrentClass,'Padre')
            tempClass = Tabla.CurrentClass
            Tabla.SetClass(clasePadre)
            address = Tabla.GetAttributeFromFather(p[1],'Address',Location)
            values.push(address)
            tipos.push(Tabla.GetAttributeFromFather(p[1], 'Type',Location))
            Tabla.SetClass(tempClass)
        else :
            raise ErrorMsg('No existe la variable ' + p[1])
    p[0] = None
# Grammar: optional trailing `.ID` after a variable.
def p_variable_aux(p):
    '''
    variable_aux : PERIOD ID
                 |
    '''
    p[0] = None
#-------------- Tipos---------------
# Special (list) types: store the mangled element type in AuxList[1].
def p_tipo_especial(p):
    '''
    tipo_especial : LIST INT
                  | LIST FLOAT
                  | LIST BOOL
    '''
    if(p[2] == 'int'):
        AuxList[1] = 'list_int'
    elif(p[2] == 'float'):
        AuxList[1] = 'list_float'
    else:
        AuxList[1] = 'list_bool'
    p[0] = None
# Scalar/return types: store the type token in AuxList[1].
def p_tipo_retorno(p):
    '''
    tipo_retorno : INT
                 | FLOAT
                 | BOOL
    '''
    AuxList[1] = p[1]
    p[0] = None
#-------------- arreglo---------------
# Array access: after the index expression(s) have been folded into an offset,
# add the base address, pop the resulting address and push it back wrapped in
# parentheses — the "(addr)" form marks an indirect address for the VM.
def p_arreglo(p):
    '''
    arreglo : startArray L_CORCHETE expresion R_CORCHETE checkLimits arreglo2
    '''
    dirBase = Tabla.GetAttribute( lastVar, 'Address', Location)
    popper.push('+')
    address = Constantes.GetMemoryAddress(dirBase,'int')
    values.push(address)
    tipos.push('int')
    # offset + base
    GenerarCuadruploDeOperador(popper,values,tipos)
    fix = values.pop()
    if Tabla.CheckIfVariableExists(lastVar,Location):
        tipos.push(Tabla.GetAttribute(lastVar, 'Type',Location))
    else:
        raise ErrorMsg ('No existe la variable: ' + lastVar)
    values.push('('+str(fix)+')')
    p[0] = None
# Remember the array name and push a fake '(' so the index arithmetic is kept
# separate from the surrounding expression's operators.
def p_startArray(p):
    '''
    startArray : ID
    '''
    global lastVar
    lastVar = p[1]
    popper.push('(')
    p[0]= None
# First index: emit the VER bounds-check quadruple and multiply the index by
# the row width (size / (limit+1)) toward the flat offset.
def p_checkLimits(p):
    '''
    checkLimits : empty
    '''
    limit = Tabla.GetAttribute( lastVar,'Limit', Location)
    size = Tabla.GetAttribute( lastVar,'Size', Location)
    tipo = Tabla.GetAttribute( lastVar,'Type', Location)
    CrearCuadruplo('VER',values.top(),0,limit)
    popper.push('*')
    row = int(math.ceil(size/(limit+1)))
    address = Constantes.GetMemoryAddress(row,'int')
    values.push(address)
    tipos.push('int')
    tipos.push('int')
    # index * rowWidth
    GenerarCuadruploDeOperador(popper,values,tipos)
    p[0] = None
# Grammar: optional second index; pops the fake '(' pushed by startArray.
def p_arreglo2(p):
    '''
    arreglo2 : L_CORCHETE expresion p_checkLimits2 R_CORCHETE
             |
    '''
    if(popper.top() == '('):
        popper.pop()
    p[0] = None
# Second (matrix) index: bounds-check against the column count, add it to the
# row offset, then add 1 more toward the final flat offset.
def p_checkLimits2(p):
    '''
    p_checkLimits2 : empty
    '''
    arrSize = Tabla.GetAttribute(lastVar,'Size', Location)
    limit = Tabla.GetAttribute(lastVar,'Limit', Location)
    columnSize = int(arrSize / (limit + 1))
    CrearCuadruplo('VER',values.top(),0, columnSize - 1)
    popper.push('+')
    # rowOffset + columnIndex
    GenerarCuadruploDeOperador(popper,values,tipos)
    popper.push('+')
    address = Constantes.GetMemoryAddress(1,'int')
    values.push(address)
    tipos.push('int')
    tipos.push('int')
    # ... + 1
    GenerarCuadruploDeOperador(popper,values,tipos)
    p[0]= None
#-------------- expresiones---------------
# Grammar: logical-OR level of the expression hierarchy.
def p_expresion(p):
    '''
    expresion : t_exp expresion_aux2
              | t_exp expresion_aux2 expresion_aux expresion
    '''
    p[0] = None
# Push the OR operator onto the operator stack.
def p_expresion_aux(p):
    '''
    expresion_aux : OR
    '''
    if (len(p) > 1):
        popper.push(p[1])
    p[0] = None
# If an OR is pending, emit its quadruple now.
def p_expresion_aux2(p):
    '''
    expresion_aux2 : empty
    '''
    if popper.top() == '||':
        GenerarCuadruploDeOperador(popper, values, tipos)
    p[0] = None
# Grammar: logical-AND level of the expression hierarchy.
def p_t_exp(p):
    '''
    t_exp : g_exp t_exp_aux2
          | g_exp t_exp_aux2 t_exp_aux t_exp
    '''
    p[0] = None
# Push the AND operator onto the operator stack.
def p_t_exp_aux(p):
    '''
    t_exp_aux : AND
    '''
    if (len(p) > 1):
        popper.push(p[1])
    p[0] = None
# If an AND is pending, emit its quadruple now.
def p_t_exp_aux2(p):
    '''
    t_exp_aux2 : empty
    '''
    if popper.top() == '&&':
        GenerarCuadruploDeOperador(popper, values, tipos)
    p[0] = None
# Grammar: relational-comparison level of the expression hierarchy.
def p_g_exp(p):
    '''
    g_exp : m_exp g_exp_aux2
          | m_exp g_exp_aux2 g_exp_aux g_exp
    '''
    p[0] = None
# Push the comparison operator onto the operator stack.
def p_g_exp_aux(p):
    '''
    g_exp_aux : BIGGER
              | LESS
              | BIGGER_EQUAL
              | LESS_EQUAL
              | EQUAL
              | DIFFERENT
    '''
    if (len(p) > 1):
        popper.push(p[1])
    p[0] = None
# If a comparison operator is pending, emit its quadruple now.
def p_g_exp_aux2(p):
    '''
    g_exp_aux2 : empty
    '''
    operadores = ['>', '<', '>=', '<=', '==', '<>']
    if popper.top() in operadores:
        GenerarCuadruploDeOperador(popper, values, tipos)
    p[0] = None
# Grammar: additive level of the expression hierarchy.
def p_m_exp(p):
    '''
    m_exp : termino m_exp_aux2
          | termino m_exp_aux2 m_exp_aux m_exp
    '''
    p[0] = None
# Push + or - onto the operator stack.
def p_m_exp_aux(p):
    '''
    m_exp_aux : PLUS
              | MINUS
    '''
    if (len(p) > 1):
        popper.push(p[1])
    p[0] = None
# If a + or - is pending, emit its quadruple now.
def p_m_exp_aux2(p):
    '''
    m_exp_aux2 : empty
    '''
    if popper.top() == '+' or popper.top() == '-':
        GenerarCuadruploDeOperador(popper, values, tipos)
    p[0] = None
# Grammar: multiplicative level of the expression hierarchy.
def p_termino(p):
    '''
    termino : factor termino_aux2 termino_aux termino
            | factor termino_aux2
    '''
    p[0] = None
# Push * or / onto the operator stack.
def p_termino_aux(p):
    '''
    termino_aux : TIMES
                | DIVIDE
    '''
    if (len(p) > 1):
        popper.push(p[1])
    p[0] = None
# If a * or / is pending, emit its quadruple now.
def p_termino_aux2(p):
    '''
    termino_aux2 : empty
    '''
    if popper.top() == '*' or popper.top() == '/':
        GenerarCuadruploDeOperador(popper, values, tipos)
    p[0] = None
# Grammar: expression atoms. Constant literals are interned in the constants
# table and their address/type pushed; non-constant alternatives have already
# pushed their own address, so only literals are handled here.
def p_factor(p):
    '''
    factor : L_PARENTHESIS factor_aux expresion R_PARENTHESIS factor_aux2
           | variable
           | llamada
           | arreglo
           | CTEI
           | CTEF
           | CTES
           | TRUE
           | FALSE
    '''
    if p[1] != '(':
        if isinstance(p[1],int):
            tipos.push('int')
            address = Constantes.GetMemoryAddress(int(p[1]), 'int')
            values.push(address)
        elif isinstance(p[1],float):
            tipos.push('float')
            address = Constantes.GetMemoryAddress(float(p[1]), 'float')
            values.push(address)
        elif isinstance(p[1],str):
            tipos.push('string')
            # strip the surrounding quotes of the string literal
            string = p[1][1:-1]
            address = Constantes.GetMemoryAddress(str(string), 'string')
            values.push(address)
    p[0] = None
# Push a fake '(' so operators inside the parentheses are isolated.
def p_factor_aux(p):
    '''
    factor_aux : empty
    '''
    popper.push('(')
    p[0] = None
# Pop the fake '(' at the closing parenthesis.
def p_factor_aux2(p):
    '''
    factor_aux2 : empty
    '''
    popper.pop()
    p[0] = None
#-------------- error---------------
# Exception used for all semantic/compile-time errors in this module.
class ErrorMsg(Exception):
    """Compilation error carrying a human-readable message."""

    def __init__(self, message):
        # Forward to Exception so str(e) and e.args behave normally,
        # while keeping the historical `.message` attribute.
        super().__init__(message)
        self.message = message
# PLY error hook: report the offending token (type, value, line, column) on
# stdout, or EOF when the input ended unexpectedly.
def p_error(p):
    if p is None:
        print("Syntax error at EOF")
        return
    print("Syntax error at token", p.type)
    print("Syntax error at '%s'" % p.value)
    print("line : '%s'" % p.lineno)
    print("column: '%s'" % p.lexpos)
#--------------------------------------- Error---------------------------------------
# Debug helper: print every production symbol (slot 0 is skipped) separated
# by spaces, ending with a newline.
def imprimirP(p):
    for idx in range(1, len(p)):
        print(p[idx], end=" ")
    print()
# Pop one operator and two operands (with their types), consult the semantic
# cube, allocate a result temporal, emit the quadruple and push the result.
# Raises ErrorMsg when the cube reports an incompatible ('err') combination.
def GenerarCuadruploDeOperador(operandos, valores, tipos):
    der = valores.pop()
    iz = valores.pop()
    op = operandos.pop()
    tipoDer = tipos.pop()
    tipoIzq = tipos.pop()
    tipoResultado = cuboSemantico[tipoIzq][tipoDer][op]
    if (tipoResultado != 'err'):
        GenerarNuevoTemporal(tipoResultado)
        result = Temporales[-1]
        CrearCuadruplo(op, iz, der, result)
        valores.push(result)
    else:
        raise ErrorMsg('Error en los tipos de la operacion: '
                       + iz + ' (' + tipoIzq + ') '
                       + op + ' '
                       + der + ' (' + tipoDer + ') ')
# Append one quadruple (operator encoded as a number) and advance the
# quadruple counter.
def CrearCuadruplo(op, iz, der, res):
    global cont
    cont += 1
    Cuartetos.append({'op': Operadores.GetNumber(op), 'iz': iz, 'de': der, 'res':res})
# Allocate a TEMPORAL memory address of the given type, record it in
# Temporales, push its type and return the address.
def GenerarNuevoTemporal(tipo):
    agregarContVarFunciones(tipo,'TEMPORAL')
    addressTemporal = memoria.AssignMemoryAddress(tipo,Scope[0],'TEMPORAL')
    Temporales.append(addressTemporal)
    tipos.push(tipo)
    return addressTemporal
# Back-patch: set the result (jump target) of a previously emitted quadruple.
def Fill(cuarteto, llenado):
    global Cuartetos
    Cuartetos[cuarteto]['res'] = llenado
# Increment the per-function variable counters in `contVarLocal` by `size`.
# NORMAL variables use slots 0-5 (int, float, bool, list_int, list_float,
# list_bool); any other location (temporals) uses slots 6-8 (int, float, bool).
# Unknown types are silently ignored, matching the original elif chains.
def agregarContVarFunciones(type,location,size=1):
    global contVarLocal
    if location == 'NORMAL':
        slots = {'int': 0, 'float': 1, 'bool': 2,
                 'list_int': 3, 'list_float': 4, 'list_bool': 5}
    else:
        slots = {'int': 6, 'float': 7, 'bool': 8}
    idx = slots.get(type)
    if idx is not None:
        contVarLocal[idx] += size
# Reset the 9 per-function variable counters before parsing a new function.
def resetConVarFunciones():
    global contVarLocal
    global Temporales  # NOTE(review): declared global but never reset here — confirm Temporales is meant to persist across functions
    # clear the old list in place, then rebind to a fresh zeroed one
    contVarLocal.clear()
    contVarLocal = [0]*9
# Build the LALR parser from all p_* grammar rules defined in this module.
parser = yacc.yacc()
|
[
"math.ceil",
"asignadorMemoria.AsignadorMemoria",
"stack.Stack",
"ply.yacc.yacc",
"directory.Directory",
"tablaConstantes.TablaConstantes",
"tablaOperaciones.TablaOperaciones"
] |
[((842, 849), 'stack.Stack', 'Stack', ([], {}), '()\n', (847, 849), False, 'from stack import Stack\n'), ((859, 866), 'stack.Stack', 'Stack', ([], {}), '()\n', (864, 866), False, 'from stack import Stack\n'), ((875, 882), 'stack.Stack', 'Stack', ([], {}), '()\n', (880, 882), False, 'from stack import Stack\n'), ((891, 898), 'stack.Stack', 'Stack', ([], {}), '()\n', (896, 898), False, 'from stack import Stack\n'), ((915, 922), 'stack.Stack', 'Stack', ([], {}), '()\n', (920, 922), False, 'from stack import Stack\n'), ((1230, 1255), 'directory.Directory', 'Directory', (['{}', '{}', '{}', '{}'], {}), '({}, {}, {}, {})\n', (1239, 1255), False, 'from directory import Directory\n'), ((1397, 1415), 'asignadorMemoria.AsignadorMemoria', 'AsignadorMemoria', ([], {}), '()\n', (1413, 1415), False, 'from asignadorMemoria import AsignadorMemoria\n'), ((1552, 1569), 'tablaConstantes.TablaConstantes', 'TablaConstantes', ([], {}), '()\n', (1567, 1569), False, 'from tablaConstantes import TablaConstantes\n'), ((1630, 1648), 'tablaOperaciones.TablaOperaciones', 'TablaOperaciones', ([], {}), '()\n', (1646, 1648), False, 'from tablaOperaciones import TablaOperaciones\n'), ((51273, 51284), 'ply.yacc.yacc', 'yacc.yacc', ([], {}), '()\n', (51282, 51284), True, 'import ply.yacc as yacc\n'), ((42896, 42925), 'math.ceil', 'math.ceil', (['(size / (limit + 1))'], {}), '(size / (limit + 1))\n', (42905, 42925), True, 'import math as math\n')]
|
import os
import csv
import sys
import time
import json
import h5py
import pickle as pkl
import logging
import argparse
import random
from collections import OrderedDict
import torch
import numpy as np
from tqdm import tqdm, trange
from nglib.common import utils
def get_arguments(argv):
    """Parse the command-line arguments for the intrinsic-evaluation script."""
    ap = argparse.ArgumentParser(description='more intrinsic evaluations')
    # positional arguments
    ap.add_argument('config_file', metavar='CONFIG_FILE', help='ng config file')
    ap.add_argument('input_dir', metavar='INPUT_DIR', help='input dir')
    ap.add_argument('prefix', metavar='PREFIX', help='output file prefix')
    ap.add_argument('output_dir', metavar='OUTPUT_DIR', help='output dir')
    # options
    ap.add_argument('--n_choices', type=int, default=8,
                    help='number of choices')
    ap.add_argument('--seed', type=int, default=135,
                    help='random seed for initialization')
    ap.add_argument('-v', '--verbose', action='store_true', default=False,
                    help='show info messages')
    ap.add_argument('-d', '--debug', action='store_true', default=False,
                    help='show debug messages')
    return ap.parse_args(argv)
def set_seed(gpu, seed):
    """Seed the python, numpy and torch RNGs.

    `gpu` is kept for API compatibility; CUDA seeding is currently disabled.
    """
    for seeder in (random.seed, np.random.seed, torch.manual_seed):
        seeder(seed)
    # if gpu != -1:
    #     torch.cuda.manual_seed_all(seed)
def _get_indices_by_rtypes(ng_edges, rtype_idxs):
n_edges = ng_edges.shape[1]
ret_idxs = []
for i in range(n_edges):
e = tuple(ng_edges[:3, i].astype('int64'))
if e[1] in rtype_idxs:
ret_idxs.append(i)
return ret_idxs
def _get_tail_node_repr_by_eidx(
gid, cand_idx, ng_edges, bert_nid2rows, bert_inputs, bert_target_idxs):
_edge = ng_edges[:, cand_idx] # outputs
_nid = int(_edge[2])
_row = bert_nid2rows[_nid]
_row = _row[_row != -1]
assert _row.shape == (1, ) # for pp links
binputs = bert_inputs[:, _row, :].squeeze()
target_col = bert_target_idxs[_row]
# choice format
ret = {
'gid': gid,
'eidx': cand_idx, # we need this because we might need to remove the edge
'edge': _edge,
'nid': _nid,
'bert_inputs': binputs,
'target_col': target_col
}
return ret
def sample_node_multiple_choice(ng_edges,
                                bert_inputs, bert_target_idxs, bert_nid2rows,
                                interested_rel_idxs, fr, gid):
    """Sample one node-prediction multiple-choice question for graph `gid`.

    The answer is the tail node of a randomly chosen edge whose relation type
    is in `interested_rel_idxs`; distractors are tail nodes sampled the same
    way from other graphs in the h5-like container `fr`. Uses the module-level
    `args.n_choices` and the `random` RNG. Returns (correct_idx, choices) or
    None when the graph has no edge of an interesting type.
    """
    interested_eidxs = _get_indices_by_rtypes(ng_edges, interested_rel_idxs)
    if len(interested_eidxs) == 0:
        return None
    # sample a target edge; its tail node is the correct answer
    # (fix: dropped the unused `target_e` / `n_nodes` locals of the original)
    eidx = interested_eidxs[random.randint(0, len(interested_eidxs)-1)]
    answer = _get_tail_node_repr_by_eidx(
        gid, eidx, ng_edges, bert_nid2rows, bert_inputs, bert_target_idxs
    )
    choices = [answer]
    gid_pool = [k for k in fr.keys()]
    while len(choices) < args.n_choices:
        # distractor: pick a different graph at random
        rgid = gid_pool[random.randint(0, len(gid_pool)-1)]
        rgid = int(rgid.split('_')[1])
        if rgid == gid:
            continue
        key = 'graph_{}'.format(rgid)
        r_binputs = fr[key]['bert_inputs'][:]
        r_target_idxs = fr[key]['bert_target_idxs'][:]
        r_nid2rows = fr[key]['bert_nid2rows'][:]
        r_ng_edges = fr[key]['ng_edges'][:]
        # sample a distractor node using the same manner as the answer
        interested_eidxs = _get_indices_by_rtypes(r_ng_edges, interested_rel_idxs)
        if len(interested_eidxs) == 0:
            continue
        eidx = interested_eidxs[random.randint(0, len(interested_eidxs)-1)]
        c = _get_tail_node_repr_by_eidx(
            rgid, eidx, r_ng_edges, r_nid2rows, r_binputs, r_target_idxs
        )
        choices.append(c)
    # shuffle so the answer is not always the first choice
    choice_idxs = list(range(args.n_choices))
    random.shuffle(choice_idxs)
    correct = choice_idxs.index(0)
    choices = [choices[cidx] for cidx in choice_idxs]
    return (correct, choices)
def sample_node_multiple_choice_v2(
        ng_edges,
        bert_inputs, bert_target_idxs, bert_nid2rows,
        interested_rel_idxs, fr, gid):
    """Sample an in-document multiple-choice question: pick one edge whose
    relation type is in ``interested_rel_idxs``, remove it from the graph,
    and ask for its tail node among ``args.n_choices`` candidate triples.

    Returns ``(new_edges, correct, choices)`` -- the edge array with the
    question edge dropped, the index of the true triple, and the candidate
    triples -- or ``None`` when no interesting edge exists.
    """
    interested_eidxs = _get_indices_by_rtypes(ng_edges, interested_rel_idxs)
    if len(interested_eidxs) == 0:
        return None
    # collect every positive (head, rtype, tail) triple of interesting type
    related_edges = ng_edges[:3, interested_eidxs].astype('int64')
    pos_edges = {tuple(related_edges[:, col])
                 for col in range(related_edges.shape[1])}
    # pick the question edge and drop it from the edge array
    eidx = interested_eidxs[random.randint(0, len(interested_eidxs) - 1)]
    new_edges = np.concatenate((ng_edges[:, :eidx], ng_edges[:, eidx + 1:]), axis=1)
    target_e = tuple(ng_edges[:3, eidx].astype('int64'))
    n_nodes = bert_nid2rows.shape[0]
    choices = [target_e]
    while len(choices) < args.n_choices:
        # in-document negative: same head and relation, random tail node,
        # rejected when it happens to be a true edge
        cand_nid = random.randint(0, n_nodes - 1)
        cand = (target_e[0], target_e[1], cand_nid)
        if cand in pos_edges:
            continue
        choices.append(cand)
    # shuffle so the answer position is uniform over the choices
    order = list(range(args.n_choices))
    random.shuffle(order)
    correct = order.index(0)
    choices = [choices[pos] for pos in order]
    return (new_edges, correct, choices)
def sample_ep_questions(ng_edges, rtype2idx):
    """Sample two entity-predicate questions from one narrative graph.

    Task 1 (``q_link``): pick an event node with at least three
    entity-predicate edges; the question asks about those edges' types.
    Task 2 (``q_entity``): pick one of those edges and ask which entity it
    points to, among ``args.n_choices`` entity candidates.

    Returns ``(q_link, q_entity)``; both are ``None`` when the graph has
    too few entities or no suitable source node.
    """
    # join ep_edges by src node; also record all positive triples and the
    # set of entity node ids seen as tails of ep edges
    src_nodes = {}
    ep_ridxs = {rtype2idx['s'], rtype2idx['o'], rtype2idx['prep']}
    n_edges = ng_edges.shape[1]
    all_pos_edges = set()
    entity_nids = set()
    for col in range(n_edges):
        triple = tuple(ng_edges[:3, col].astype('int64'))
        all_pos_edges.add(triple)
        if triple[1] in ep_ridxs:
            src_nodes.setdefault(triple[0], []).append((col, triple))
            entity_nids.add(int(triple[2]))
    if len(entity_nids) < args.n_choices:
        return None, None
    # Task1: random an event node with >= 3 ep edges, predict edge types
    candidate_sources = {src: es for src, es in src_nodes.items() if len(es) >= 3}
    if len(candidate_sources) == 0:
        return None, None
    keys = list(candidate_sources.keys())
    src_nid = keys[random.randint(0, len(candidate_sources) - 1)]
    edges = candidate_sources[src_nid]
    q_link = (src_nid, edges)  # question
    # Task2: random one edge, predict its entity among sampled distractors
    entity_nid_list = list(entity_nids)
    r_eidx, r_edge = edges[random.randint(0, len(edges) - 1)]
    answer = r_edge[2]
    choices = [answer]
    while len(choices) < args.n_choices:
        cand_nid = entity_nid_list[random.randint(0, len(entity_nid_list) - 1)]
        # reject candidates that would form a true triple
        if (r_edge[0], r_edge[1], cand_nid) in all_pos_edges:
            continue
        choices.append(cand_nid)
    order = list(range(len(choices)))
    random.shuffle(order)
    correct = order.index(0)
    choices = [choices[i] for i in order]
    q_entity = (r_eidx, r_edge, correct, choices)  # question
    return q_link, q_entity
def main():
    """Sample intrinsic-evaluation questions from every narrative-graph
    HDF5 file in ``args.input_dir`` and dump one pickle of questions per
    input file into ``args.output_dir``.
    """
    config = json.load(open(args.config_file))
    assert config["config_target"] == "narrative_graph"
    rtype2idx = config['rtype2idx']
    # split relation types into entity-predicate vs predicate-predicate sets
    if config['no_entity']:
        ep_rtype_rev = {}
        ent_pred_ridxs = set()
    else:
        ep_rtype_rev = {rtype2idx[v]: rtype2idx[k] for k, v in
                        config['entity_predicate_rtypes'].items()}
        ent_pred_ridxs = set(ep_rtype_rev.keys())
    n_rtypes = len(rtype2idx)
    pred_pred_ridxs = set(range(n_rtypes)) - ent_pred_ridxs
    # discourse relations = pp relations minus the next/cnext links
    disc_pred_pred_ridxs = pred_pred_ridxs - {rtype2idx['next'], rtype2idx['cnext']}
    t2 = time.time()
    # per-task question counters (discourse/entity counts are broken down by rtype)
    q_counts = {
        'pp_coref_next': 0,
        'pp_next': 0,
        'pp_discourse_next': {},
        'ep_link': 0,
        'ep_entity': {}
    }
    count_gids = 0
    fs = sorted([f for f in os.listdir(args.input_dir) if f.endswith('.h5')])
    for f in fs:
        fpath = os.path.join(args.input_dir, f)
        logger.info('processing {}...'.format(fpath))
        fr = h5py.File(fpath, 'r')
        questions = OrderedDict()
        for gn in tqdm(fr.keys()):
            questions[gn] = {}
            gid = int(gn.split('_')[-1])
            bert_inputs = fr[gn]['bert_inputs'][:]
            bert_target_idxs = fr[gn]['bert_target_idxs'][:]
            bert_nid2rows = fr[gn]['bert_nid2rows'][:]
            ng_edges = fr[gn]['ng_edges'][:]
            n_nodes = bert_nid2rows.shape[0]
            # # sample PP_COREF_NEXT task
            q = sample_node_multiple_choice_v2(
                ng_edges, bert_inputs, bert_target_idxs, bert_nid2rows,
                {rtype2idx['cnext']}, fr, gid)
            questions[gn]['pp_coref_next'] = q
            if q is not None:
                q_counts['pp_coref_next'] += 1
            # sample PP_NEXT task
            q = sample_node_multiple_choice_v2(
                ng_edges, bert_inputs, bert_target_idxs, bert_nid2rows,
                {rtype2idx['next']}, fr, gid)
            questions[gn]['pp_next'] = q
            if q is not None:
                q_counts['pp_next'] += 1
            # sample PP_DISCOURSE_NEXT task
            q = sample_node_multiple_choice_v2(
                ng_edges, bert_inputs, bert_target_idxs, bert_nid2rows,
                disc_pred_pred_ridxs, fr, gid)
            questions[gn]['pp_discourse_next'] = q
            if q is not None: # count by rtypes
                # q = (new_edges, correct, choices); the answer triple's
                # second element is its relation type
                ans = q[2][q[1]]
                rtype = ans[1]
                # ans = q[1][q[0]]
                # rtype = int(ans['edge'][1])
                if rtype not in q_counts['pp_discourse_next']:
                    q_counts['pp_discourse_next'][rtype] = 0
                q_counts['pp_discourse_next'][rtype] += 1
            # sample PP_DISCOURSE_LINK_TYPE task
            # reuse the above links
            # sample PP_DISCOURSE_TRIPLET task
            # evaluate on the sampled test set
            if not config['no_entity']:
                # sample EP_LINK_TYPE
                q_link, q_entity = sample_ep_questions(ng_edges, rtype2idx)
                questions[gn]['ep_link'] = q_link
                if q_link is not None:
                    q_counts['ep_link'] += 1
                # # sample EP_NODE task
                questions[gn]['ep_entity'] = q_entity
                if q_entity is not None:
                    rtype = int(q_entity[1][1])
                    if rtype not in q_counts['ep_entity']:
                        q_counts['ep_entity'][rtype] = 0
                    q_counts['ep_entity'][rtype] += 1
            count_gids += 1
        fr.close()
        # dump questions for a file
        fn = '.'.join(f.split('.')[:-1])
        fpath = os.path.join(args.output_dir, 'q_{}.pkl'.format(fn))
        logger.info('dumping {}...'.format(fpath))
        pkl.dump(questions, open(fpath, 'wb'))
    logger.info('#graphs = {}'.format(count_gids))
    logger.info('q_counts = {}'.format(q_counts))
if __name__ == "__main__":
    # parse CLI/config into module-level globals (args, logger) that the
    # sampling functions above read directly
    args = utils.bin_config(get_arguments)
    logger = utils.get_root_logger(args)
    main()
|
[
"nglib.common.utils.get_root_logger",
"nglib.common.utils.bin_config",
"h5py.File",
"numpy.random.seed",
"argparse.ArgumentParser",
"random.randint",
"torch.manual_seed",
"random.shuffle",
"time.time",
"random.seed",
"collections.OrderedDict",
"os.path.join",
"os.listdir",
"numpy.concatenate"
] |
[((305, 370), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""more intrinsic evaluations"""'}), "(description='more intrinsic evaluations')\n", (328, 370), False, 'import argparse\n'), ((1360, 1377), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (1371, 1377), False, 'import random\n'), ((1382, 1402), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (1396, 1402), True, 'import numpy as np\n'), ((1407, 1430), 'torch.manual_seed', 'torch.manual_seed', (['seed'], {}), '(seed)\n', (1424, 1430), False, 'import torch\n'), ((4091, 4118), 'random.shuffle', 'random.shuffle', (['choice_idxs'], {}), '(choice_idxs)\n', (4105, 4118), False, 'import random\n'), ((4818, 4886), 'numpy.concatenate', 'np.concatenate', (['(ng_edges[:, :eidx], ng_edges[:, eidx + 1:])'], {'axis': '(1)'}), '((ng_edges[:, :eidx], ng_edges[:, eidx + 1:]), axis=1)\n', (4832, 4886), True, 'import numpy as np\n'), ((5385, 5412), 'random.shuffle', 'random.shuffle', (['choice_idxs'], {}), '(choice_idxs)\n', (5399, 5412), False, 'import random\n'), ((7045, 7065), 'random.shuffle', 'random.shuffle', (['idxs'], {}), '(idxs)\n', (7059, 7065), False, 'import random\n'), ((7844, 7855), 'time.time', 'time.time', ([], {}), '()\n', (7853, 7855), False, 'import time\n'), ((11230, 11261), 'nglib.common.utils.bin_config', 'utils.bin_config', (['get_arguments'], {}), '(get_arguments)\n', (11246, 11261), False, 'from nglib.common import utils\n'), ((11275, 11302), 'nglib.common.utils.get_root_logger', 'utils.get_root_logger', (['args'], {}), '(args)\n', (11296, 11302), False, 'from nglib.common import utils\n'), ((5117, 5147), 'random.randint', 'random.randint', (['(0)', '(n_nodes - 1)'], {}), '(0, n_nodes - 1)\n', (5131, 5147), False, 'import random\n'), ((8138, 8169), 'os.path.join', 'os.path.join', (['args.input_dir', 'f'], {}), '(args.input_dir, f)\n', (8150, 8169), False, 'import os\n'), ((8238, 8259), 'h5py.File', 'h5py.File', (['fpath', '"""r"""'], {}), 
"(fpath, 'r')\n", (8247, 8259), False, 'import h5py\n'), ((8280, 8293), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (8291, 8293), False, 'from collections import OrderedDict\n'), ((8055, 8081), 'os.listdir', 'os.listdir', (['args.input_dir'], {}), '(args.input_dir)\n', (8065, 8081), False, 'import os\n')]
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Detection model evaluator.
This file provides a generic evaluation method that can be used to evaluate a
DetectionModel.
"""
import tensorflow as tf
from collections import namedtuple
from module import *
import re
TOWER_NAME = 'tower'
def activation_summary(x):
  """Attach histogram and sparsity summaries for an activation tensor.

  The tower prefix (e.g. ``tower_0/``) is stripped from the op name so a
  multi-GPU training session shows each op under a single name on
  tensorboard.

  Args:
    x: Tensor
  Returns:
    nothing
  """
  name = re.sub('%s_[0-9]*/' % TOWER_NAME, '', x.op.name)
  tf.summary.histogram('%s/activations' % name, x)
  tf.summary.scalar('%s/sparsity' % name, tf.nn.zero_fraction(x))
def gated_denoise(images, FLAGS, is_training=False, noisy_batch_size=1):
    """Build the gated denoising graph.

    Stages (each enabled by a flag): an average filter or a learned mixture
    of average filters; a resnet generator ("denoise") with optional GAN
    losses; and an input discriminator that, per example, gates between the
    raw images and the filtered/denoised ones.

    Args:
        images: input batch; during training the first ``noisy_batch_size``
            examples are noisy and the remainder are clean counterparts.
        FLAGS: configuration flags selecting the stages and loss weights.
        is_training: also build loss and training summaries.
        noisy_batch_size: number of leading noisy examples in ``images``.

    Returns:
        (processed_images, losses_dict, summaries_dict)
    """
    # Discriminator Network
    # We are just mixing the two images
    losses_dict = {}
    summaries_dict = {}
    OPTIONS = namedtuple('OPTIONS', 'gf_dim df_dim output_c_dim is_training')
    options = OPTIONS._make((FLAGS.ngf, FLAGS.ndf, 3, is_training))
    criterionGAN = mae_criterion
    criterionGAN2 = sce_criterion
    if is_training:
        noisy_images = images[:noisy_batch_size]
        original_images = images[noisy_batch_size:]
    # Filter
    summaries_dict['prefiltered_noisy_images'] = tf.summary.image('prefiltered_noisy_images', images, max_outputs=1)
    if is_training:
        summaries_dict['prefiltered_clean_images'] = tf.summary.image('prefiltered_clean_images', images[noisy_batch_size:], max_outputs=1)
    filtered_images = images
    if FLAGS.average_filter:
        print('average filter is applied')
        if FLAGS.mixture_of_filters:
            with tf.variable_scope('filter_gate') as scope:
                print('Building mixture of filters model')
                # candidate filters of window sizes 2/3/4; the gate predicts
                # per-example mixing weights over {identity, 3 filters}
                filtered_images0 = average_filter(images, 2)
                filtered_images1 = average_filter(images, 3)
                filtered_images2 = average_filter(images, 4)
                probs = gate(images, FLAGS.ndf, num_classes=4, reuse=False, name='gate')
                activation_summary(probs)
                filtered_images = tf.add_n([probs[:,0,None,None,None]*images,
                                           probs[:,1,None,None,None]*filtered_images0,
                                           probs[:,2,None,None,None]*filtered_images1,
                                           probs[:,3,None,None,None]*filtered_images2])
        else:
            filtered_images = average_filter(images, FLAGS.filter_size)
        summaries_dict['filtered_noisy_images'] = tf.summary.image('filtered_noisy_images', filtered_images, max_outputs=1)
        if is_training:
            summaries_dict['filtered_clean_images'] = tf.summary.image('filtered_clean_images', filtered_images[noisy_batch_size:], max_outputs=1)
    if FLAGS.denoise:
        print('denoise is applied')
        with tf.variable_scope('denoise') as scope:
            if FLAGS.generator_separate_channel:
                denoised_images = generator_separate_resnet(filtered_images, options, res_depth=FLAGS.res_depth, reuse=False, name='generator')
            else:
                denoised_images = generator_resnet(filtered_images, options, res_depth=FLAGS.res_depth, output_c_dim=3, reuse=False, name='generator')
            if is_training:
                # similarity (reconstruction) loss between denoised noisy
                # examples and their clean counterparts
                denoise_sim_loss = abs_criterion(denoised_images[:noisy_batch_size], original_images, name='g_loss/sim_loss')
                g_loss = FLAGS.denoise_loss_factor * denoise_sim_loss
                losses_dict['g_loss'] = g_loss
                summaries_dict['g_loss/sim_loss'] = tf.summary.scalar('g_loss/sim_loss', denoise_sim_loss)
            if FLAGS.denoise_discrim:
                # conditional discriminator sees (noisy, candidate) pairs
                noisy_and_denoised = tf.concat([noisy_images, denoised_images], 3)
                d_denoised = discriminator(noisy_and_denoised, FLAGS.ndf, reuse=False, name='discriminator')
                if is_training:
                    noisy_and_original = tf.concat([noisy_images, original_images], 3)
                    d_original = discriminator(noisy_and_original, FLAGS.ndf, reuse=True, name='discriminator')
                    # generator loss
                    g_gan_loss = criterionGAN(d_denoised, tf.ones_like(d_denoised))
                    summaries_dict['g_gan_loss'] = tf.summary.scalar('g_loss/g_gan_loss', g_gan_loss)
                    g_loss += FLAGS.denoise_gan_loss_factor * g_gan_loss
                    losses_dict['g_loss'] = g_loss
                    # discriminator loss
                    d_loss_real = criterionGAN(d_original, tf.ones_like(d_original))
                    d_loss_fake = criterionGAN(d_denoised, tf.zeros_like(d_denoised))
                    d_loss = FLAGS.denoise_gan_loss_factor * (d_loss_real + d_loss_fake) / 2
                    losses_dict['d_loss'] = d_loss
                    summaries_dict['d_loss'] = tf.summary.scalar('d_loss', d_loss)
            if is_training:
                summaries_dict['g_loss'] = tf.summary.scalar('g_loss', g_loss)
        filtered_images = denoised_images
        # NOTE: re-uses the 'filtered_*' summary keys, overwriting the ones
        # written by the average-filter stage above
        summaries_dict['filtered_noisy_images'] = tf.summary.image('filtered_noisy_images', filtered_images, max_outputs=1)
        if is_training:
            summaries_dict['filtered_clean_images'] = tf.summary.image('filtered_clean_images', filtered_images[noisy_batch_size:], max_outputs=1)
    # Gate Operation
    if FLAGS.discrim:
        print('gate network is applied')
        with tf.variable_scope('input_discrim') as scope:
            d_in_logits = discriminator(images, FLAGS.ndf, reuse=False, name='discriminator')
            if is_training:
                # clean examples are the "real" class, noisy ones "fake"
                d_in_loss_real = criterionGAN2(d_in_logits[noisy_batch_size:], tf.ones_like(d_in_logits[noisy_batch_size:]))
                d_in_loss_fake = criterionGAN2(d_in_logits[:noisy_batch_size], tf.zeros_like(d_in_logits[:noisy_batch_size]))
                d_in_loss = FLAGS.discrim_loss_factor * (d_in_loss_real + d_in_loss_fake) / 2
                losses_dict['d_in_loss'] = d_in_loss
                summaries_dict['d_in_loss'] = tf.summary.scalar('d_in_loss', d_in_loss)
                #tf.add_to_collection('losses', losses_dict['d_in_loss'])
            d_in_logits = tf.reduce_mean(d_in_logits, [1, 2, 3])
            d_in_sigmoid = tf.nn.sigmoid(d_in_logits, name='is_clean')
            activation_summary(d_in_sigmoid)
            # per-example convex mix: the cleaner the input looks, the more
            # of the raw image is kept
            images = tf.add(d_in_sigmoid[:,None,None,None] * images,
                            (1 - d_in_sigmoid[:,None,None,None]) * filtered_images)
    else:
        images = filtered_images
    summaries_dict['preprocessed_noisy_images'] = tf.summary.image('preprocessed_noisy_images', images, max_outputs=1)
    if is_training:
        summaries_dict['preprocessed_clean_images'] = tf.summary.image('preprocessed_clean_images', images[noisy_batch_size:], max_outputs=1)
    return images, losses_dict, summaries_dict
|
[
"tensorflow.summary.image",
"tensorflow.nn.zero_fraction",
"tensorflow.add_n",
"tensorflow.summary.scalar",
"tensorflow.reduce_mean",
"tensorflow.variable_scope",
"tensorflow.add",
"tensorflow.concat",
"tensorflow.ones_like",
"tensorflow.zeros_like",
"tensorflow.summary.histogram",
"collections.namedtuple",
"tensorflow.nn.sigmoid",
"re.sub"
] |
[((1347, 1395), 're.sub', 're.sub', (["('%s_[0-9]*/' % TOWER_NAME)", '""""""', 'x.op.name'], {}), "('%s_[0-9]*/' % TOWER_NAME, '', x.op.name)\n", (1353, 1395), False, 'import re\n'), ((1398, 1451), 'tensorflow.summary.histogram', 'tf.summary.histogram', (["(tensor_name + '/activations')", 'x'], {}), "(tensor_name + '/activations', x)\n", (1418, 1451), True, 'import tensorflow as tf\n'), ((1754, 1817), 'collections.namedtuple', 'namedtuple', (['"""OPTIONS"""', '"""gf_dim df_dim output_c_dim is_training"""'], {}), "('OPTIONS', 'gf_dim df_dim output_c_dim is_training')\n", (1764, 1817), False, 'from collections import namedtuple\n'), ((2120, 2187), 'tensorflow.summary.image', 'tf.summary.image', (['"""prefiltered_noisy_images"""', 'images'], {'max_outputs': '(1)'}), "('prefiltered_noisy_images', images, max_outputs=1)\n", (2136, 2187), True, 'import tensorflow as tf\n'), ((6955, 7023), 'tensorflow.summary.image', 'tf.summary.image', (['"""preprocessed_noisy_images"""', 'images'], {'max_outputs': '(1)'}), "('preprocessed_noisy_images', images, max_outputs=1)\n", (6971, 7023), True, 'import tensorflow as tf\n'), ((1538, 1560), 'tensorflow.nn.zero_fraction', 'tf.nn.zero_fraction', (['x'], {}), '(x)\n', (1557, 1560), True, 'import tensorflow as tf\n'), ((2255, 2345), 'tensorflow.summary.image', 'tf.summary.image', (['"""prefiltered_clean_images"""', 'images[noisy_batch_size:]'], {'max_outputs': '(1)'}), "('prefiltered_clean_images', images[noisy_batch_size:],\n max_outputs=1)\n", (2271, 2345), True, 'import tensorflow as tf\n'), ((3281, 3354), 'tensorflow.summary.image', 'tf.summary.image', (['"""filtered_noisy_images"""', 'filtered_images'], {'max_outputs': '(1)'}), "('filtered_noisy_images', filtered_images, max_outputs=1)\n", (3297, 3354), True, 'import tensorflow as tf\n'), ((5568, 5641), 'tensorflow.summary.image', 'tf.summary.image', (['"""filtered_noisy_images"""', 'filtered_images'], {'max_outputs': '(1)'}), "('filtered_noisy_images', filtered_images, 
max_outputs=1)\n", (5584, 5641), True, 'import tensorflow as tf\n'), ((7092, 7183), 'tensorflow.summary.image', 'tf.summary.image', (['"""preprocessed_clean_images"""', 'images[noisy_batch_size:]'], {'max_outputs': '(1)'}), "('preprocessed_clean_images', images[noisy_batch_size:],\n max_outputs=1)\n", (7108, 7183), True, 'import tensorflow as tf\n'), ((3423, 3520), 'tensorflow.summary.image', 'tf.summary.image', (['"""filtered_clean_images"""', 'filtered_images[noisy_batch_size:]'], {'max_outputs': '(1)'}), "('filtered_clean_images', filtered_images[noisy_batch_size:\n ], max_outputs=1)\n", (3439, 3520), True, 'import tensorflow as tf\n'), ((3578, 3606), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""denoise"""'], {}), "('denoise')\n", (3595, 3606), True, 'import tensorflow as tf\n'), ((5710, 5807), 'tensorflow.summary.image', 'tf.summary.image', (['"""filtered_clean_images"""', 'filtered_images[noisy_batch_size:]'], {'max_outputs': '(1)'}), "('filtered_clean_images', filtered_images[noisy_batch_size:\n ], max_outputs=1)\n", (5726, 5807), True, 'import tensorflow as tf\n'), ((5889, 5923), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""input_discrim"""'], {}), "('input_discrim')\n", (5906, 5923), True, 'import tensorflow as tf\n'), ((6583, 6621), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['d_in_logits', '[1, 2, 3]'], {}), '(d_in_logits, [1, 2, 3])\n', (6597, 6621), True, 'import tensorflow as tf\n'), ((6643, 6686), 'tensorflow.nn.sigmoid', 'tf.nn.sigmoid', (['d_in_logits'], {'name': '"""is_clean"""'}), "(d_in_logits, name='is_clean')\n", (6656, 6686), True, 'import tensorflow as tf\n'), ((6742, 6855), 'tensorflow.add', 'tf.add', (['(d_in_sigmoid[:, None, None, None] * images)', '((1 - d_in_sigmoid[:, None, None, None]) * filtered_images)'], {}), '(d_in_sigmoid[:, None, None, None] * images, (1 - d_in_sigmoid[:,\n None, None, None]) * filtered_images)\n', (6748, 6855), True, 'import tensorflow as tf\n'), ((2480, 2512), 
'tensorflow.variable_scope', 'tf.variable_scope', (['"""filter_gate"""'], {}), "('filter_gate')\n", (2497, 2512), True, 'import tensorflow as tf\n'), ((2874, 3082), 'tensorflow.add_n', 'tf.add_n', (['[probs[:, 0, None, None, None] * images, probs[:, 1, None, None, None] *\n filtered_images0, probs[:, 2, None, None, None] * filtered_images1, \n probs[:, 3, None, None, None] * filtered_images2]'], {}), '([probs[:, 0, None, None, None] * images, probs[:, 1, None, None,\n None] * filtered_images0, probs[:, 2, None, None, None] *\n filtered_images1, probs[:, 3, None, None, None] * filtered_images2])\n', (2882, 3082), True, 'import tensorflow as tf\n'), ((4238, 4292), 'tensorflow.summary.scalar', 'tf.summary.scalar', (['"""g_loss/sim_loss"""', 'denoise_sim_loss'], {}), "('g_loss/sim_loss', denoise_sim_loss)\n", (4255, 4292), True, 'import tensorflow as tf\n'), ((4354, 4399), 'tensorflow.concat', 'tf.concat', (['[noisy_images, denoised_images]', '(3)'], {}), '([noisy_images, denoised_images], 3)\n', (4363, 4399), True, 'import tensorflow as tf\n'), ((5445, 5480), 'tensorflow.summary.scalar', 'tf.summary.scalar', (['"""g_loss"""', 'g_loss'], {}), "('g_loss', g_loss)\n", (5462, 5480), True, 'import tensorflow as tf\n'), ((6452, 6493), 'tensorflow.summary.scalar', 'tf.summary.scalar', (['"""d_in_loss"""', 'd_in_loss'], {}), "('d_in_loss', d_in_loss)\n", (6469, 6493), True, 'import tensorflow as tf\n'), ((4556, 4601), 'tensorflow.concat', 'tf.concat', (['[noisy_images, original_images]', '(3)'], {}), '([noisy_images, original_images], 3)\n', (4565, 4601), True, 'import tensorflow as tf\n'), ((4848, 4898), 'tensorflow.summary.scalar', 'tf.summary.scalar', (['"""g_loss/g_gan_loss"""', 'g_gan_loss'], {}), "('g_loss/g_gan_loss', g_gan_loss)\n", (4865, 4898), True, 'import tensorflow as tf\n'), ((5351, 5386), 'tensorflow.summary.scalar', 'tf.summary.scalar', (['"""d_loss"""', 'd_loss'], {}), "('d_loss', d_loss)\n", (5368, 5386), True, 'import tensorflow as tf\n'), ((6117, 6161), 
'tensorflow.ones_like', 'tf.ones_like', (['d_in_logits[noisy_batch_size:]'], {}), '(d_in_logits[noisy_batch_size:])\n', (6129, 6161), True, 'import tensorflow as tf\n'), ((6234, 6279), 'tensorflow.zeros_like', 'tf.zeros_like', (['d_in_logits[:noisy_batch_size]'], {}), '(d_in_logits[:noisy_batch_size])\n', (6247, 6279), True, 'import tensorflow as tf\n'), ((4781, 4805), 'tensorflow.ones_like', 'tf.ones_like', (['d_denoised'], {}), '(d_denoised)\n', (4793, 4805), True, 'import tensorflow as tf\n'), ((5085, 5109), 'tensorflow.ones_like', 'tf.ones_like', (['d_original'], {}), '(d_original)\n', (5097, 5109), True, 'import tensorflow as tf\n'), ((5160, 5185), 'tensorflow.zeros_like', 'tf.zeros_like', (['d_denoised'], {}), '(d_denoised)\n', (5173, 5185), True, 'import tensorflow as tf\n')]
|
import unicodedata
def filter_accents(text):
    """Collect the decomposable (accented/compatibility) characters of *text*.

    The text is lowercased first; a character is kept when the Unicode
    database records a decomposition for it (base character plus combining
    mark, or a compatibility mapping).  Returns a set of such characters.
    """
    found = set()
    for ch in text.lower():
        # decomposition() returns '' for characters with no mapping
        if unicodedata.decomposition(ch):
            found.add(ch)
    return found
|
[
"unicodedata.decomposition"
] |
[((306, 337), 'unicodedata.decomposition', 'unicodedata.decomposition', (['char'], {}), '(char)\n', (331, 337), False, 'import unicodedata\n')]
|
# RUN: %PYTHON %s
import numpy as np
from shark.shark_importer import SharkImporter
import pytest
model_path = "https://tfhub.dev/tensorflow/lite-model/albert_lite_base/squadv1/1?lite-format=tflite"
# Inputs modified to be useful albert inputs.
def generate_inputs(input_details):
    """Build a synthetic feed for the albert-lite SQuAD TFLite model.

    Expects at least three entries in ``input_details`` (as returned by the
    TFLite interpreter): index 0 gets random ids in ``[0, 256)``, index 1
    all ones (mask), index 2 all zeros (segment ids).

    Args:
        input_details: list of dicts with ``"shape"`` and ``"dtype"`` keys.
    Returns:
        list of three numpy arrays matching the first three input specs.
    """
    # NOTE: the loop variable used to shadow the builtin ``input``
    for detail in input_details:
        print(str(detail["shape"]), detail["dtype"].__name__)
    args = []
    # token ids: random integers in [0, 256)
    args.append(
        np.random.randint(
            low=0,
            high=256,
            size=input_details[0]["shape"],
            dtype=input_details[0]["dtype"],
        )
    )
    # attention mask: all ones
    args.append(
        np.ones(
            shape=input_details[1]["shape"], dtype=input_details[1]["dtype"]
        )
    )
    # segment ids: all zeros
    args.append(
        np.zeros(
            shape=input_details[2]["shape"], dtype=input_details[2]["dtype"]
        )
    )
    return args
if __name__ == "__main__":
    # import the remote TFLite model through SharkImporter (cpu, static shapes)
    my_shark_importer = SharkImporter(
        model_path=model_path,
        model_type="tflite",
        model_source_hub="tfhub",
        device="cpu",
        dynamic=False,
        jit_trace=True,
    )
    # Case1: Use default inputs
    my_shark_importer.compile()
    shark_results = my_shark_importer.forward()
    # Case2: Use manually set inputs
    input_details, output_details = my_shark_importer.get_model_details()
    inputs = generate_inputs(input_details)  # device_inputs
    my_shark_importer.compile(inputs)
    shark_results = my_shark_importer.forward(inputs)
    # print(shark_results)
|
[
"numpy.random.randint",
"numpy.zeros",
"numpy.ones",
"shark.shark_importer.SharkImporter"
] |
[((905, 1038), 'shark.shark_importer.SharkImporter', 'SharkImporter', ([], {'model_path': 'model_path', 'model_type': '"""tflite"""', 'model_source_hub': '"""tfhub"""', 'device': '"""cpu"""', 'dynamic': '(False)', 'jit_trace': '(True)'}), "(model_path=model_path, model_type='tflite', model_source_hub=\n 'tfhub', device='cpu', dynamic=False, jit_trace=True)\n", (918, 1038), False, 'from shark.shark_importer import SharkImporter\n'), ((416, 520), 'numpy.random.randint', 'np.random.randint', ([], {'low': '(0)', 'high': '(256)', 'size': "input_details[0]['shape']", 'dtype': "input_details[0]['dtype']"}), "(low=0, high=256, size=input_details[0]['shape'], dtype=\n input_details[0]['dtype'])\n", (433, 520), True, 'import numpy as np\n'), ((606, 679), 'numpy.ones', 'np.ones', ([], {'shape': "input_details[1]['shape']", 'dtype': "input_details[1]['dtype']"}), "(shape=input_details[1]['shape'], dtype=input_details[1]['dtype'])\n", (613, 679), True, 'import numpy as np\n'), ((733, 807), 'numpy.zeros', 'np.zeros', ([], {'shape': "input_details[2]['shape']", 'dtype': "input_details[2]['dtype']"}), "(shape=input_details[2]['shape'], dtype=input_details[2]['dtype'])\n", (741, 807), True, 'import numpy as np\n')]
|
from typing import List, Tuple, Optional
import numpy as np
import os
import torch
from torch import nn
from environments.environment_abstract import Environment, State
from collections import OrderedDict
import re
from random import shuffle
from torch import Tensor
import torch.optim as optim
from torch.optim.optimizer import Optimizer
from torch.multiprocessing import Queue, get_context
import time
# training
def states_nnet_to_pytorch_input(states_nnet: List[np.ndarray], device) -> List[Tensor]:
    """Copy each numpy input array onto ``device`` as a torch tensor."""
    return [torch.tensor(arr, device=device) for arr in states_nnet]
def make_batches(states_nnet: List[np.ndarray], outputs: np.ndarray,
                 batch_size: int) -> List[Tuple[List[np.ndarray], np.ndarray]]:
    """Shuffle the examples and partition them into full batches.

    Examples are permuted without replacement; any trailing remainder
    smaller than ``batch_size`` is dropped.  Outputs are cast to float32.
    """
    n_examples = outputs.shape[0]
    perm = np.random.choice(n_examples, n_examples, replace=False)
    outputs_f32 = outputs.astype(np.float32)

    batches = []
    for lo in range(0, n_examples - batch_size + 1, batch_size):
        sel = perm[lo:lo + batch_size]
        batches.append(([arr[sel] for arr in states_nnet], outputs_f32[sel]))
    return batches
def train_nnet(nnet: nn.Module, states_nnet: List[np.ndarray], outputs: np.ndarray, device: torch.device,
               batch_size: int, num_itrs: int, train_itr: int, lr: float, lr_d: float, display: bool = True) -> float:
    """Train ``nnet`` for ``num_itrs`` iterations with MSE on the first
    output column (cost-to-go) and an exponentially decayed Adam LR.

    Args:
        nnet: network mapping nnet-format state tensors to predictions.
        states_nnet: nnet-format state arrays (one per input head).
        outputs: target values; column 0 is the cost-to-go target.
        device: device tensors are copied to.
        batch_size: examples per iteration.
        num_itrs: number of optimizer steps to run.
        train_itr: global iteration counter to start from (drives LR decay).
        lr: initial learning rate; lr_d: per-iteration decay factor.
        display: print progress every 100 iterations.

    Returns:
        Loss value of the final iteration.
    """
    # optimization
    display_itrs = 100
    criterion = nn.MSELoss()
    optimizer: Optimizer = optim.Adam(nnet.parameters(), lr=lr)

    # initialize status tracking
    start_time = time.time()

    # train network
    batches: List[Tuple[List, np.ndarray]] = make_batches(states_nnet, outputs, batch_size)

    nnet.train()
    max_itrs: int = train_itr + num_itrs
    last_loss: float = np.inf

    batch_idx: int = 0
    while train_itr < max_itrs:
        # zero the parameter gradients
        optimizer.zero_grad()
        # exponential LR decay keyed on the *global* iteration counter
        lr_itr: float = lr * (lr_d ** train_itr)
        for param_group in optimizer.param_groups:
            param_group['lr'] = lr_itr

        # get data
        inputs_batch, targets_batch_np = batches[batch_idx]
        targets_batch_np = targets_batch_np.astype(np.float32)

        # send data to device
        states_batch: List[Tensor] = states_nnet_to_pytorch_input(inputs_batch, device)
        targets_batch: Tensor = torch.tensor(targets_batch_np, device=device)

        # forward
        nnet_outputs_batch: Tensor = nnet(*states_batch)

        # cost
        nnet_cost_to_go = nnet_outputs_batch[:, 0]
        target_cost_to_go = targets_batch[:, 0]
        loss = criterion(nnet_cost_to_go, target_cost_to_go)

        # backwards
        loss.backward()

        # step
        optimizer.step()

        last_loss = loss.item()
        # display progress
        if (train_itr % display_itrs == 0) and display:
            print("Itr: %i, lr: %.2E, loss: %.2f, targ_ctg: %.2f, nnet_ctg: %.2f, "
                  "Time: %.2f" % (
                train_itr, lr_itr, loss.item(), target_cost_to_go.mean().item(), nnet_cost_to_go.mean().item(),
                time.time() - start_time))

            start_time = time.time()

        train_itr = train_itr + 1
        batch_idx += 1
        # wrap around after an epoch, reshuffling batch order
        if batch_idx >= len(batches):
            shuffle(batches)
            batch_idx = 0

    return last_loss
# pytorch device
def get_device() -> Tuple[torch.device, List[int], bool]:
    """Pick the compute device: first visible GPU when available, else CPU.

    Returns ``(device, visible_gpu_ids, on_gpu)``.
    """
    gpu_ids: List[int] = get_available_gpu_nums()
    if gpu_ids and torch.cuda.is_available():
        return torch.device("cuda:%i" % 0), gpu_ids, True
    return torch.device("cpu"), gpu_ids, False
# loading nnet
def load_nnet(model_file: str, nnet: nn.Module, device: torch.device = None) -> nn.Module:
    """Load a saved state dict into ``nnet`` and switch it to eval mode.

    Keys saved from a ``DataParallel`` model carry a ``module.`` prefix,
    which is stripped before loading.  Returns the same ``nnet`` instance.
    """
    if device is None:
        raw_state = torch.load(model_file)
    else:
        raw_state = torch.load(model_file, map_location=device)

    cleaned = OrderedDict(
        (re.sub(r'^module\.', '', key), val) for key, val in raw_state.items()
    )

    nnet.load_state_dict(cleaned)
    nnet.eval()
    return nnet
# heuristic
def get_heuristic_fn(nnet: nn.Module, device: torch.device, env: Environment, clip_zero: bool = False,
                     batch_size: Optional[int] = None):
    """Wrap ``nnet`` as a heuristic function over states.

    The returned function accepts either raw states (converted with
    ``env.state_to_nnet_input``) or already nnet-formatted arrays
    (``is_nnet_format=True``), evaluates them in batches of at most
    ``batch_size`` (all at once when ``None``), and returns a 1-D numpy
    array of cost-to-go values (column 0 of the network output).  When
    ``clip_zero`` is set, negative predictions are clipped to 0.
    """
    nnet.eval()

    def heuristic_fn(states: List, is_nnet_format: bool = False) -> np.ndarray:
        cost_to_go: np.ndarray = np.zeros(0)
        if not is_nnet_format:
            num_states: int = len(states)
        else:
            # nnet format: a list of arrays whose first axis is the batch
            num_states: int = states[0].shape[0]

        batch_size_inst: int = num_states
        if batch_size is not None:
            batch_size_inst = batch_size

        start_idx: int = 0
        while start_idx < num_states:
            # get batch
            end_idx: int = min(start_idx + batch_size_inst, num_states)

            # convert to nnet input
            if not is_nnet_format:
                states_batch: List = states[start_idx:end_idx]
                states_nnet_batch: List[np.ndarray] = env.state_to_nnet_input(states_batch)
            else:
                states_nnet_batch = [x[start_idx:end_idx] for x in states]

            # get nnet output
            states_nnet_batch_tensors = states_nnet_to_pytorch_input(states_nnet_batch, device)
            cost_to_go_batch: np.ndarray = nnet(*states_nnet_batch_tensors).cpu().data.numpy()

            cost_to_go: np.ndarray = np.concatenate((cost_to_go, cost_to_go_batch[:, 0]), axis=0)

            start_idx: int = end_idx

        assert (cost_to_go.shape[0] == num_states)

        if clip_zero:
            cost_to_go = np.maximum(cost_to_go, 0.0)

        return cost_to_go

    return heuristic_fn
def get_available_gpu_nums() -> List[int]:
    """Parse ``CUDA_VISIBLE_DEVICES`` into a list of GPU ids (empty when unset)."""
    env_val: Optional[str] = os.environ.get('CUDA_VISIBLE_DEVICES')
    if not env_val:
        return []
    return [int(tok) for tok in env_val.split(',')]
def load_heuristic_fn(nnet_dir: str, device: torch.device, on_gpu: bool, nnet: nn.Module, env: Environment,
                      clip_zero: bool = False, gpu_num: int = -1, batch_size: Optional[int] = None):
    """Load trained weights from ``nnet_dir`` and wrap them as a heuristic fn."""
    if on_gpu and (gpu_num >= 0):
        # pin this process to the requested GPU before any CUDA work
        os.environ['CUDA_VISIBLE_DEVICES'] = str(gpu_num)

    nnet = load_nnet("%s/model_state_dict.pt" % nnet_dir, nnet, device=device)
    nnet.eval()
    nnet.to(device)
    if on_gpu:
        nnet = nn.DataParallel(nnet)

    return get_heuristic_fn(nnet, device, env, clip_zero=clip_zero, batch_size=batch_size)
def heuristic_fn_par(states: List[State], env: Environment, heur_fn_i_q, heur_fn_o_qs):
    """Fan states out over the heuristic worker queues and gather results.

    States (in nnet format) are split into contiguous chunks, one per
    worker (at most ``len(states)`` workers used); results are read back
    in worker order and concatenated.
    """
    num_workers = min(len(heur_fn_o_qs), len(states))
    worker_ids = range(num_workers)

    # Write data
    states_nnet: List[np.ndarray] = env.state_to_nnet_input(states)
    chunks = np.array_split(np.arange(len(states)), num_workers)
    for wid in worker_ids:
        heur_fn_i_q.put((wid, [arr[chunks[wid]] for arr in states_nnet]))

    # Check until all data is obtaied
    gathered = [heur_fn_o_qs[wid].get() for wid in worker_ids]
    return np.concatenate(gathered, axis=0)
# parallel training
def heuristic_fn_queue(heuristic_fn_input_queue, heuristic_fn_output_queue, proc_id, env: Environment):
    """Build a heuristic fn that proxies evaluation through worker queues.

    The returned function converts states to nnet format, ships them to
    the runner tagged with ``proc_id``, and blocks for the answer.
    """
    def heuristic_fn(states):
        heuristic_fn_input_queue.put((proc_id, env.state_to_nnet_input(states)))
        return heuristic_fn_output_queue.get()

    return heuristic_fn
def heuristic_fn_runner(heuristic_fn_input_queue: Queue, heuristic_fn_output_queues, nnet_dir: str,
                        device, on_gpu: bool, gpu_num: int, env: "Environment", all_zeros: bool,
                        clip_zero: bool, batch_size: Optional[int]):
    """Worker loop answering heuristic requests from the input queue.

    Each request is ``(proc_id, states_nnet)``; the resulting cost-to-go
    array is pushed to ``heuristic_fn_output_queues[proc_id]``.  A request
    with ``proc_id is None`` terminates the loop.  When ``all_zeros`` is
    set, no network is loaded and every state gets a zero heuristic.

    Returns the loaded heuristic function (``None`` when ``all_zeros``).
    """
    heuristic_fn = None
    if not all_zeros:
        heuristic_fn = load_heuristic_fn(nnet_dir, device, on_gpu, env.get_nnet_model(), env, gpu_num=gpu_num,
                                          clip_zero=clip_zero, batch_size=batch_size)

    while True:
        proc_id, states_nnet = heuristic_fn_input_queue.get()
        if proc_id is None:
            break

        if all_zeros:
            # BUGFIX: np.float was removed in NumPy 1.24; the builtin float
            # is the same float64 dtype the old alias mapped to
            heuristics = np.zeros(states_nnet[0].shape[0], dtype=float)
        else:
            heuristics = heuristic_fn(states_nnet, is_nnet_format=True)

        heuristic_fn_output_queues[proc_id].put(heuristics)

    return heuristic_fn
def start_heur_fn_runners(num_procs: int, nnet_dir: str, device, on_gpu: bool, env: Environment,
all_zeros: bool = False, clip_zero: bool = False, batch_size: Optional[int] = None):
ctx = get_context("spawn")
heuristic_fn_input_queue: ctx.Queue = ctx.Queue()
heuristic_fn_output_queues: List[ctx.Queue] = []
for _ in range(num_procs):
heuristic_fn_output_queue: ctx.Queue = ctx.Queue(1)
heuristic_fn_output_queues.append(heuristic_fn_output_queue)
# initialize heuristic procs
gpu_nums = get_available_gpu_nums() or [-1]
heur_procs: List[ctx.Process] = []
for gpu_num in gpu_nums:
heur_proc = ctx.Process(target=heuristic_fn_runner,
args=(heuristic_fn_input_queue, heuristic_fn_output_queues,
nnet_dir, device, on_gpu, gpu_num, env, all_zeros, clip_zero, batch_size))
heur_proc.daemon = True
heur_proc.start()
heur_procs.append(heur_proc)
return heuristic_fn_input_queue, heuristic_fn_output_queues, heur_procs
def stop_heuristic_fn_runners(heur_procs, heuristic_fn_input_queue):
for _ in heur_procs:
heuristic_fn_input_queue.put((None, None))
for heur_proc in heur_procs:
heur_proc.join()
|
[
"numpy.random.choice",
"torch.nn.MSELoss",
"numpy.maximum",
"random.shuffle",
"torch.load",
"torch.multiprocessing.get_context",
"numpy.zeros",
"torch.nn.DataParallel",
"time.time",
"os.environ.get",
"torch.cuda.is_available",
"torch.device",
"collections.OrderedDict",
"torch.tensor",
"re.sub",
"numpy.concatenate"
] |
[((904, 963), 'numpy.random.choice', 'np.random.choice', (['num_examples', 'num_examples'], {'replace': '(False)'}), '(num_examples, num_examples, replace=False)\n', (920, 963), True, 'import numpy as np\n'), ((1661, 1673), 'torch.nn.MSELoss', 'nn.MSELoss', ([], {}), '()\n', (1671, 1673), False, 'from torch import nn\n'), ((1789, 1800), 'time.time', 'time.time', ([], {}), '()\n', (1798, 1800), False, 'import time\n'), ((3664, 3683), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (3676, 3683), False, 'import torch\n'), ((4228, 4241), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (4239, 4241), False, 'from collections import OrderedDict\n'), ((6109, 6147), 'os.environ.get', 'os.environ.get', (['"""CUDA_VISIBLE_DEVICES"""'], {}), "('CUDA_VISIBLE_DEVICES')\n", (6123, 6147), False, 'import os\n'), ((7510, 7541), 'numpy.concatenate', 'np.concatenate', (['results'], {'axis': '(0)'}), '(results, axis=0)\n', (7524, 7541), True, 'import numpy as np\n'), ((9061, 9081), 'torch.multiprocessing.get_context', 'get_context', (['"""spawn"""'], {}), "('spawn')\n", (9072, 9081), False, 'from torch.multiprocessing import Queue, get_context\n'), ((586, 624), 'torch.tensor', 'torch.tensor', (['tensor_np'], {'device': 'device'}), '(tensor_np, device=device)\n', (598, 624), False, 'import torch\n'), ((2561, 2606), 'torch.tensor', 'torch.tensor', (['targets_batch_np'], {'device': 'device'}), '(targets_batch_np, device=device)\n', (2573, 2606), False, 'import torch\n'), ((3778, 3803), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (3801, 3803), False, 'import torch\n'), ((3822, 3849), 'torch.device', 'torch.device', (["('cuda:%i' % 0)"], {}), "('cuda:%i' % 0)\n", (3834, 3849), False, 'import torch\n'), ((4081, 4103), 'torch.load', 'torch.load', (['model_file'], {}), '(model_file)\n', (4091, 4103), False, 'import torch\n'), ((4135, 4178), 'torch.load', 'torch.load', (['model_file'], {'map_location': 'device'}), '(model_file, 
map_location=device)\n', (4145, 4178), False, 'import torch\n'), ((4290, 4317), 're.sub', 're.sub', (['"""^module\\\\."""', '""""""', 'k'], {}), "('^module\\\\.', '', k)\n", (4296, 4317), False, 'import re\n'), ((4747, 4758), 'numpy.zeros', 'np.zeros', (['(0)'], {}), '(0)\n', (4755, 4758), True, 'import numpy as np\n'), ((6693, 6714), 'torch.nn.DataParallel', 'nn.DataParallel', (['nnet'], {}), '(nnet)\n', (6708, 6714), False, 'from torch import nn\n'), ((3374, 3385), 'time.time', 'time.time', ([], {}), '()\n', (3383, 3385), False, 'import time\n'), ((3495, 3511), 'random.shuffle', 'shuffle', (['batches'], {}), '(batches)\n', (3502, 3511), False, 'from random import shuffle\n'), ((5756, 5816), 'numpy.concatenate', 'np.concatenate', (['(cost_to_go, cost_to_go_batch[:, 0])'], {'axis': '(0)'}), '((cost_to_go, cost_to_go_batch[:, 0]), axis=0)\n', (5770, 5816), True, 'import numpy as np\n'), ((5955, 5982), 'numpy.maximum', 'np.maximum', (['cost_to_go', '(0.0)'], {}), '(cost_to_go, 0.0)\n', (5965, 5982), True, 'import numpy as np\n'), ((8619, 8668), 'numpy.zeros', 'np.zeros', (['states_nnet[0].shape[0]'], {'dtype': 'np.float'}), '(states_nnet[0].shape[0], dtype=np.float)\n', (8627, 8668), True, 'import numpy as np\n'), ((3321, 3332), 'time.time', 'time.time', ([], {}), '()\n', (3330, 3332), False, 'import time\n')]
|
from cms.models import CMSPlugin
from django.db import models
from django.utils.translation import ugettext_lazy as _
class CarouselPlugin(CMSPlugin):
interval = models.PositiveIntegerField(_('Interval'), default=5)
title = models.CharField(_('Title'), max_length=255, default='', blank=True)
def __str__(self):
return self.title or str(self.pk)
|
[
"django.utils.translation.ugettext_lazy"
] |
[((196, 209), 'django.utils.translation.ugettext_lazy', '_', (['"""Interval"""'], {}), "('Interval')\n", (197, 209), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((251, 261), 'django.utils.translation.ugettext_lazy', '_', (['"""Title"""'], {}), "('Title')\n", (252, 261), True, 'from django.utils.translation import ugettext_lazy as _\n')]
|
"""
because tests are good?
"""
import pytorch_lightning as pl
from pytorch_lightning import Trainer
from pytorch_lightning.callbacks import ModelCheckpoint
from simple_qnet import QNetLightning
import gym
import pandas as pd
env = gym.make("CartPole-v0")
# from env_catch import CatchEnv
# env = CatchEnv({"simplify": True})
model = QNetLightning(env)
trainer = Trainer(max_epochs=1000)
trainer.fit(model)
print(model.test_model())
|
[
"pytorch_lightning.Trainer",
"gym.make",
"simple_qnet.QNetLightning"
] |
[((236, 259), 'gym.make', 'gym.make', (['"""CartPole-v0"""'], {}), "('CartPole-v0')\n", (244, 259), False, 'import gym\n'), ((341, 359), 'simple_qnet.QNetLightning', 'QNetLightning', (['env'], {}), '(env)\n', (354, 359), False, 'from simple_qnet import QNetLightning\n'), ((371, 395), 'pytorch_lightning.Trainer', 'Trainer', ([], {'max_epochs': '(1000)'}), '(max_epochs=1000)\n', (378, 395), False, 'from pytorch_lightning import Trainer\n')]
|
from report1 import main_report1
from report2 import main_report2
from report3 import main_report3
from report4 import main_report4
def banner(message, border='*'):
line = border * 73
print("\n")
print(line)
print(message)
print(line)
def report1():
print("Iniciando informe 1...")
main_report1()
def report2():
print("Iniciando informe 2...")
main_report2()
def report3():
print("Iniciando informe 3...")
main_report3()
def report4():
print("Iniciando informe 4...")
main_report4()
def callreport(selectedrep):
if selectedrep == 1:
report1()
elif selectedrep == 2:
report2()
elif selectedrep == 3:
report3()
elif selectedrep == 4:
report4()
else:
print("\n\nOpción no valida!")
main()
def main():
message = '\nBIENVENIDO A YGROUP -- INFORMES DE BIXI\n\nPorfavor, escriba el numero del informe desado.\n\n1. Histograma de tiempos de viaje para un año dado\n2. Listado del Top N de estaciones más utilizadas para un año dado\n3. Listado del Top N de viajes más comunes para un año dado\n4. Identificación de horas punta para un año\n\nPresione Ctrl+Z y ENTER para salir.\n'
banner(message)
selectedrep = input("Escriba el numero (1-4): ")
print("Informe ", selectedrep, " seleccionado")
callreport(int(selectedrep))
main()
if __name__ == '__main__':
main()
|
[
"report1.main_report1",
"report2.main_report2",
"report4.main_report4",
"report3.main_report3"
] |
[((328, 342), 'report1.main_report1', 'main_report1', ([], {}), '()\n', (340, 342), False, 'from report1 import main_report1\n'), ((401, 415), 'report2.main_report2', 'main_report2', ([], {}), '()\n', (413, 415), False, 'from report2 import main_report2\n'), ((474, 488), 'report3.main_report3', 'main_report3', ([], {}), '()\n', (486, 488), False, 'from report3 import main_report3\n'), ((547, 561), 'report4.main_report4', 'main_report4', ([], {}), '()\n', (559, 561), False, 'from report4 import main_report4\n')]
|
import lights
from tools import xbmclog
class AmbilightController(lights.Controller):
def __init__(self, *args, **kwargs):
super(AmbilightController, self).__init__(*args, **kwargs)
def on_playback_start(self):
if self.settings.ambilight_start_dim_enable:
self.save_state_as_initial()
xbmclog('Kodi Hue: In AmbilightController.on_playback_start() '
'dimming ambilight group')
self.set_state(
bri=self.settings.ambilight_start_dim,
force_on=self.settings.force_light_on,
)
def on_playback_pause(self):
if self.settings.ambilight_start_dim_enable:
xbmclog('Kodi Hue: In AmbilightController.on_playback_pause() '
'undimming ambilight group')
if self.settings.ambilight_pause_bri_override:
bri = self.settings.ambilight_pause_bri
self.set_state(
bri=bri,
force_on=self.settings.force_light_on,
)
else:
self.restore_initial_state(
force_on=self.settings.force_light_on,
)
def on_playback_stop(self):
if self.settings.ambilight_start_dim_enable:
xbmclog('Kodi Hue: In AmbilightController.on_playback_stop() '
'undimming ambilight group')
if self.settings.ambilight_stop_bri_override:
self.set_state(
bri=self.settings.ambilight_stop_bri,
force_on=self.settings.force_light_on,
)
else:
self.restore_initial_state(
force_on=self.settings.force_light_on,
)
else:
self.restore_initial_state(
force_on=self.settings.force_light_on,
)
|
[
"tools.xbmclog"
] |
[((337, 434), 'tools.xbmclog', 'xbmclog', (['"""Kodi Hue: In AmbilightController.on_playback_start() dimming ambilight group"""'], {}), "(\n 'Kodi Hue: In AmbilightController.on_playback_start() dimming ambilight group'\n )\n", (344, 434), False, 'from tools import xbmclog\n'), ((699, 798), 'tools.xbmclog', 'xbmclog', (['"""Kodi Hue: In AmbilightController.on_playback_pause() undimming ambilight group"""'], {}), "(\n 'Kodi Hue: In AmbilightController.on_playback_pause() undimming ambilight group'\n )\n", (706, 798), False, 'from tools import xbmclog\n'), ((1302, 1400), 'tools.xbmclog', 'xbmclog', (['"""Kodi Hue: In AmbilightController.on_playback_stop() undimming ambilight group"""'], {}), "(\n 'Kodi Hue: In AmbilightController.on_playback_stop() undimming ambilight group'\n )\n", (1309, 1400), False, 'from tools import xbmclog\n')]
|
import cv2
import numpy as np
from imutils import perspective, rotate_bound
from pymatting import estimate_alpha_knn, estimate_foreground_ml, stack_images
from typing import Tuple
PAPER_SIZE = (1485, 1050)
def find_paper(image_bgr: np.ndarray) -> np.ndarray:
image_hsv = cv2.cvtColor(image_bgr, cv2.COLOR_BGR2HSV)
paper_mask = cv2.inRange(image_hsv, (0, 0, 90), (180, 60, 255))
contours, _ = cv2.findContours(paper_mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
paper_contour = max(contours, key=cv2.contourArea)
eps = 1
while paper_contour.shape[0] > 4:
paper_contour = cv2.approxPolyDP(paper_contour, eps, True)
eps += 1
paper_contour = np.squeeze(paper_contour)
paper_image_bgr = perspective.four_point_transform(image_bgr, paper_contour)
return cv2.resize(paper_image_bgr, PAPER_SIZE if image_bgr.shape[1] > image_bgr.shape[0] else PAPER_SIZE[::-1])
def get_object_trimap(paper_image_bgr: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
paper_image_gray = cv2.cvtColor(paper_image_bgr, cv2.COLOR_BGR2GRAY)
# Reshaping the image into a 2D array of pixels and 3 color values (RGB)
pixel_vals = paper_image_gray.reshape((-1, 1))
# Convert to float type
pixel_vals = np.float32(pixel_vals)
k = 3
retval, labels, centers = cv2.kmeans(pixel_vals, k, None, None, None, cv2.KMEANS_PP_CENTERS)
# convert data into 8-bit values
centers = np.uint8(centers)
darkest_component_mask = np.uint8(np.ones(paper_image_gray.shape) * 255)
darkest_component_mask[labels.reshape(paper_image_gray.shape) == np.argmin(centers)] = 0
contours, _ = cv2.findContours(darkest_component_mask, cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)
contours_new = []
border_size = 5
for contour in contours:
if np.min(contour[:, :, 0]) > border_size \
and np.min(contour[:, :, 1]) > border_size \
and np.max(contour[:, :, 0]) < darkest_component_mask.shape[1] - border_size \
and np.max(contour[:, :, 1]) < darkest_component_mask.shape[0] - border_size \
and cv2.contourArea(contour) > 150:
contours_new.append(contour)
convex_hulls = []
for contour_new in contours_new:
convex_hulls.append(cv2.convexHull(contour_new))
convex_hull = cv2.convexHull(np.concatenate(convex_hulls))
mask_by_countour = np.uint8(np.ones(paper_image_gray.shape) * 255)
cv2.drawContours(mask_by_countour, [convex_hull], -1, 0, -1)
eroded_mask_by_countour = cv2.erode(mask_by_countour, (30, 30), iterations=9)
trimap = 255 - eroded_mask_by_countour
trimap[trimap == 255] = 128
trimap[np.logical_and(trimap == 128, labels.reshape(paper_image_gray.shape) == np.argmin(centers))] = 255
return trimap, convex_hull
def find_object(image: np.ndarray) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
image_bgr = image
image_bgr = cv2.resize(image_bgr, (1920, 1080) if image_bgr.shape[1] > image_bgr.shape[0] else (1080, 1920))
paper_image_bgr = find_paper(image_bgr)
trimap, convex_hull = get_object_trimap(paper_image_bgr)
paper_image_bgr_scaled = cv2.cvtColor(paper_image_bgr, cv2.COLOR_BGR2RGB) / 255.0
trimap_scaled = trimap / 255.0
# alpha = estimate_alpha_knn(paper_image_bgr_scaled, trimap_scaled)
alpha = np.zeros_like(trimap_scaled)
alpha[trimap_scaled > 0] = 1
return paper_image_bgr, np.squeeze(convex_hull, 1), np.uint8(alpha * 255)
|
[
"cv2.approxPolyDP",
"numpy.ones",
"numpy.argmin",
"cv2.erode",
"cv2.inRange",
"cv2.contourArea",
"numpy.zeros_like",
"cv2.cvtColor",
"numpy.max",
"imutils.perspective.four_point_transform",
"cv2.drawContours",
"cv2.resize",
"numpy.uint8",
"numpy.min",
"cv2.convexHull",
"numpy.squeeze",
"numpy.concatenate",
"numpy.float32",
"cv2.kmeans",
"cv2.findContours"
] |
[((280, 322), 'cv2.cvtColor', 'cv2.cvtColor', (['image_bgr', 'cv2.COLOR_BGR2HSV'], {}), '(image_bgr, cv2.COLOR_BGR2HSV)\n', (292, 322), False, 'import cv2\n'), ((340, 390), 'cv2.inRange', 'cv2.inRange', (['image_hsv', '(0, 0, 90)', '(180, 60, 255)'], {}), '(image_hsv, (0, 0, 90), (180, 60, 255))\n', (351, 390), False, 'import cv2\n'), ((409, 481), 'cv2.findContours', 'cv2.findContours', (['paper_mask', 'cv2.RETR_EXTERNAL', 'cv2.CHAIN_APPROX_SIMPLE'], {}), '(paper_mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)\n', (425, 481), False, 'import cv2\n'), ((691, 716), 'numpy.squeeze', 'np.squeeze', (['paper_contour'], {}), '(paper_contour)\n', (701, 716), True, 'import numpy as np\n'), ((739, 797), 'imutils.perspective.four_point_transform', 'perspective.four_point_transform', (['image_bgr', 'paper_contour'], {}), '(image_bgr, paper_contour)\n', (771, 797), False, 'from imutils import perspective, rotate_bound\n'), ((809, 918), 'cv2.resize', 'cv2.resize', (['paper_image_bgr', '(PAPER_SIZE if image_bgr.shape[1] > image_bgr.shape[0] else PAPER_SIZE[::-1])'], {}), '(paper_image_bgr, PAPER_SIZE if image_bgr.shape[1] > image_bgr.\n shape[0] else PAPER_SIZE[::-1])\n', (819, 918), False, 'import cv2\n'), ((1024, 1073), 'cv2.cvtColor', 'cv2.cvtColor', (['paper_image_bgr', 'cv2.COLOR_BGR2GRAY'], {}), '(paper_image_bgr, cv2.COLOR_BGR2GRAY)\n', (1036, 1073), False, 'import cv2\n'), ((1247, 1269), 'numpy.float32', 'np.float32', (['pixel_vals'], {}), '(pixel_vals)\n', (1257, 1269), True, 'import numpy as np\n'), ((1310, 1376), 'cv2.kmeans', 'cv2.kmeans', (['pixel_vals', 'k', 'None', 'None', 'None', 'cv2.KMEANS_PP_CENTERS'], {}), '(pixel_vals, k, None, None, None, cv2.KMEANS_PP_CENTERS)\n', (1320, 1376), False, 'import cv2\n'), ((1428, 1445), 'numpy.uint8', 'np.uint8', (['centers'], {}), '(centers)\n', (1436, 1445), True, 'import numpy as np\n'), ((1636, 1721), 'cv2.findContours', 'cv2.findContours', (['darkest_component_mask', 'cv2.RETR_LIST', 'cv2.CHAIN_APPROX_SIMPLE'], {}), 
'(darkest_component_mask, cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE\n )\n', (1652, 1721), False, 'import cv2\n'), ((2440, 2500), 'cv2.drawContours', 'cv2.drawContours', (['mask_by_countour', '[convex_hull]', '(-1)', '(0)', '(-1)'], {}), '(mask_by_countour, [convex_hull], -1, 0, -1)\n', (2456, 2500), False, 'import cv2\n'), ((2531, 2582), 'cv2.erode', 'cv2.erode', (['mask_by_countour', '(30, 30)'], {'iterations': '(9)'}), '(mask_by_countour, (30, 30), iterations=9)\n', (2540, 2582), False, 'import cv2\n'), ((2920, 3021), 'cv2.resize', 'cv2.resize', (['image_bgr', '((1920, 1080) if image_bgr.shape[1] > image_bgr.shape[0] else (1080, 1920))'], {}), '(image_bgr, (1920, 1080) if image_bgr.shape[1] > image_bgr.shape[\n 0] else (1080, 1920))\n', (2930, 3021), False, 'import cv2\n'), ((3330, 3358), 'numpy.zeros_like', 'np.zeros_like', (['trimap_scaled'], {}), '(trimap_scaled)\n', (3343, 3358), True, 'import numpy as np\n'), ((611, 653), 'cv2.approxPolyDP', 'cv2.approxPolyDP', (['paper_contour', 'eps', '(True)'], {}), '(paper_contour, eps, True)\n', (627, 653), False, 'import cv2\n'), ((2334, 2362), 'numpy.concatenate', 'np.concatenate', (['convex_hulls'], {}), '(convex_hulls)\n', (2348, 2362), True, 'import numpy as np\n'), ((3153, 3201), 'cv2.cvtColor', 'cv2.cvtColor', (['paper_image_bgr', 'cv2.COLOR_BGR2RGB'], {}), '(paper_image_bgr, cv2.COLOR_BGR2RGB)\n', (3165, 3201), False, 'import cv2\n'), ((3421, 3447), 'numpy.squeeze', 'np.squeeze', (['convex_hull', '(1)'], {}), '(convex_hull, 1)\n', (3431, 3447), True, 'import numpy as np\n'), ((3449, 3470), 'numpy.uint8', 'np.uint8', (['(alpha * 255)'], {}), '(alpha * 255)\n', (3457, 3470), True, 'import numpy as np\n'), ((1485, 1516), 'numpy.ones', 'np.ones', (['paper_image_gray.shape'], {}), '(paper_image_gray.shape)\n', (1492, 1516), True, 'import numpy as np\n'), ((1593, 1611), 'numpy.argmin', 'np.argmin', (['centers'], {}), '(centers)\n', (1602, 1611), True, 'import numpy as np\n'), ((2272, 2299), 'cv2.convexHull', 
'cv2.convexHull', (['contour_new'], {}), '(contour_new)\n', (2286, 2299), False, 'import cv2\n'), ((2397, 2428), 'numpy.ones', 'np.ones', (['paper_image_gray.shape'], {}), '(paper_image_gray.shape)\n', (2404, 2428), True, 'import numpy as np\n'), ((1799, 1823), 'numpy.min', 'np.min', (['contour[:, :, 0]'], {}), '(contour[:, :, 0])\n', (1805, 1823), True, 'import numpy as np\n'), ((1860, 1884), 'numpy.min', 'np.min', (['contour[:, :, 1]'], {}), '(contour[:, :, 1])\n', (1866, 1884), True, 'import numpy as np\n'), ((1921, 1945), 'numpy.max', 'np.max', (['contour[:, :, 0]'], {}), '(contour[:, :, 0])\n', (1927, 1945), True, 'import numpy as np\n'), ((2016, 2040), 'numpy.max', 'np.max', (['contour[:, :, 1]'], {}), '(contour[:, :, 1])\n', (2022, 2040), True, 'import numpy as np\n'), ((2111, 2135), 'cv2.contourArea', 'cv2.contourArea', (['contour'], {}), '(contour)\n', (2126, 2135), False, 'import cv2\n'), ((2741, 2759), 'numpy.argmin', 'np.argmin', (['centers'], {}), '(centers)\n', (2750, 2759), True, 'import numpy as np\n')]
|
from collections import namedtuple
from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions
"""
Maps State to the ball positions on the pegs.
Possible ball positions are as follows:
- - 33
- 22 32
11 21 31
First number denotes position of the red ball,
second number denotes position if the green ball
and third number is position of the blue ball
"""
state_ball_mapper = {
TolState(1, 1): BallPositions(31, 32, 33),
TolState(1, 2): BallPositions(31, 32, 11),
TolState(1, 3): BallPositions(31, 32, 21),
TolState(1, 4): BallPositions(31, 22, 21),
TolState(1, 5): BallPositions(31, 11, 21),
TolState(1, 6): BallPositions(22, 11, 21),
TolState(2, 1): BallPositions(31, 33, 32),
TolState(2, 2): BallPositions(31, 11, 32),
TolState(2, 3): BallPositions(31, 21, 32),
TolState(2, 4): BallPositions(31, 21, 22),
TolState(2, 5): BallPositions(31, 21, 11),
TolState(2, 6): BallPositions(22, 21, 11),
TolState(3, 1): BallPositions(32, 32, 31),
TolState(3, 2): BallPositions(32, 11, 31),
TolState(3, 3): BallPositions(32, 21, 31),
TolState(3, 4): BallPositions(22, 21, 31),
TolState(3, 5): BallPositions(11, 21, 31),
TolState(3, 6): BallPositions(11, 21, 22),
TolState(4, 1): BallPositions(33, 32, 31),
TolState(4, 2): BallPositions(11, 32, 31),
TolState(4, 3): BallPositions(21, 32, 31),
TolState(4, 4): BallPositions(21, 22, 31),
TolState(4, 5): BallPositions(21, 11, 31),
TolState(4, 6): BallPositions(21, 11, 22),
TolState(5, 1): BallPositions(33, 31, 32),
TolState(5, 2): BallPositions(11, 31, 32),
TolState(5, 3): BallPositions(21, 31, 32),
TolState(5, 4): BallPositions(21, 31, 22),
TolState(5, 5): BallPositions(21, 31, 11),
TolState(5, 6): BallPositions(21, 22, 11),
TolState(6, 1): BallPositions(32, 31, 33),
TolState(6, 2): BallPositions(32, 31, 11),
TolState(6, 3): BallPositions(32, 31, 21),
TolState(6, 4): BallPositions(22, 31, 21),
TolState(6, 5): BallPositions(11, 31, 21),
TolState(6, 6): BallPositions(11, 22, 21),
}
class ObservationSpaceCoordinates:
"""
Class that holds matrix of all possible ball positions (x, y)
of the Tower of London Task.
"""
def __init__(self, x, y, height, aspect_ratio):
"""
Creates a coordinate matrix with x, y, r positions
:param x: Starting position x
:param height: Height
:param aspect_ratio: Aspect ratio
"""
self.PositionCoordinates = namedtuple('PositionCoordinates', ['x', 'y'])
length = height / aspect_ratio
self.x = x
self.height = height
self.length = length
self.radius = (length - height) / 5
self.y = y
self.coordinate_matrix = self._create_coordinates()
def _create_coordinates(self):
add_x = self.length - self.height
r0_b0 = self.PositionCoordinates(self.x, self.y)
r1_b0 = self.PositionCoordinates(self.x + add_x, self.y)
r1_b1 = self.PositionCoordinates(self.x + add_x, self.y + self.radius * 2)
r2_b0 = self.PositionCoordinates(self.x + add_x * 2, self.y)
r2_b1 = self.PositionCoordinates(self.x + add_x * 2, self.y + self.radius * 2)
r2_b2 = self.PositionCoordinates(self.x + add_x * 2, self.y + self.radius * 4)
matrix_dict = {11: r0_b0, 21: r1_b0, 22: r1_b1, 31: r2_b0, 32: r2_b1, 33: r2_b2}
return matrix_dict
def get_position_coordinates(self, position):
"""
Returns named tuple which represents the coordinates
of particular position in the observation space.
Position is an integer where first number represents
the peg, and second number represents the position of
the peg.
:param position: integer can be 11, 21, 22, 31, 32, 33
:return: Named tuple: PositionCoordinates which holds
x, y and r positions to draw a ball there.
"""
return self.coordinate_matrix.get(position)
|
[
"envs.custom_tol_env_dir.tol_2d.state.BallPositions",
"collections.namedtuple",
"envs.custom_tol_env_dir.tol_2d.state.TolState"
] |
[((416, 430), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(1)', '(1)'], {}), '(1, 1)\n', (424, 430), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((464, 478), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(1)', '(2)'], {}), '(1, 2)\n', (472, 478), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((512, 526), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(1)', '(3)'], {}), '(1, 3)\n', (520, 526), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((560, 574), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(1)', '(4)'], {}), '(1, 4)\n', (568, 574), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((608, 622), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(1)', '(5)'], {}), '(1, 5)\n', (616, 622), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((656, 670), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(1)', '(6)'], {}), '(1, 6)\n', (664, 670), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((706, 720), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(2)', '(1)'], {}), '(2, 1)\n', (714, 720), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((754, 768), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(2)', '(2)'], {}), '(2, 2)\n', (762, 768), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((802, 816), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(2)', '(3)'], {}), '(2, 3)\n', (810, 816), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((850, 864), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(2)', '(4)'], {}), '(2, 4)\n', (858, 864), False, 'from 
envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((898, 912), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(2)', '(5)'], {}), '(2, 5)\n', (906, 912), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((946, 960), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(2)', '(6)'], {}), '(2, 6)\n', (954, 960), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((996, 1010), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(3)', '(1)'], {}), '(3, 1)\n', (1004, 1010), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1044, 1058), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(3)', '(2)'], {}), '(3, 2)\n', (1052, 1058), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1092, 1106), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(3)', '(3)'], {}), '(3, 3)\n', (1100, 1106), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1140, 1154), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(3)', '(4)'], {}), '(3, 4)\n', (1148, 1154), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1188, 1202), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(3)', '(5)'], {}), '(3, 5)\n', (1196, 1202), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1236, 1250), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(3)', '(6)'], {}), '(3, 6)\n', (1244, 1250), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1286, 1300), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(4)', '(1)'], {}), '(4, 1)\n', (1294, 1300), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1334, 1348), 
'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(4)', '(2)'], {}), '(4, 2)\n', (1342, 1348), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1382, 1396), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(4)', '(3)'], {}), '(4, 3)\n', (1390, 1396), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1430, 1444), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(4)', '(4)'], {}), '(4, 4)\n', (1438, 1444), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1478, 1492), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(4)', '(5)'], {}), '(4, 5)\n', (1486, 1492), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1526, 1540), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(4)', '(6)'], {}), '(4, 6)\n', (1534, 1540), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1576, 1590), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(5)', '(1)'], {}), '(5, 1)\n', (1584, 1590), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1624, 1638), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(5)', '(2)'], {}), '(5, 2)\n', (1632, 1638), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1672, 1686), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(5)', '(3)'], {}), '(5, 3)\n', (1680, 1686), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1720, 1734), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(5)', '(4)'], {}), '(5, 4)\n', (1728, 1734), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1768, 1782), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(5)', '(5)'], {}), '(5, 5)\n', (1776, 1782), 
False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1816, 1830), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(5)', '(6)'], {}), '(5, 6)\n', (1824, 1830), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1866, 1880), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(6)', '(1)'], {}), '(6, 1)\n', (1874, 1880), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1914, 1928), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(6)', '(2)'], {}), '(6, 2)\n', (1922, 1928), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1962, 1976), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(6)', '(3)'], {}), '(6, 3)\n', (1970, 1976), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((2010, 2024), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(6)', '(4)'], {}), '(6, 4)\n', (2018, 2024), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((2058, 2072), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(6)', '(5)'], {}), '(6, 5)\n', (2066, 2072), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((2106, 2120), 'envs.custom_tol_env_dir.tol_2d.state.TolState', 'TolState', (['(6)', '(6)'], {}), '(6, 6)\n', (2114, 2120), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((432, 457), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(31)', '(32)', '(33)'], {}), '(31, 32, 33)\n', (445, 457), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((480, 505), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(31)', '(32)', '(11)'], {}), '(31, 32, 11)\n', (493, 505), False, 'from envs.custom_tol_env_dir.tol_2d.state import 
TolState, BallPositions\n'), ((528, 553), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(31)', '(32)', '(21)'], {}), '(31, 32, 21)\n', (541, 553), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((576, 601), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(31)', '(22)', '(21)'], {}), '(31, 22, 21)\n', (589, 601), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((624, 649), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(31)', '(11)', '(21)'], {}), '(31, 11, 21)\n', (637, 649), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((672, 697), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(22)', '(11)', '(21)'], {}), '(22, 11, 21)\n', (685, 697), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((722, 747), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(31)', '(33)', '(32)'], {}), '(31, 33, 32)\n', (735, 747), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((770, 795), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(31)', '(11)', '(32)'], {}), '(31, 11, 32)\n', (783, 795), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((818, 843), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(31)', '(21)', '(32)'], {}), '(31, 21, 32)\n', (831, 843), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((866, 891), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(31)', '(21)', '(22)'], {}), '(31, 21, 22)\n', (879, 891), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((914, 939), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(31)', '(21)', '(11)'], 
{}), '(31, 21, 11)\n', (927, 939), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((962, 987), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(22)', '(21)', '(11)'], {}), '(22, 21, 11)\n', (975, 987), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1012, 1037), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(32)', '(32)', '(31)'], {}), '(32, 32, 31)\n', (1025, 1037), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1060, 1085), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(32)', '(11)', '(31)'], {}), '(32, 11, 31)\n', (1073, 1085), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1108, 1133), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(32)', '(21)', '(31)'], {}), '(32, 21, 31)\n', (1121, 1133), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1156, 1181), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(22)', '(21)', '(31)'], {}), '(22, 21, 31)\n', (1169, 1181), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1204, 1229), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(11)', '(21)', '(31)'], {}), '(11, 21, 31)\n', (1217, 1229), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1252, 1277), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(11)', '(21)', '(22)'], {}), '(11, 21, 22)\n', (1265, 1277), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1302, 1327), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(33)', '(32)', '(31)'], {}), '(33, 32, 31)\n', (1315, 1327), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, 
BallPositions\n'), ((1350, 1375), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(11)', '(32)', '(31)'], {}), '(11, 32, 31)\n', (1363, 1375), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1398, 1423), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(21)', '(32)', '(31)'], {}), '(21, 32, 31)\n', (1411, 1423), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1446, 1471), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(21)', '(22)', '(31)'], {}), '(21, 22, 31)\n', (1459, 1471), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1494, 1519), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(21)', '(11)', '(31)'], {}), '(21, 11, 31)\n', (1507, 1519), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1542, 1567), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(21)', '(11)', '(22)'], {}), '(21, 11, 22)\n', (1555, 1567), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1592, 1617), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(33)', '(31)', '(32)'], {}), '(33, 31, 32)\n', (1605, 1617), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1640, 1665), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(11)', '(31)', '(32)'], {}), '(11, 31, 32)\n', (1653, 1665), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1688, 1713), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(21)', '(31)', '(32)'], {}), '(21, 31, 32)\n', (1701, 1713), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1736, 1761), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', 
(['(21)', '(31)', '(22)'], {}), '(21, 31, 22)\n', (1749, 1761), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1784, 1809), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(21)', '(31)', '(11)'], {}), '(21, 31, 11)\n', (1797, 1809), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1832, 1857), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(21)', '(22)', '(11)'], {}), '(21, 22, 11)\n', (1845, 1857), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1882, 1907), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(32)', '(31)', '(33)'], {}), '(32, 31, 33)\n', (1895, 1907), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1930, 1955), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(32)', '(31)', '(11)'], {}), '(32, 31, 11)\n', (1943, 1955), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((1978, 2003), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(32)', '(31)', '(21)'], {}), '(32, 31, 21)\n', (1991, 2003), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((2026, 2051), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(22)', '(31)', '(21)'], {}), '(22, 31, 21)\n', (2039, 2051), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((2074, 2099), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(11)', '(31)', '(21)'], {}), '(11, 31, 21)\n', (2087, 2099), False, 'from envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((2122, 2147), 'envs.custom_tol_env_dir.tol_2d.state.BallPositions', 'BallPositions', (['(11)', '(22)', '(21)'], {}), '(11, 22, 21)\n', (2135, 2147), False, 'from 
envs.custom_tol_env_dir.tol_2d.state import TolState, BallPositions\n'), ((2601, 2646), 'collections.namedtuple', 'namedtuple', (['"""PositionCoordinates"""', "['x', 'y']"], {}), "('PositionCoordinates', ['x', 'y'])\n", (2611, 2646), False, 'from collections import namedtuple\n')]
|
from chips.api.api import Input, Output
from pytun import TunTapDevice, IFF_TAP, IFF_NO_PI
import Queue
import threading
class VirtualNetworkCard:
def __init__(self, ip='192.168.1.0', netmask='255.255.255.0'):
tap = TunTapDevice(flags=IFF_TAP|IFF_NO_PI, name="tap0")
tap.mtu = 1500
tap.addr = ip
tap.netmask = netmask
self.tap = tap
self.tap.up()
class NetworkOut(Output):
def __init__(self, chip, name, net):
Output.__init__(self, chip, name)
self.net = net
self.packet_len=0
self.buff=""
def data_sink(self, data):
if not self.packet_len:
self.packet_len = data
else:
byte = (data >> 8) & 0xff
self.buff += chr(byte)
self.packet_len -= 1
if self.packet_len:
byte = data & 0xff
self.buff += chr(byte)
self.packet_len -= 1
if not self.packet_len and self.buff:
self.net.tap.write(self.buff)
self.buff = ""
class NetworkIn(Input):
def __init__(self, chip, name, net):
Input.__init__(self, chip, name)
self.packet_len=0
self.buff = ""
self.src_rdy=False
self.queue = Queue.Queue(1)
self.net = net
t = threading.Thread(target=self.read_network)
t.start()
def read_network(self):
while 1:
self.queue.put(self.net.tap.read(1500))
self.next_src_rdy=True
def simulation_update(self):
self.dst_rdy = self.next_dst_rdy
self.src_rdy = self.next_src_rdy
if self.update_data:
self.q = self.data_source()
def data_source(self):
if not self.buff:
self.buff = self.queue.get()
self.queue.task_done()
self.next_src_rdy=False
return len(self.buff)
else:
byte = ord(self.buff[0])
word = byte << 8
self.buff = self.buff[1:]
if self.buff:
byte = ord(self.buff[0])
word |= byte & 0xff
self.buff = self.buff[1:]
return word
|
[
"threading.Thread",
"chips.api.api.Output.__init__",
"Queue.Queue",
"pytun.TunTapDevice",
"chips.api.api.Input.__init__"
] |
[((230, 282), 'pytun.TunTapDevice', 'TunTapDevice', ([], {'flags': '(IFF_TAP | IFF_NO_PI)', 'name': '"""tap0"""'}), "(flags=IFF_TAP | IFF_NO_PI, name='tap0')\n", (242, 282), False, 'from pytun import TunTapDevice, IFF_TAP, IFF_NO_PI\n'), ((477, 510), 'chips.api.api.Output.__init__', 'Output.__init__', (['self', 'chip', 'name'], {}), '(self, chip, name)\n', (492, 510), False, 'from chips.api.api import Input, Output\n'), ((1137, 1169), 'chips.api.api.Input.__init__', 'Input.__init__', (['self', 'chip', 'name'], {}), '(self, chip, name)\n', (1151, 1169), False, 'from chips.api.api import Input, Output\n'), ((1267, 1281), 'Queue.Queue', 'Queue.Queue', (['(1)'], {}), '(1)\n', (1278, 1281), False, 'import Queue\n'), ((1317, 1359), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.read_network'}), '(target=self.read_network)\n', (1333, 1359), False, 'import threading\n')]
|
# -*- coding: utf-8 -*-
import pytest
from pyramid.path import DottedNameResolver
from shapely.geometry import MultiPolygon, Polygon
from pyramid_oereb.lib.config import Config
from pyramid_oereb.lib.records.extract import ExtractRecord
from pyramid_oereb.lib.records.plr import PlrRecord
from pyramid_oereb.lib.records.real_estate import RealEstateRecord
from pyramid_oereb.lib.records.view_service import ViewServiceRecord
from pyramid_oereb.lib.records.municipality import MunicipalityRecord
from pyramid_oereb.lib.readers.extract import ExtractReader
from tests.mockrequest import MockParameter
plr_cadastre_authority = Config.get_plr_cadastre_authority()
plr_sources = []
for plr in Config.get('plrs'):
plr_source_class = DottedNameResolver().maybe_resolve(plr.get('source').get('class'))
plr_sources.append(plr_source_class(**plr))
real_estate = RealEstateRecord(u'test', u'BL', u'Laufen', 2770, 1000,
MultiPolygon([Polygon([(0, 0), (4, 4), (4, 0)])]),
ViewServiceRecord(
{'de': 'test_link'},
1,
1.0,
))
municipality = MunicipalityRecord(
2771,
u'FantasyMunicipality',
True,
geom=MultiPolygon()
)
@pytest.mark.run(order=2)
def test_init():
reader = ExtractReader(plr_sources, plr_cadastre_authority)
assert isinstance(reader._plr_sources_, list)
@pytest.mark.run(order=2)
def test_read():
reader = ExtractReader(plr_sources, plr_cadastre_authority)
extract = reader.read(MockParameter(), real_estate, municipality)
assert isinstance(extract, ExtractRecord)
plrs = extract.real_estate.public_law_restrictions
assert isinstance(plrs, list)
assert isinstance(plrs[0], PlrRecord)
assert plrs[3].theme.code == 'ch.BaulinienNationalstrassen'
assert plrs[3].law_status.code == 'inForce'
|
[
"shapely.geometry.Polygon",
"pyramid_oereb.lib.config.Config.get",
"pyramid_oereb.lib.records.view_service.ViewServiceRecord",
"pyramid_oereb.lib.config.Config.get_plr_cadastre_authority",
"shapely.geometry.MultiPolygon",
"tests.mockrequest.MockParameter",
"pyramid_oereb.lib.readers.extract.ExtractReader",
"pytest.mark.run",
"pyramid.path.DottedNameResolver"
] |
[((627, 662), 'pyramid_oereb.lib.config.Config.get_plr_cadastre_authority', 'Config.get_plr_cadastre_authority', ([], {}), '()\n', (660, 662), False, 'from pyramid_oereb.lib.config import Config\n'), ((692, 710), 'pyramid_oereb.lib.config.Config.get', 'Config.get', (['"""plrs"""'], {}), "('plrs')\n", (702, 710), False, 'from pyramid_oereb.lib.config import Config\n'), ((1354, 1378), 'pytest.mark.run', 'pytest.mark.run', ([], {'order': '(2)'}), '(order=2)\n', (1369, 1378), False, 'import pytest\n'), ((1513, 1537), 'pytest.mark.run', 'pytest.mark.run', ([], {'order': '(2)'}), '(order=2)\n', (1528, 1537), False, 'import pytest\n'), ((1034, 1080), 'pyramid_oereb.lib.records.view_service.ViewServiceRecord', 'ViewServiceRecord', (["{'de': 'test_link'}", '(1)', '(1.0)'], {}), "({'de': 'test_link'}, 1, 1.0)\n", (1051, 1080), False, 'from pyramid_oereb.lib.records.view_service import ViewServiceRecord\n'), ((1409, 1459), 'pyramid_oereb.lib.readers.extract.ExtractReader', 'ExtractReader', (['plr_sources', 'plr_cadastre_authority'], {}), '(plr_sources, plr_cadastre_authority)\n', (1422, 1459), False, 'from pyramid_oereb.lib.readers.extract import ExtractReader\n'), ((1568, 1618), 'pyramid_oereb.lib.readers.extract.ExtractReader', 'ExtractReader', (['plr_sources', 'plr_cadastre_authority'], {}), '(plr_sources, plr_cadastre_authority)\n', (1581, 1618), False, 'from pyramid_oereb.lib.readers.extract import ExtractReader\n'), ((1330, 1344), 'shapely.geometry.MultiPolygon', 'MultiPolygon', ([], {}), '()\n', (1342, 1344), False, 'from shapely.geometry import MultiPolygon, Polygon\n'), ((1645, 1660), 'tests.mockrequest.MockParameter', 'MockParameter', ([], {}), '()\n', (1658, 1660), False, 'from tests.mockrequest import MockParameter\n'), ((735, 755), 'pyramid.path.DottedNameResolver', 'DottedNameResolver', ([], {}), '()\n', (753, 755), False, 'from pyramid.path import DottedNameResolver\n'), ((966, 999), 'shapely.geometry.Polygon', 'Polygon', (['[(0, 0), (4, 4), (4, 0)]'], {}), 
'([(0, 0), (4, 4), (4, 0)])\n', (973, 999), False, 'from shapely.geometry import MultiPolygon, Polygon\n')]
|
import os
os.path.dirname(os.path.abspath(__file__)+'/../../')
from QNetbots.core_bot_api.matrix_bot_api import MatrixBotAPI
from QNetbots.core_bot_api.mregex_handler import MRegexHandler
from QNetbots.core_bot_api.mcommand_handler import MCommandHandler
class Bot(object):
def __init__(self, USERNAME,PASSWORD,SERVER):
self.bot = MatrixBotAPI(USERNAME,PASSWORD,SERVER)
def add_handler(self, handler):
self.bot.add_handler(handler)
def add_general_listener(self,handler):
self.bot.add_general_listener(handler)
def start_polling(self):
self.bot.start_polling()
def add_handlers(self, handlers):
for handler in handlers:
self.bot.add_handler(handler)
@staticmethod
def create_regex_base(regex, callback_f):
return MRegexHandler(regex, callback_f)
@staticmethod
def create_command(command, callback_f):
return MCommandHandler(command, callback_f)
|
[
"os.path.abspath",
"QNetbots.core_bot_api.matrix_bot_api.MatrixBotAPI",
"QNetbots.core_bot_api.mcommand_handler.MCommandHandler",
"QNetbots.core_bot_api.mregex_handler.MRegexHandler"
] |
[((26, 51), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (41, 51), False, 'import os\n'), ((346, 386), 'QNetbots.core_bot_api.matrix_bot_api.MatrixBotAPI', 'MatrixBotAPI', (['USERNAME', 'PASSWORD', 'SERVER'], {}), '(USERNAME, PASSWORD, SERVER)\n', (358, 386), False, 'from QNetbots.core_bot_api.matrix_bot_api import MatrixBotAPI\n'), ((809, 841), 'QNetbots.core_bot_api.mregex_handler.MRegexHandler', 'MRegexHandler', (['regex', 'callback_f'], {}), '(regex, callback_f)\n', (822, 841), False, 'from QNetbots.core_bot_api.mregex_handler import MRegexHandler\n'), ((921, 957), 'QNetbots.core_bot_api.mcommand_handler.MCommandHandler', 'MCommandHandler', (['command', 'callback_f'], {}), '(command, callback_f)\n', (936, 957), False, 'from QNetbots.core_bot_api.mcommand_handler import MCommandHandler\n')]
|
#!/usr/bin/env python3
from unittest.mock import call, patch
import pytest
from pytest import raises
from vang.tfs.get_projects import get_projects, main, parse_args
def test_get_projects():
assert [] == get_projects(None)
assert [] == get_projects([])
with patch(
'vang.tfs.get_projects.call',
return_value={
'count':
1,
'value': [{
'id': 'id',
'name': 'project',
'revision': 272509,
'state': 'wellFormed',
'url': 'remoteUrl',
'visibility': 'private'
}]
},
autospec=True,
):
assert [('organisation', {
'id': 'id',
'name': 'project',
'revision': 272509,
'state': 'wellFormed',
'url': 'remoteUrl',
'visibility': 'private'
})] == get_projects(['organisation'])
assert ['organisation/project'] == get_projects(['organisation'],
project_specs=True)
assert ['project'] == get_projects(['organisation'], names=True)
assert ['project'] == get_projects(['organisation'],
project_specs=True,
names=True)
@pytest.mark.parametrize("args", [
'',
'-n n -p -p',
])
def test_parse_args_raises(args):
with raises(SystemExit):
parse_args(args.split(' ') if args else args)
@pytest.mark.parametrize("args, expected", [
[
'o1 o2',
{
'names': False,
'organisations': ['o1', 'o2'],
'project_specs': False
}
],
['o1 -n', {
'names': True,
'organisations': ['o1'],
'project_specs': False
}],
['o1 -p', {
'names': False,
'organisations': ['o1'],
'project_specs': True
}],
])
def test_parse_args_valid(args, expected):
assert expected == parse_args(args.split(' ')).__dict__
def test_main():
with patch(
'vang.tfs.get_projects.get_projects',
return_value=['project1', 'project2'],
autospec=True,
) as mock_get_projects:
with patch('vang.tfs.get_projects.print') as mock_print:
main('organisations', False, True)
assert [call('organisations', False,
True)] == mock_get_projects.mock_calls
assert [call('project1'), call('project2')] == mock_print.mock_calls
|
[
"unittest.mock.patch",
"pytest.raises",
"vang.tfs.get_projects.main",
"vang.tfs.get_projects.get_projects",
"pytest.mark.parametrize",
"unittest.mock.call"
] |
[((1405, 1456), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""args"""', "['', '-n n -p -p']"], {}), "('args', ['', '-n n -p -p'])\n", (1428, 1456), False, 'import pytest\n'), ((1588, 1888), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""args, expected"""', "[['o1 o2', {'names': False, 'organisations': ['o1', 'o2'], 'project_specs':\n False}], ['o1 -n', {'names': True, 'organisations': ['o1'],\n 'project_specs': False}], ['o1 -p', {'names': False, 'organisations': [\n 'o1'], 'project_specs': True}]]"], {}), "('args, expected', [['o1 o2', {'names': False,\n 'organisations': ['o1', 'o2'], 'project_specs': False}], ['o1 -n', {\n 'names': True, 'organisations': ['o1'], 'project_specs': False}], [\n 'o1 -p', {'names': False, 'organisations': ['o1'], 'project_specs': True}]]\n )\n", (1611, 1888), False, 'import pytest\n'), ((213, 231), 'vang.tfs.get_projects.get_projects', 'get_projects', (['None'], {}), '(None)\n', (225, 231), False, 'from vang.tfs.get_projects import get_projects, main, parse_args\n'), ((249, 265), 'vang.tfs.get_projects.get_projects', 'get_projects', (['[]'], {}), '([])\n', (261, 265), False, 'from vang.tfs.get_projects import get_projects, main, parse_args\n'), ((275, 498), 'unittest.mock.patch', 'patch', (['"""vang.tfs.get_projects.call"""'], {'return_value': "{'count': 1, 'value': [{'id': 'id', 'name': 'project', 'revision': 272509,\n 'state': 'wellFormed', 'url': 'remoteUrl', 'visibility': 'private'}]}", 'autospec': '(True)'}), "('vang.tfs.get_projects.call', return_value={'count': 1, 'value': [{\n 'id': 'id', 'name': 'project', 'revision': 272509, 'state':\n 'wellFormed', 'url': 'remoteUrl', 'visibility': 'private'}]}, autospec=True\n )\n", (280, 498), False, 'from unittest.mock import call, patch\n'), ((1511, 1529), 'pytest.raises', 'raises', (['SystemExit'], {}), '(SystemExit)\n', (1517, 1529), False, 'from pytest import raises\n'), ((2144, 2245), 'unittest.mock.patch', 'patch', 
(['"""vang.tfs.get_projects.get_projects"""'], {'return_value': "['project1', 'project2']", 'autospec': '(True)'}), "('vang.tfs.get_projects.get_projects', return_value=['project1',\n 'project2'], autospec=True)\n", (2149, 2245), False, 'from unittest.mock import call, patch\n'), ((969, 999), 'vang.tfs.get_projects.get_projects', 'get_projects', (["['organisation']"], {}), "(['organisation'])\n", (981, 999), False, 'from vang.tfs.get_projects import get_projects, main, parse_args\n'), ((1043, 1093), 'vang.tfs.get_projects.get_projects', 'get_projects', (["['organisation']"], {'project_specs': '(True)'}), "(['organisation'], project_specs=True)\n", (1055, 1093), False, 'from vang.tfs.get_projects import get_projects, main, parse_args\n'), ((1180, 1222), 'vang.tfs.get_projects.get_projects', 'get_projects', (["['organisation']"], {'names': '(True)'}), "(['organisation'], names=True)\n", (1192, 1222), False, 'from vang.tfs.get_projects import get_projects, main, parse_args\n'), ((1253, 1315), 'vang.tfs.get_projects.get_projects', 'get_projects', (["['organisation']"], {'project_specs': '(True)', 'names': '(True)'}), "(['organisation'], project_specs=True, names=True)\n", (1265, 1315), False, 'from vang.tfs.get_projects import get_projects, main, parse_args\n'), ((2320, 2356), 'unittest.mock.patch', 'patch', (['"""vang.tfs.get_projects.print"""'], {}), "('vang.tfs.get_projects.print')\n", (2325, 2356), False, 'from unittest.mock import call, patch\n'), ((2384, 2418), 'vang.tfs.get_projects.main', 'main', (['"""organisations"""', '(False)', '(True)'], {}), "('organisations', False, True)\n", (2388, 2418), False, 'from vang.tfs.get_projects import get_projects, main, parse_args\n'), ((2439, 2473), 'unittest.mock.call', 'call', (['"""organisations"""', '(False)', '(True)'], {}), "('organisations', False, True)\n", (2443, 2473), False, 'from unittest.mock import call, patch\n'), ((2552, 2568), 'unittest.mock.call', 'call', (['"""project1"""'], {}), "('project1')\n", (2556, 
2568), False, 'from unittest.mock import call, patch\n'), ((2570, 2586), 'unittest.mock.call', 'call', (['"""project2"""'], {}), "('project2')\n", (2574, 2586), False, 'from unittest.mock import call, patch\n')]
|
import pandas as pd
import tensorflow as tf
import os
from tensorflow._api.v2 import data
# TODO is jpeg and write log of how many and what is filtered
def load_meta(path):
meta = pd.read_csv(path)
ids = [id-1 for id in meta.id.values]
wnid_to_id = tf.lookup.StaticHashTable(
initializer=tf.lookup.KeyValueTensorInitializer(
values=ids, keys=meta.wnid.values),
default_value=-1)
return wnid_to_id
def get_wnid(path):
file_name = tf.strings.split(path, sep=os.sep)[-1]
split_name = tf.strings.split(file_name, sep='_')
wnid = split_name[0]
return wnid
# TODO random mutations
# TODO test setting shape
def img_decode(img_bytes):
img = tf.io.decode_jpeg(img_bytes, channels=3)
img = tf.image.convert_image_dtype(img, 'float32')
img = tf.image.resize(img, (224, 224))
# TODO check if shape is greater than a certain amount, if so random crop,
# TODO else just resize
# TODO make sure mutations don't go above/below 0/1
return img
# TODO test running al lin parallel rather than stacking
def loader(path, meta_path):
files_ds = tf.data.Dataset.list_files(os.sep.join((path, "*")))
wnid_ds = files_ds.map(get_wnid, tf.data.AUTOTUNE)
wnid_to_id_hash = load_meta(meta_path)
ids_ds = wnid_ds.map(lambda x: wnid_to_id_hash.lookup(x), tf.data.AUTOTUNE) #.cache()
ds = tf.data.Dataset.zip((files_ds, ids_ds)) #.shuffle(len(files_ds), reshuffle_each_iteration=True)
ds = ds.cache()
ds = ds.shuffle(len(files_ds), reshuffle_each_iteration=True)
ds = ds.map(lambda x, y: (tf.io.read_file(x), y), tf.data.AUTOTUNE)
ds = ds.filter(lambda x, _: tf.io.is_jpeg(x))
ds = ds.map(lambda x, y: (img_decode(x), y), tf.data.AUTOTUNE)
return ds
|
[
"tensorflow.strings.split",
"pandas.read_csv",
"tensorflow.io.is_jpeg",
"tensorflow.io.decode_jpeg",
"tensorflow.data.Dataset.zip",
"os.sep.join",
"tensorflow.io.read_file",
"tensorflow.image.resize",
"tensorflow.lookup.KeyValueTensorInitializer",
"tensorflow.image.convert_image_dtype"
] |
[((188, 205), 'pandas.read_csv', 'pd.read_csv', (['path'], {}), '(path)\n', (199, 205), True, 'import pandas as pd\n'), ((540, 576), 'tensorflow.strings.split', 'tf.strings.split', (['file_name'], {'sep': '"""_"""'}), "(file_name, sep='_')\n", (556, 576), True, 'import tensorflow as tf\n'), ((707, 747), 'tensorflow.io.decode_jpeg', 'tf.io.decode_jpeg', (['img_bytes'], {'channels': '(3)'}), '(img_bytes, channels=3)\n', (724, 747), True, 'import tensorflow as tf\n'), ((758, 802), 'tensorflow.image.convert_image_dtype', 'tf.image.convert_image_dtype', (['img', '"""float32"""'], {}), "(img, 'float32')\n", (786, 802), True, 'import tensorflow as tf\n'), ((813, 845), 'tensorflow.image.resize', 'tf.image.resize', (['img', '(224, 224)'], {}), '(img, (224, 224))\n', (828, 845), True, 'import tensorflow as tf\n'), ((1379, 1418), 'tensorflow.data.Dataset.zip', 'tf.data.Dataset.zip', (['(files_ds, ids_ds)'], {}), '((files_ds, ids_ds))\n', (1398, 1418), True, 'import tensorflow as tf\n'), ((484, 518), 'tensorflow.strings.split', 'tf.strings.split', (['path'], {'sep': 'os.sep'}), '(path, sep=os.sep)\n', (500, 518), True, 'import tensorflow as tf\n'), ((1154, 1178), 'os.sep.join', 'os.sep.join', (["(path, '*')"], {}), "((path, '*'))\n", (1165, 1178), False, 'import os\n'), ((312, 382), 'tensorflow.lookup.KeyValueTensorInitializer', 'tf.lookup.KeyValueTensorInitializer', ([], {'values': 'ids', 'keys': 'meta.wnid.values'}), '(values=ids, keys=meta.wnid.values)\n', (347, 382), True, 'import tensorflow as tf\n'), ((1665, 1681), 'tensorflow.io.is_jpeg', 'tf.io.is_jpeg', (['x'], {}), '(x)\n', (1678, 1681), True, 'import tensorflow as tf\n'), ((1591, 1609), 'tensorflow.io.read_file', 'tf.io.read_file', (['x'], {}), '(x)\n', (1606, 1609), True, 'import tensorflow as tf\n')]
|
import config
from models import BlockModel
import datetime
import uuid
from utilities import AppContext
from anuvaad_auditor.loghandler import log_info, log_exception
import time
class FileContentRepositories:
def __init__(self):
self.blockModel = BlockModel()
def create_block_info(self, block, record_id, page_info, data_type, user_id, src_lang, tgt_lang):
new_block = {}
new_block['created_on'] = datetime.datetime.utcnow()
new_block['record_id'] = record_id
new_block['page_no'] = page_info['page_no']
new_block['data_type'] = data_type
new_block['job_id'] = record_id.split('|')[0]
new_block['created_by'] = user_id
new_block['src_lang'] = src_lang
new_block['tgt_lang'] = tgt_lang
'''
' generating block_identifier to uniquely identify individual block
'''
new_block['block_identifier'] = str(uuid.uuid4())
block['block_identifier'] = new_block['block_identifier']
new_block['data'] = block
new_block['data']['page_info'] = page_info
if 'tokenized_sentences' in block:
for elem in block['tokenized_sentences']:
if 'tgt' in elem:
elem['s0_tgt'] = elem['tgt']
elem['s0_src'] = elem['src']
if 'input_subwords' in elem:
del elem['input_subwords']
if 'output_subwords' in elem:
del elem['output_subwords']
if 'pred_score' in elem:
del elem['pred_score']
return new_block
def update_block_info(self, block, update_s0, modifiedSentences=None):
new_block = {}
new_block['data'] = block
# log_info("update_block_info payload {}".format(json.dumps(block)), AppContext.getContext())
if 'tokenized_sentences' in list(block.keys()):
for elem in block['tokenized_sentences']:
#case in which only the targeted setences are modified
if update_s0 and modifiedSentences != None and len(modifiedSentences) != 0:
if 's_id' in elem and elem['s_id'] in modifiedSentences:
if 'tgt' in elem:
elem['s0_tgt'] = elem['tgt']
elem['s0_src'] = elem['src']
# case in which entire block is updated/ updating source file
if update_s0 and (modifiedSentences == None or len(modifiedSentences) == 0) :
if 'tgt' in elem:
elem['s0_tgt'] = elem['tgt']
elem['s0_src'] = elem['src']
if 'input_subwords' in elem:
del elem['input_subwords']
if 'output_subwords' in elem:
del elem['output_subwords']
if 'pred_score' in elem:
del elem['pred_score']
log_info("updating new block for block_identifier {}".format(block['block_identifier']), AppContext.getContext())
return new_block
def store(self, user_id, file_locale, record_id, pages, src_lang, tgt_lang):
blocks = []
for page in pages:
page_info = {}
page_info['page_no'] = page['page_no']
page_info['page_width'] = page['page_width']
page_info['page_height'] = page['page_height']
try:
if 'images' in page and page['images'] != None:
for image in page['images']:
blocks.append(self.create_block_info(image, record_id, page_info, 'images', user_id, src_lang, tgt_lang))
except Exception as e:
AppContext.addRecordID(record_id)
log_exception('images key not present, thats strange:{}'.format(str(e)), AppContext.getContext(), e)
try:
if 'lines' in page and page['lines'] != None:
for line in page['lines']:
blocks.append(self.create_block_info(line, record_id, page_info, 'lines', user_id, src_lang, tgt_lang))
except Exception as e:
AppContext.addRecordID(record_id)
log_info('lines key is not present, ignorning further:{}'.format(str(e)), AppContext.getContext())
pass
try:
if 'text_blocks' in page and page['text_blocks'] != None:
for text in page['text_blocks']:
blocks.append(self.create_block_info(text, record_id, page_info, 'text_blocks', user_id, src_lang, tgt_lang))
except Exception as e:
AppContext.addRecordID(record_id)
log_exception('text_blocks key not present, thats strange:{}'.format(str(e)), AppContext.getContext(), e)
pass
if self.blockModel.store_bulk_blocks(blocks) == False:
return False
return True
def get(self, record_id, start_page=1, end_page=5):
total_page_count = self.blockModel.get_document_total_page_count(record_id)
if start_page == 0 and end_page == 0:
start_page = 1
end_page = total_page_count
if start_page == 0:
start_page = 1
if end_page == 0:
end_page = 5
if start_page > end_page:
return False
data = {}
data['pages'] = []
for i in range(start_page, end_page+1):
page_blocks = self.blockModel.get_blocks_by_page(record_id, i)
page = {}
for block in page_blocks:
page[block['_id']] = block['data']
if len(block['data']) > 0 :
page['page_height'] = block['data'][0]['page_info']['page_height']
page['page_no'] = block['data'][0]['page_info']['page_no']
page['page_width'] = block['data'][0]['page_info']['page_width']
data['pages'].append(page)
data['start_page'] = start_page
data['end_page'] = end_page
data['total'] = total_page_count
return data
def update(self, record_id,user_id, blocks, workflowCode, modifiedSentences=None):
updated_blocks = []
saved_blocks = []
update_s0 = False
'''
- workflowCode:
- WF_S_TR and WF_S_TKTR, changes the sentence structure hence s0 pair needs to be updated
- DP_WFLOW_S_C, doesn't changes the sentence structure hence no need to update the s0 pair
'''
if workflowCode is not None and (workflowCode == 'WF_S_TR' or workflowCode == 'WF_S_TKTR'):
update_s0 = True
log_info("FileContentUpdateRepo -workflowcode : {} | update_S0 : {}".format(workflowCode,update_s0),AppContext.getContext())
for block in blocks:
updated_blocks.append(self.update_block_info(block, update_s0, modifiedSentences))
if len(updated_blocks) > 0:
for updated_block in updated_blocks:
AppContext.addRecordID(record_id)
log_info("FileContentUpdateRepo -updating blocks", AppContext.getContext())
if self.blockModel.update_block(record_id,user_id, updated_block['data']['block_identifier'], updated_block) == False:
return False, saved_blocks
AppContext.addRecordID(record_id)
log_info("FileContentUpdateRepo -fetching back updated blocks", AppContext.getContext())
saved_block_results = self.blockModel.get_block_by_block_identifier(record_id,user_id, updated_block['data']['block_identifier'])
for saved_block in saved_block_results:
saved_blocks.append(saved_block['data'][0])
log_info("FileContentUpdateRepo -updated blocks : {}".format(str(saved_blocks)),AppContext.getContext())
return True, saved_blocks
def store_reference(self,records):
for record in records:
if record.get("job_id") == None and record.get("file_link") == None:
return False
if(self.blockModel.store_s3_link({"job_id":record["job_id"],"file_link":record["file_link"],"timestamp":eval(str(time.time()))})) == False:
return False
return True
def get_reference(self,records):
data=[]
for record in records:
result= self.blockModel.get_s3_link(record)
if not result:
return False
data.append(result)
return data
|
[
"models.BlockModel",
"uuid.uuid4",
"utilities.AppContext.getContext",
"time.time",
"datetime.datetime.utcnow",
"utilities.AppContext.addRecordID"
] |
[((266, 278), 'models.BlockModel', 'BlockModel', ([], {}), '()\n', (276, 278), False, 'from models import BlockModel\n'), ((461, 487), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (485, 487), False, 'import datetime\n'), ((992, 1004), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1002, 1004), False, 'import uuid\n'), ((3203, 3226), 'utilities.AppContext.getContext', 'AppContext.getContext', ([], {}), '()\n', (3224, 3226), False, 'from utilities import AppContext\n'), ((7144, 7167), 'utilities.AppContext.getContext', 'AppContext.getContext', ([], {}), '()\n', (7165, 7167), False, 'from utilities import AppContext\n'), ((7403, 7436), 'utilities.AppContext.addRecordID', 'AppContext.addRecordID', (['record_id'], {}), '(record_id)\n', (7425, 7436), False, 'from utilities import AppContext\n'), ((7727, 7760), 'utilities.AppContext.addRecordID', 'AppContext.addRecordID', (['record_id'], {}), '(record_id)\n', (7749, 7760), False, 'from utilities import AppContext\n'), ((3920, 3953), 'utilities.AppContext.addRecordID', 'AppContext.addRecordID', (['record_id'], {}), '(record_id)\n', (3942, 3953), False, 'from utilities import AppContext\n'), ((4390, 4423), 'utilities.AppContext.addRecordID', 'AppContext.addRecordID', (['record_id'], {}), '(record_id)\n', (4412, 4423), False, 'from utilities import AppContext\n'), ((4919, 4952), 'utilities.AppContext.addRecordID', 'AppContext.addRecordID', (['record_id'], {}), '(record_id)\n', (4941, 4952), False, 'from utilities import AppContext\n'), ((7504, 7527), 'utilities.AppContext.getContext', 'AppContext.getContext', ([], {}), '()\n', (7525, 7527), False, 'from utilities import AppContext\n'), ((7841, 7864), 'utilities.AppContext.getContext', 'AppContext.getContext', ([], {}), '()\n', (7862, 7864), False, 'from utilities import AppContext\n'), ((8228, 8251), 'utilities.AppContext.getContext', 'AppContext.getContext', ([], {}), '()\n', (8249, 8251), False, 'from utilities import AppContext\n'), ((4043, 
4066), 'utilities.AppContext.getContext', 'AppContext.getContext', ([], {}), '()\n', (4064, 4066), False, 'from utilities import AppContext\n'), ((4514, 4537), 'utilities.AppContext.getContext', 'AppContext.getContext', ([], {}), '()\n', (4535, 4537), False, 'from utilities import AppContext\n'), ((5047, 5070), 'utilities.AppContext.getContext', 'AppContext.getContext', ([], {}), '()\n', (5068, 5070), False, 'from utilities import AppContext\n'), ((8602, 8613), 'time.time', 'time.time', ([], {}), '()\n', (8611, 8613), False, 'import time\n')]
|
import os
from pathlib import Path
import pandas as pd
#######################
## folders ##
#######################
def get_result_dir():
    """Return (creating it if necessary) the results folder under the user's home."""
    result_dir = Path.home() / 'fao_cropland_results'
    result_dir.mkdir(parents=True, exist_ok=True)
    return str(result_dir)
def get_tmp_dir():
    """Return (creating it if necessary) the tmp folder inside the results folder."""
    tmp_dir = Path.home() / 'fao_cropland_results' / 'tmp'
    tmp_dir.mkdir(parents=True, exist_ok=True)
    return str(tmp_dir)
def get_data_dir():
    """Return the path of the sibling ``data`` directory (not created here)."""
    here = os.path.dirname(__file__)
    return os.path.join(here, '..', 'data')
#####################
##    variables    ##
#####################
# LCCS land-cover class codes kept by this project (named "ecozones" here)
ecozones = {
    10: 'Cropland, rainfed',
    20: 'Cropland, irrigated or post-flooding',
    30: 'Mosaic cropland/natural vegetation'
}
#####################
##     tmp file    ##
#####################
# "{}" is a placeholder filled in later (presumably with a class code -- TODO confirm)
llc_2013_map = os.path.join(get_tmp_dir(), 'LCCS-2013-{}.tif')
#####################
##   output file   ##
#####################
crop_masked = os.path.join(get_result_dir(), 'crop_masked.tif')
llc_2013_map_masked = os.path.join(get_result_dir(), 'LCCS-2013-{}_masked.tif')
#####################
##   input file    ##
#####################
cropland_raster = os.path.join(get_data_dir(), 'spam2010V1r1_global_V_agg_VP_CR_AR_A.tif')
llc_2013_raster = os.path.join(get_data_dir(), 'ESACCI-LC-L4-LCCS-Map-300m-P1Y-2013-v2.0.7.tif')
llc_full = os.path.join(get_data_dir(), 'ESACCI-LC-L4-LCCS-Map-300m-P1Y-1992_2015-v2.0.7.tif')
####################
##   dataframes   ##
####################
# Loaded once at import time; requires data/countries.csv to exist next to this package
country_list = pd.read_csv(os.path.join(os.path.dirname(__file__), '..', 'data', 'countries.csv'), sep=',')
|
[
"os.path.dirname",
"pathlib.Path.home"
] |
[((170, 181), 'pathlib.Path.home', 'Path.home', ([], {}), '()\n', (179, 181), False, 'from pathlib import Path\n'), ((314, 325), 'pathlib.Path.home', 'Path.home', ([], {}), '()\n', (323, 325), False, 'from pathlib import Path\n'), ((473, 498), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (488, 498), False, 'import os\n'), ((1532, 1557), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1547, 1557), False, 'import os\n')]
|
import tkinter as tk
from tkinter import filedialog
from tkinter import *
from PIL import ImageTk ,Image
import numpy as np
from keras.models import load_model
# load the trained Keras model from disk
model = load_model('traffic_classifier.h5')
# define the class labels in the dictionary; keys are 1-based
# (the model predicts 0-based indices, hence the pred+1 lookup in classify())
classes = { 1:'Speed limit (20km/h)',
            2:'Speed limit (30km/h)',
            3:'Speed limit (50km/h)',
            4:'Speed limit (60km/h)',
            5:'Speed limit (70km/h)',
            6:'Speed limit (80km/h)',
            7:'End of speed limit (80km/h)',
            8:'Speed limit (100km/h)',
            9:'Speed limit (120km/h)',
            10:'No passing',
            11:'No passing veh over 3.5 tons',
            12:'Right-of-way at intersection',
            13:'Priority road',
            14:'Yield',
            15:'Stop',
            16:'No vehicles',
            17:'Veh > 3.5 tons prohibited',
            18:'No entry',
            19:'General caution',
            20:'Dangerous curve left',
            21:'Dangerous curve right',
            22:'Double curve',
            23:'Bumpy road',
            24:'Slippery road',
            25:'Road narrows on the right',
            26:'Road work',
            27:'Traffic signals',
            28:'Pedestrians',
            29:'Children crossing',
            30:'Bicycles crossing',
            31:'Beware of ice/snow',
            32:'Wild animals crossing',
            33:'End speed + passing limits',
            34:'Turn right ahead',
            35:'Turn left ahead',
            36:'Ahead only',
            37:'Go straight or right',
            38:'Go straight or left',
            39:'Keep right',
            40:'Keep left',
            41:'Roundabout mandatory',
            42:'End of no passing',
            43:'End no passing veh > 3.5 tons' }
# initialize the GUI: main window plus the two labels that show the
# prediction text and the uploaded image
top = tk.Tk()
top.geometry('800x600')
top.title('Traffic Sign Classification')
top.configure(background = "#CDCDCD")
label = Label(top , background = "#CDCDCD" , font = ('arial' , 15, 'bold'))
sign_image = Label(top)
def classify(file_path):
    """Predict the traffic sign in the image at *file_path* and show its label."""
    global label_packed
    img = Image.open(file_path)
    img = img.resize((30, 30), Image.NEAREST)
    # add a leading batch dimension, then materialise as an ndarray
    batch = np.expand_dims(img, axis=0)
    batch = np.array(batch)
    prediction = model.predict_classes([batch])[0]
    # model outputs a 0-based index; the classes dict is keyed from 1
    sign = classes[prediction + 1]
    print(sign)
    label.configure(foreground="#011638", text=sign)
def show_classify_button(file_path):
    """Place a button on the window that classifies the currently chosen image."""
    button = Button(top, text="Classsify Image",
                    command=lambda: classify(file_path), padx=10, pady=5)
    button.configure(background='#364156', foreground='white',
                     font=('arial', 10, 'bold'))
    button.place(relx=0.79, rely=0.46)
def upload_image():
    """Ask the user for an image file and display it in the window.

    Failures (dialog cancelled, unreadable image) are reported on the
    console instead of being silently discarded.
    """
    try:
        file_path = filedialog.askopenfilename()
        uploaded = Image.open(file_path)
        uploaded.thumbnail(((top.winfo_width()) , (top.winfo_height())))
        im = ImageTk.PhotoImage(uploaded)
        sign_image.configure(image = im)
        # keep a reference so Tk does not garbage-collect the image
        sign_image.image = im
        label.configure(text = '')
        show_classify_button(file_path)
    except Exception as exc:
        # Was a bare "except: pass": that swallowed SystemExit/KeyboardInterrupt
        # and hid every error. Stay best-effort, but report the problem.
        print('Could not load image:', exc)
# Upload button at the bottom of the window
upload = Button(top , text = "Upload an Image" , command = upload_image , padx = 10 , pady = 5)
upload.configure( background = "#364156" , foreground = 'white' , font = ('arial' , 10, 'bold'))
upload.pack(side = BOTTOM , pady = 50)
# image preview and prediction label fill the remaining space
sign_image.pack(side = BOTTOM , expand =True)
label.pack( side =BOTTOM ,expand =True)
heading = Label(top , text = 'Know Your Traffic Sign' , pady = 20 , font = ('arial', 20 ,'bold'))
heading.configure(background = "#CDCDCD", foreground = "#364156")
heading.pack()
# blocks until the window is closed
top.mainloop()
|
[
"keras.models.load_model",
"PIL.ImageTk.PhotoImage",
"numpy.expand_dims",
"tkinter.filedialog.askopenfilename",
"PIL.Image.open",
"numpy.array",
"tkinter.Tk"
] |
[((185, 220), 'keras.models.load_model', 'load_model', (['"""traffic_classifier.h5"""'], {}), "('traffic_classifier.h5')\n", (195, 220), False, 'from keras.models import load_model\n'), ((1873, 1880), 'tkinter.Tk', 'tk.Tk', ([], {}), '()\n', (1878, 1880), True, 'import tkinter as tk\n'), ((2147, 2168), 'PIL.Image.open', 'Image.open', (['file_path'], {}), '(file_path)\n', (2157, 2168), False, 'from PIL import ImageTk, Image\n'), ((2233, 2262), 'numpy.expand_dims', 'np.expand_dims', (['image'], {'axis': '(0)'}), '(image, axis=0)\n', (2247, 2262), True, 'import numpy as np\n'), ((2279, 2294), 'numpy.array', 'np.array', (['image'], {}), '(image)\n', (2287, 2294), True, 'import numpy as np\n'), ((2791, 2819), 'tkinter.filedialog.askopenfilename', 'filedialog.askopenfilename', ([], {}), '()\n', (2817, 2819), False, 'from tkinter import filedialog\n'), ((2839, 2860), 'PIL.Image.open', 'Image.open', (['file_path'], {}), '(file_path)\n', (2849, 2860), False, 'from PIL import ImageTk, Image\n'), ((2948, 2976), 'PIL.ImageTk.PhotoImage', 'ImageTk.PhotoImage', (['uploaded'], {}), '(uploaded)\n', (2966, 2976), False, 'from PIL import ImageTk, Image\n')]
|
"""
Updated version of the MPyC Coroutine code file
A few alterations have been made to ensure that type hinting can be applied properly
"""
import functools
import sys
from asyncio import Future, Task
from typing import (
Any,
Callable,
Coroutine,
Generator,
Generic,
List,
Optional,
Type,
TypeVar,
Union,
get_type_hints,
)
from mpyc.asyncoro import ( # type: ignore # some classes or methods cannot be found because they; were defined as protected
__reconcile,
_ncopy,
_nested_list,
_ProgramCounterWrapper,
_reconcile,
_wrap_in_coro,
runtime,
)
from mpyc.sectypes import SecureFixedPoint, SecureObject
# A single placeholder element: a secure object or None
InnerType = Optional[SecureObject]
# Note that in principle, higher recursion can occur, but lists with a recursion level of higher
# than 2 are very unlikely
ReturnType = Union[InnerType, List[InnerType], List[List[InnerType]]]
# Generic element type used by the mpc_coro wrappers below
SecureElement = TypeVar("SecureElement")
def mpc_coro_ignore(
    func: Callable[..., Coroutine[SecureElement, None, SecureElement]]
) -> Callable[..., SecureElement]:
    """Wrap an MPC coroutine while ignoring its type annotations entirely.

    Shorthand for :func:`mpc_coro` with type-hint handling disabled and no
    program-counter wrapper, so annotations cannot affect behaviour.

    :param func: The async function to be wrapped
    :return: A placeholder for which a result will automatically be set when
        the coroutine has finished running
    """
    return mpc_coro(
        func,
        apply_program_counter_wrapper=False,
        ignore_type_hints=True,
    )
def mpc_coro(
    func: Callable[..., Coroutine[SecureElement, None, SecureElement]],
    apply_program_counter_wrapper: bool = True,
    ignore_type_hints: bool = False,
) -> Callable[..., SecureElement]:
    """Decorator turning coroutine func into an MPyC coroutine.
    An MPyC coroutine is evaluated asynchronously, returning empty placeholders.
    The type of the placeholders is defined either by a return annotation
    of the form "-> expression" or by the first await expression in func.
    Return annotations can only be used for static types.
    :param func: The async function to be wrapped
    :param apply_program_counter_wrapper: A boolean value indicating whether a program counter
        wrapper should be applied
    :param ignore_type_hints: A boolean indicating whether type annotations should be used by the
        code to deduce the type of the placeholder
    :return: A placeholder for which a result will automatically be set when the coroutine has
        finished running
    """
    # Resolved once at decoration time; None means "derive from first await"
    rettype = None if ignore_type_hints else get_type_hints(func).get("return")
    @functools.wraps(func)
    def typed_asyncoro(*args: Any, **kwargs: Any) -> SecureElement:
        """
        This is the function that is returned when the mpc_coro wrapper is applied to an
        async function. This function creates the async coroutine that was wrapped using the
        positional arguments and keyword arguments and assigns the coroutine to a Task. A place-
        holder of the correct type is returned by this function and the value of the placeholder is
        substituted for the actual result when the Task has finished running the coroutine.
        :param args: positional arguments for the async function being wrapped
        :param kwargs: keyword arguments for the async function being wrapped
        :return: A placeholder of the right return type
        :raise Exception: This occurs when either the coroutine does not call returnType or another
            exception is raised while trying to retrieve the right return type.
        """
        runtime._pc_level += 1
        coro = func(*args, **kwargs)
        placeholder: SecureElement
        if rettype:
            # static return annotation available: build the placeholder directly
            placeholder = returnType_no_wrap(rettype)
        else:
            try:
                # attempting to reach an await returnType(...) statement
                placeholder = coro.send(None)
            except StopIteration as exc:
                # the coroutine returned a value, no returnType encountered
                # the value is not the placeholder but the actual result
                runtime._pc_level -= 1
                return_value: SecureElement = exc.value
                return return_value
            except Exception:
                runtime._pc_level -= 1
                raise
        # if this should not be done asynchronously, we exhaust the generator until we get a result
        if runtime.options.no_async:
            while True:
                try:
                    coro.send(None)
                except StopIteration as exc:
                    runtime._pc_level -= 1
                    if placeholder is not None:
                        __reconcile(placeholder, exc.value)
                    return placeholder
                except Exception:
                    runtime._pc_level -= 1  # pylint: disable=W0212
                    raise
        # we start a new Task that runs the coroutine and instruct it to replace the placeholder
        # when the coroutine has finished
        if apply_program_counter_wrapper:
            coro = _wrap_in_coro(
                _ProgramCounterWrapper(runtime, coro)
            )  # pylint: disable=W0212
        # start the coroutine in a different task
        task = Task(coro, loop=runtime._loop)  # pylint: disable=W0212
        # enclosing MPyC coroutine call
        # noinspection PyUnresolvedReferences
        # the method is protected, but we do need it, so the inspection tools will throw an error
        task.f_back = sys._getframe(1)  # type: ignore # pylint: disable=W0212
        # make sure the placeholder is replaced after the coroutine is finished
        task.add_done_callback(lambda t: _reconcile(placeholder, t))
        placeholder_copy = _ncopy(placeholder)
        return placeholder_copy
    return typed_asyncoro
SomeType = TypeVar("SomeType")


class YieldAwaitable(Generic[SomeType]):  # pylint: disable=R0903
    # pylint complains about too few public methods, but this class is a
    # deliberate single-purpose wrapper.
    """Awaitable wrapper that surfaces its value to an enclosing coroutine.

    Awaiting an instance yields the wrapped value exactly once, so the outer
    MPyC coroutine wrapper receives it through the coroutine's ``send``
    protocol.
    """

    __slots__ = ["value"]

    def __init__(self, value: SomeType) -> None:
        self.value = value

    def __await__(self) -> Generator[SomeType, None, None]:
        """Yield the stored value once to whatever is driving this coroutine."""
        yield self.value
def returnType(  # type: ignore # redefinition of an mpyc method; the original
    # (non camel case) name is kept on purpose
    # pylint: disable=C0103,W9016,W9012  # Type annotations in overloaded methods
    return_type,
    *dimensions,
):
    """Define the return type for an MPyC coroutine and expose it to send calls.

    Used in the first await expression of an MPyC coroutine: awaiting the
    returned YieldAwaitable passes the placeholder to the outer mpc_coro
    wrapper.

    :param return_type: The Class type of the object(s) to be returned
    :param dimensions: optional sizes of the nested list of placeholders; with
        no dimensions a single placeholder is produced
    :return: A placeholder, or nested list of placeholders, wrapped in a
        YieldAwaitable
    """
    placeholder = returnType_no_wrap(return_type, *dimensions)
    return YieldAwaitable(placeholder)
def returnType_no_wrap(  # type: ignore # This is redefinition of an mpyc method,
    # so even though the name is not camel case, we chose to keep it
    # pylint: disable=C0103,W9016,W9012  # Type annotations in overloaded methods
    return_type,
    *dimensions,
):
    """
    Define return type for MPyC coroutines.
    :param return_type: The Class type of the placeholder
    :param dimensions: arguments that describe the dimensions of the nested list to be returned.
        If no dimensions are provided, a single placeholder is returned. If one or more dimension
        is provided, it returns a nested list containing objects. The nesting is done according to the
        dimensions provided.
    :return: A placeholder or nested list of placeholders wrapped.
    """
    dimension_list = list(dimensions)
    # return_type is None itself: no placeholder is needed
    if isinstance(return_type, type(None)):
        return None
    if isinstance(return_type, tuple):
        # tuple form is (secure type, integral flag)
        secure_type, integral = return_type
        if secure_type.frac_length:
            # we now know that the return_type is a class constructor for a class that is a
            # subclass of SecureFixedPoint
            def return_placeholder() -> SecureFixedPoint:
                """
                Quick workaround to allow a function that returns a placeholder
                to be saved in a variable
                :return: Secure fixed-point number
                """
                return secure_type(None, integral)  # type: ignore # we know that this must be a
                # SecureFixedPoint, so the constructor can be called with 2 parameters
            temp_return_type = return_placeholder
        else:
            temp_return_type = secure_type
    # NOTE: This part is ignored for type hinting
    elif issubclass(return_type, Future):
        # pylint: disable=W0212
        temp_return_type = lambda: return_type(loop=runtime._loop)  # type: ignore # This returns a
        # Future, but this is not understood by the inspection tools
        # pylint: enable=W0212
    else:
        temp_return_type = return_type
    if dimension_list:
        # create a nested list of placeholders
        return _nested_list(temp_return_type, dimension_list[0], dimension_list[1:])
    temp_return_type_no_none: Union[
        Type[SecureObject], Callable[..., SecureFixedPoint]
    ] = temp_return_type
    return temp_return_type_no_none()
|
[
"asyncio.Task",
"mpyc.asyncoro.__reconcile",
"typing.get_type_hints",
"sys._getframe",
"mpyc.asyncoro._nested_list",
"mpyc.asyncoro._reconcile",
"functools.wraps",
"mpyc.asyncoro._ProgramCounterWrapper",
"typing.TypeVar",
"mpyc.asyncoro._ncopy"
] |
[((926, 950), 'typing.TypeVar', 'TypeVar', (['"""SecureElement"""'], {}), "('SecureElement')\n", (933, 950), False, 'from typing import Any, Callable, Coroutine, Generator, Generic, List, Optional, Type, TypeVar, Union, get_type_hints\n'), ((5843, 5862), 'typing.TypeVar', 'TypeVar', (['"""SomeType"""'], {}), "('SomeType')\n", (5850, 5862), False, 'from typing import Any, Callable, Coroutine, Generator, Generic, List, Optional, Type, TypeVar, Union, get_type_hints\n'), ((2579, 2600), 'functools.wraps', 'functools.wraps', (['func'], {}), '(func)\n', (2594, 2600), False, 'import functools\n'), ((5255, 5285), 'asyncio.Task', 'Task', (['coro'], {'loop': 'runtime._loop'}), '(coro, loop=runtime._loop)\n', (5259, 5285), False, 'from asyncio import Future, Task\n'), ((5517, 5533), 'sys._getframe', 'sys._getframe', (['(1)'], {}), '(1)\n', (5530, 5533), False, 'import sys\n'), ((5751, 5770), 'mpyc.asyncoro._ncopy', '_ncopy', (['placeholder'], {}), '(placeholder)\n', (5757, 5770), False, 'from mpyc.asyncoro import __reconcile, _ncopy, _nested_list, _ProgramCounterWrapper, _reconcile, _wrap_in_coro, runtime\n'), ((10028, 10097), 'mpyc.asyncoro._nested_list', '_nested_list', (['temp_return_type', 'dimension_list[0]', 'dimension_list[1:]'], {}), '(temp_return_type, dimension_list[0], dimension_list[1:])\n', (10040, 10097), False, 'from mpyc.asyncoro import __reconcile, _ncopy, _nested_list, _ProgramCounterWrapper, _reconcile, _wrap_in_coro, runtime\n'), ((2538, 2558), 'typing.get_type_hints', 'get_type_hints', (['func'], {}), '(func)\n', (2552, 2558), False, 'from typing import Any, Callable, Coroutine, Generator, Generic, List, Optional, Type, TypeVar, Union, get_type_hints\n'), ((5113, 5150), 'mpyc.asyncoro._ProgramCounterWrapper', '_ProgramCounterWrapper', (['runtime', 'coro'], {}), '(runtime, coro)\n', (5135, 5150), False, 'from mpyc.asyncoro import __reconcile, _ncopy, _nested_list, _ProgramCounterWrapper, _reconcile, _wrap_in_coro, runtime\n'), ((5696, 5722), 
'mpyc.asyncoro._reconcile', '_reconcile', (['placeholder', 't'], {}), '(placeholder, t)\n', (5706, 5722), False, 'from mpyc.asyncoro import __reconcile, _ncopy, _nested_list, _ProgramCounterWrapper, _reconcile, _wrap_in_coro, runtime\n'), ((4677, 4712), 'mpyc.asyncoro.__reconcile', '__reconcile', (['placeholder', 'exc.value'], {}), '(placeholder, exc.value)\n', (4688, 4712), False, 'from mpyc.asyncoro import __reconcile, _ncopy, _nested_list, _ProgramCounterWrapper, _reconcile, _wrap_in_coro, runtime\n')]
|
from unittest import mock
import pytest
import mongomock
# In-memory Mongo database shared by all tests (no real MongoDB required)
DB_TEST = mongomock.MongoClient().tests_solanches
@pytest.fixture(scope='session', autouse=True)
def teardown():
    """Patch authentication and both DB handles for the whole test session."""
    for target, replacement in (
        ('solanches.authenticate.jwt_required', lambda x: x),
        ('solanches.connect2db.DB', DB_TEST),
        ('solanches.models.DB', DB_TEST),
    ):
        mock.patch(target, replacement).start()
    yield
@pytest.fixture
def db_test():
    # Expose the shared mongomock database to tests needing direct access
    return DB_TEST
@pytest.fixture
def rest():
    # Imported lazily so the session-level patches (see teardown) are in place
    from solanches import rest
    yield rest
@pytest.fixture
def controller():
    """Yield the controller module and wipe every collection afterwards."""
    from solanches import controller
    yield controller
    from solanches import models
    for collection in (
        models.DB.comercio,
        models.DB.cardapio,
        models.DB.produto,
        models.DB.block_list,
    ):
        collection.delete_many({})
models.DB.block_list.delete_many({})
@pytest.fixture
def models():
    """Yield the models module and wipe every collection afterwards."""
    from solanches import models
    yield models
    for collection in (
        models.DB.comercio,
        models.DB.cardapio,
        models.DB.produto,
        models.DB.block_list,
    ):
        collection.delete_many({})
|
[
"solanches.models.DB.produto.delete_many",
"solanches.models.DB.block_list.delete_many",
"solanches.models.DB.cardapio.delete_many",
"pytest.fixture",
"unittest.mock.patch",
"mongomock.MongoClient",
"solanches.models.DB.comercio.delete_many"
] |
[((112, 157), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""', 'autouse': '(True)'}), "(scope='session', autouse=True)\n", (126, 157), False, 'import pytest\n'), ((69, 92), 'mongomock.MongoClient', 'mongomock.MongoClient', ([], {}), '()\n', (90, 92), False, 'import mongomock\n'), ((632, 666), 'solanches.models.DB.comercio.delete_many', 'models.DB.comercio.delete_many', (['{}'], {}), '({})\n', (662, 666), False, 'from solanches import models\n'), ((671, 705), 'solanches.models.DB.cardapio.delete_many', 'models.DB.cardapio.delete_many', (['{}'], {}), '({})\n', (701, 705), False, 'from solanches import models\n'), ((710, 743), 'solanches.models.DB.produto.delete_many', 'models.DB.produto.delete_many', (['{}'], {}), '({})\n', (739, 743), False, 'from solanches import models\n'), ((748, 784), 'solanches.models.DB.block_list.delete_many', 'models.DB.block_list.delete_many', (['{}'], {}), '({})\n', (780, 784), False, 'from solanches import models\n'), ((871, 905), 'solanches.models.DB.comercio.delete_many', 'models.DB.comercio.delete_many', (['{}'], {}), '({})\n', (901, 905), False, 'from solanches import models\n'), ((910, 944), 'solanches.models.DB.cardapio.delete_many', 'models.DB.cardapio.delete_many', (['{}'], {}), '({})\n', (940, 944), False, 'from solanches import models\n'), ((949, 982), 'solanches.models.DB.produto.delete_many', 'models.DB.produto.delete_many', (['{}'], {}), '({})\n', (978, 982), False, 'from solanches import models\n'), ((987, 1023), 'solanches.models.DB.block_list.delete_many', 'models.DB.block_list.delete_many', (['{}'], {}), '({})\n', (1019, 1023), False, 'from solanches import models\n'), ((178, 240), 'unittest.mock.patch', 'mock.patch', (['"""solanches.authenticate.jwt_required"""', '(lambda x: x)'], {}), "('solanches.authenticate.jwt_required', lambda x: x)\n", (188, 240), False, 'from unittest import mock\n'), ((253, 299), 'unittest.mock.patch', 'mock.patch', (['"""solanches.connect2db.DB"""', 'DB_TEST'], {}), 
"('solanches.connect2db.DB', DB_TEST)\n", (263, 299), False, 'from unittest import mock\n'), ((312, 354), 'unittest.mock.patch', 'mock.patch', (['"""solanches.models.DB"""', 'DB_TEST'], {}), "('solanches.models.DB', DB_TEST)\n", (322, 354), False, 'from unittest import mock\n')]
|
from flask import request, render_template, flash, session, Markup, redirect, url_for
from signage_server_app import app
import json
import yaml
import os
# Directory this module lives in (resolved through symlinks)
curdir = os.path.dirname(os.path.realpath(__file__))
# HTML Snippets: load every templates/snippets/*.html file once at import
# time, keyed by filename without extension, pre-marked safe for Jinja.
# NOTE(review): the loop variable "file" shadows a builtin name.
snippets = dict()
for file in os.listdir(os.path.join(curdir, "templates", "snippets")):
    if file.endswith(".html"):
        with open(os.path.join(curdir, "templates", "snippets", file), 'r') as f:
            snippets[os.path.splitext(file)[0]] = Markup(f.read())
# Only listen to these endpoints
endpoints = ["displays", "content", "playlists"]
# --- HTML Pages ---
@app.route("/")
@app.route("/displays", methods=["GET"])
def displays():
    # Returns a page showing all active displays (also serves the site root)
    return render_template("displays.html", **snippets)
@app.route("/<endpoint>/<int:item_id>", methods=["GET"])
def preview(endpoint, item_id):
    """Render a preview page for one item of a known endpoint, else 404."""
    if endpoint not in endpoints:
        return "Page not found", 404
    # Template names are singular: drop a trailing "s" from the endpoint name
    template = endpoint[:-1] if endpoint.endswith("s") else endpoint
    return render_template(f"{template}.html", item_id=item_id, **snippets)
@app.route("/admin/login", methods=["GET", "POST"])
def admin_login():
    """Serve the admin login page (GET) or check the password (POST).

    On a successful POST the session is marked as logged in and the client is
    told which admin page to continue to.
    """
    if request.method == "GET":
        # Returns a login page
        return render_template("admin_login.html", **snippets)
    # POST: compare the submitted password with the stored credentials.
    # NOTE(review): the password is stored and compared in plain text;
    # consider hashing it and using hmac.compare_digest before production use.
    with open(os.path.join(curdir, "data", "credentials.yaml"), 'r') as f:
        config = yaml.load(f.read(), Loader=yaml.SafeLoader)
    if request.form['password'] == config['password']:
        # Successful login
        session['logged_in'] = True
        # Continue to the page originally requested, if one was remembered
        # (single session lookup instead of the previous double lookup)
        endpoint = session.get("target_endpoint") or "displays"
        return dict(status="SUCCESS", endpoint=f"/admin/{endpoint}")
    # Wrong password
    return dict(status="FAILED", message="Invalid password.")
@app.route("/admin", methods=["GET"])
@app.route("/admin/<endpoint>", methods=["GET"])
def admin(endpoint="displays"):
    """Render the admin page for *endpoint*, sending anonymous users to login."""
    if not session.get("logged_in"):
        # Remember where the user wanted to go so login can redirect back
        session['target_endpoint'] = endpoint
        return redirect(url_for("admin_login"))
    if endpoint in endpoints:
        return render_template(f"admin_{endpoint}.html", **snippets)
    return "Page not found", 404
# --- API Calls ---
@app.route('/health')
def health():
    # Generic health check for testing purposes; returns a JSON body
    return json.dumps({'healthy': True})
|
[
"os.path.realpath",
"flask.session.get",
"json.dumps",
"flask.url_for",
"os.path.splitext",
"flask.render_template",
"signage_server_app.app.route",
"os.path.join"
] |
[((602, 616), 'signage_server_app.app.route', 'app.route', (['"""/"""'], {}), "('/')\n", (611, 616), False, 'from signage_server_app import app\n'), ((618, 657), 'signage_server_app.app.route', 'app.route', (['"""/displays"""'], {'methods': "['GET']"}), "('/displays', methods=['GET'])\n", (627, 657), False, 'from signage_server_app import app\n'), ((782, 837), 'signage_server_app.app.route', 'app.route', (['"""/<endpoint>/<int:item_id>"""'], {'methods': "['GET']"}), "('/<endpoint>/<int:item_id>', methods=['GET'])\n", (791, 837), False, 'from signage_server_app import app\n'), ((1168, 1218), 'signage_server_app.app.route', 'app.route', (['"""/admin/login"""'], {'methods': "['GET', 'POST']"}), "('/admin/login', methods=['GET', 'POST'])\n", (1177, 1218), False, 'from signage_server_app import app\n'), ((2033, 2069), 'signage_server_app.app.route', 'app.route', (['"""/admin"""'], {'methods': "['GET']"}), "('/admin', methods=['GET'])\n", (2042, 2069), False, 'from signage_server_app import app\n'), ((2071, 2118), 'signage_server_app.app.route', 'app.route', (['"""/admin/<endpoint>"""'], {'methods': "['GET']"}), "('/admin/<endpoint>', methods=['GET'])\n", (2080, 2118), False, 'from signage_server_app import app\n'), ((2511, 2531), 'signage_server_app.app.route', 'app.route', (['"""/health"""'], {}), "('/health')\n", (2520, 2531), False, 'from signage_server_app import app\n'), ((181, 207), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (197, 207), False, 'import os\n'), ((266, 311), 'os.path.join', 'os.path.join', (['curdir', '"""templates"""', '"""snippets"""'], {}), "(curdir, 'templates', 'snippets')\n", (278, 311), False, 'import os\n'), ((734, 778), 'flask.render_template', 'render_template', (['"""displays.html"""'], {}), "('displays.html', **snippets)\n", (749, 778), False, 'from flask import request, render_template, flash, session, Markup, redirect, url_for\n'), ((2605, 2634), 'json.dumps', 'json.dumps', (["{'healthy': True}"], {}), 
"({'healthy': True})\n", (2615, 2634), False, 'import json\n'), ((1053, 1117), 'flask.render_template', 'render_template', (['f"""{endpoint}.html"""'], {'item_id': 'item_id'}), "(f'{endpoint}.html', item_id=item_id, **snippets)\n", (1068, 1117), False, 'from flask import request, render_template, flash, session, Markup, redirect, url_for\n'), ((1316, 1363), 'flask.render_template', 'render_template', (['"""admin_login.html"""'], {}), "('admin_login.html', **snippets)\n", (1331, 1363), False, 'from flask import request, render_template, flash, session, Markup, redirect, url_for\n'), ((2219, 2243), 'flask.session.get', 'session.get', (['"""logged_in"""'], {}), "('logged_in')\n", (2230, 2243), False, 'from flask import request, render_template, flash, session, Markup, redirect, url_for\n'), ((2315, 2337), 'flask.url_for', 'url_for', (['"""admin_login"""'], {}), "('admin_login')\n", (2322, 2337), False, 'from flask import request, render_template, flash, session, Markup, redirect, url_for\n'), ((2386, 2439), 'flask.render_template', 'render_template', (['f"""admin_{endpoint}.html"""'], {}), "(f'admin_{endpoint}.html', **snippets)\n", (2401, 2439), False, 'from flask import request, render_template, flash, session, Markup, redirect, url_for\n'), ((363, 414), 'os.path.join', 'os.path.join', (['curdir', '"""templates"""', '"""snippets"""', 'file'], {}), "(curdir, 'templates', 'snippets', file)\n", (375, 414), False, 'import os\n'), ((448, 470), 'os.path.splitext', 'os.path.splitext', (['file'], {}), '(file)\n', (464, 470), False, 'import os\n'), ((1483, 1531), 'os.path.join', 'os.path.join', (['curdir', '"""data"""', '"""credentials.yaml"""'], {}), "(curdir, 'data', 'credentials.yaml')\n", (1495, 1531), False, 'import os\n'), ((1796, 1826), 'flask.session.get', 'session.get', (['"""target_endpoint"""'], {}), "('target_endpoint')\n", (1807, 1826), False, 'from flask import request, render_template, flash, session, Markup, redirect, url_for\n'), ((1762, 1792), 
'flask.session.get', 'session.get', (['"""target_endpoint"""'], {}), "('target_endpoint')\n", (1773, 1792), False, 'from flask import request, render_template, flash, session, Markup, redirect, url_for\n')]
|
import sys, heapq
input = sys.stdin.readline

# constant: larger than any possible path cost for this problem's limits
INF = 1234567

# function
def dijkstra(n, x, road):
    """Return a list where index i holds the shortest travel time from x to i.

    road[u] is a list of (v, t) pairs meaning edge u -> v with cost t.
    Nodes are 1-based; unreachable nodes (and index 0) keep the INF sentinel.
    """
    time = [INF for _ in range(n + 1)]
    time[x] = 0
    # Lazy Dijkstra: seed the heap with only the source instead of pushing an
    # (INF, i) entry for every node up front.
    h = [(0, x)]
    while h:
        dist, cur = heapq.heappop(h)
        if dist > time[cur]:
            # stale heap entry: a shorter path to cur was already settled
            continue
        for next, t in road[cur]:
            if dist + t < time[next]:
                time[next] = dist + t
                heapq.heappush(h, (time[next], next))
    return time
# input: n towns, m one-way roads, party town x
n, m, x = map(int, input().split())
road1 = [[] for _ in range(n + 1)]  # reversed edges (towards the party)
road2 = [[] for _ in range(n + 1)]  # original edges (back home)
for _ in range(m):
    start, end, t = map(int, input().split())
    road1[end].append((start, t))
    road2[start].append((end, t))

# process
'''
Run Dijkstra's algorithm twice.
1. Gathering at the party (town X):
   Run Dijkstra from X over the graph with reversed edges; because the roads
   are one-way, this yields each town's shortest time *to* X.
2. Going home from the party:
   Run Dijkstra from X over the original graph to get the shortest time
   from X to every town.
'''
time1 = dijkstra(n, x, road1)
time2 = dijkstra(n, x, road2)
total_time = [time1[i] + time2[i] for i in range(1, n + 1)]

# output: the longest round trip among all towns
print(max(total_time))
|
[
"heapq.heappush",
"heapq.heapify",
"heapq.heappop"
] |
[((261, 277), 'heapq.heapify', 'heapq.heapify', (['h'], {}), '(h)\n', (274, 277), False, 'import sys, heapq\n'), ((300, 316), 'heapq.heappop', 'heapq.heappop', (['h'], {}), '(h)\n', (313, 316), False, 'import sys, heapq\n'), ((466, 503), 'heapq.heappush', 'heapq.heappush', (['h', '(time[next], next)'], {}), '(h, (time[next], next))\n', (480, 503), False, 'import sys, heapq\n')]
|
import os
basedir = os.path.abspath(os.path.dirname(__file__))
WTF_CSRF_ENABLED = True
SECRET_KEY = '33stanlake#'
DEBUG = True
TESTING = True
LIVESERVER_PORT = 5000
APP_TITLE = 'Data Driven Simulation Management Database'
VERSION = '0.1-dev'
MONGODB_SETTINGS = {
'db': 'ddsm-integrate',
'host': 'localhost',
'port': 27017
}
|
[
"os.path.dirname"
] |
[((36, 61), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (51, 61), False, 'import os\n')]
|
#!/usr/bin/env python
'''
Autor: <NAME>
Licencia: MIT (Ver License)
Fecha: 5 de febrero de 2021
'''
import vlc
import time
import os
import re
import random
# Play every audio track found on the drive
def playMusic():
    """Play each path in the global `musica` list sequentially with VLC."""
    for track in musica:
        vlc_instance = vlc.Instance()
        media = vlc_instance.media_new(track)
        player = vlc_instance.media_player_new()
        player.set_media(media)
        # Parse the media so its duration becomes available
        media.parse_with_options(1, 0)
        while str(media.get_parsed_status()) != 'MediaParsedStatus.done':
            pass
        print('REPRODUCIENDO : ' + track)
        player.play()
        # Sleep for the track's duration (reported in milliseconds)
        time.sleep(media.get_duration() / 1000)
        player.stop()
# Play every video found on the drive
def playVid():
    """Play each path in the global `videos` list sequentially, fullscreen."""
    for vid_path in videos:
        vlc_instance = vlc.Instance()
        media = vlc_instance.media_new(vid_path)
        player = vlc_instance.media_player_new()
        player.set_media(media)
        player.set_fullscreen(True)
        # Parse the media so its duration becomes available
        media.parse_with_options(1, 0)
        while str(media.get_parsed_status()) != 'MediaParsedStatus.done':
            pass
        print('REPRODUCIENDO : ' + vid_path)
        player.play()
        # Sleep for the video's duration (reported in milliseconds)
        time.sleep(media.get_duration() / 1000)
        player.stop()
# Lists of files to play
fotos = []
musica = []
videos = []
# Recognised extensions
extFotos = ['jpg']
extMusica = ['mp3', 'wav', 'ogg']
extVideos = ['mp4', 'mkv', 'avi']
# Scan the mounted drive for playable files. Match on the real file
# extension (anchored at the end of the name) instead of re.search(),
# which classified any file whose name merely *contained* e.g. "mp3".
for root, dirs, names in os.walk("/media/pi/"):
    for name in names:
        lowered = name.lower()
        path = os.path.join(root, name)
        if lowered.endswith(tuple('.' + ext for ext in extMusica)):
            # strip macOS "._" resource-fork artifacts from the path
            musica.append(path.replace("._", ""))
        elif lowered.endswith(tuple('.' + ext for ext in extVideos)):
            videos.append(path.replace("._", ""))
        elif lowered.endswith(tuple('.' + ext for ext in extFotos)):
            fotos.append(path)
# Remove duplicates while keeping first-seen order.
# dict.fromkeys preserves insertion order (Python 3.7+) and replaces the
# previous O(n^2) "if i not in aux" membership loops.
musica = list(dict.fromkeys(musica))
videos = list(dict.fromkeys(videos))
fotos = list(dict.fromkeys(fotos))
# Shuffle each playlist into a random order (in place)
if (len(musica) > 0):
    random.shuffle(musica)
if (len(videos) > 0):
    random.shuffle(videos)
if (len(fotos) > 0):
    random.shuffle(fotos)
# Main behaviour: auto-play when exactly one media type was found,
# otherwise ask the user what to play.
if (len(videos)>0 and len(fotos)<=0 and len(musica)<=0):
    playVid()
elif (len(musica)>0 and len(fotos)<=0 and len(videos)<=0):
    playMusic()
else:
    string = """
    Escoja los medios que quiera reproducir:
    1. Musica
    2. Videos
    """
    print(string)
    # raw_input only exists on Python 2; fall back to input() on Python 3
    # so the script no longer crashes with NameError there.
    try:
        _leer = raw_input
    except NameError:
        _leer = input
    ingresado = _leer()
    if(ingresado == "1"):
        playMusic()
    elif(ingresado == "2"):
        playVid()
    else:
        print('Opcion invalida. Ejecute el programa nuevamente')
|
[
"random.shuffle",
"os.walk",
"re.search",
"vlc.Instance",
"os.path.join"
] |
[((1537, 1558), 'os.walk', 'os.walk', (['"""/media/pi/"""'], {}), "('/media/pi/')\n", (1544, 1558), False, 'import os\n'), ((2330, 2352), 'random.shuffle', 'random.shuffle', (['musica'], {}), '(musica)\n', (2344, 2352), False, 'import random\n'), ((2380, 2402), 'random.shuffle', 'random.shuffle', (['videos'], {}), '(videos)\n', (2394, 2402), False, 'import random\n'), ((2429, 2450), 'random.shuffle', 'random.shuffle', (['fotos'], {}), '(fotos)\n', (2443, 2450), False, 'import random\n'), ((250, 264), 'vlc.Instance', 'vlc.Instance', ([], {}), '()\n', (262, 264), False, 'import vlc\n'), ((788, 802), 'vlc.Instance', 'vlc.Instance', ([], {}), '()\n', (800, 802), False, 'import vlc\n'), ((1630, 1656), 're.search', 're.search', (['patternM', 'files'], {}), '(patternM, files)\n', (1639, 1656), False, 'import re\n'), ((1777, 1803), 're.search', 're.search', (['patternV', 'files'], {}), '(patternV, files)\n', (1786, 1803), False, 'import re\n'), ((1924, 1950), 're.search', 're.search', (['patternF', 'files'], {}), '(patternF, files)\n', (1933, 1950), False, 'import re\n'), ((1981, 2003), 'os.path.join', 'os.path.join', (['r', 'files'], {}), '(r, files)\n', (1993, 2003), False, 'import os\n'), ((1687, 1709), 'os.path.join', 'os.path.join', (['r', 'files'], {}), '(r, files)\n', (1699, 1709), False, 'import os\n'), ((1835, 1857), 'os.path.join', 'os.path.join', (['r', 'files'], {}), '(r, files)\n', (1847, 1857), False, 'import os\n')]
|
from groupon.services import list_alive_groupon_by_product_ids
from logs.services import create_product_log
from order.selectors import list_order_with_order_details_by_product_id
def list_order_with_order_details_by_product_id_interface(shop_id: int, product_id: int):
"""通过货品ID列出订单,带订单详情"""
order_list = list_order_with_order_details_by_product_id(shop_id, product_id)
return order_list
def list_alive_groupon_by_product_ids_interface(product_ids: list):
"""查询现在或者未来有拼团活动的商品ID"""
product_ids_set = list_alive_groupon_by_product_ids(product_ids)
return product_ids_set
def create_product_log_interface(log_info: dict):
"""创建一条货品模块日志"""
log = create_product_log(log_info)
return log
|
[
"logs.services.create_product_log",
"order.selectors.list_order_with_order_details_by_product_id",
"groupon.services.list_alive_groupon_by_product_ids"
] |
[((316, 380), 'order.selectors.list_order_with_order_details_by_product_id', 'list_order_with_order_details_by_product_id', (['shop_id', 'product_id'], {}), '(shop_id, product_id)\n', (359, 380), False, 'from order.selectors import list_order_with_order_details_by_product_id\n'), ((524, 570), 'groupon.services.list_alive_groupon_by_product_ids', 'list_alive_groupon_by_product_ids', (['product_ids'], {}), '(product_ids)\n', (557, 570), False, 'from groupon.services import list_alive_groupon_by_product_ids\n'), ((681, 709), 'logs.services.create_product_log', 'create_product_log', (['log_info'], {}), '(log_info)\n', (699, 709), False, 'from logs.services import create_product_log\n')]
|
import matplotlib.pyplot as plt
import numpy as np
from typing import Callable
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import datasets, layers, models
import skimage
from skimage.metrics import structural_similarity as ssim
from sklearn.model_selection import train_test_split
from deep_raman import utils
from deep_raman import metrics
import streamlit as st
def main(num_epochs: int, loss_function: Callable):
x = np.linspace(-200, 200, 1024)
X, y = utils.generate_training_set(x, num_base_examples=64)
X_train, X_test, y_train, y_test = train_test_split(
X, y, test_size=0.33, random_state=42
)
x_train = np.array(X_train).reshape(-1, 1024, 1)
x_test = np.array(X_test).reshape(-1, 1024, 1)
y_train = np.array(y_train).reshape(-1, 1024, 1)
y_test = np.array(y_test).reshape(-1, 1024, 1)
inputs = keras.Input(shape=(32 * 32, 1))
x = layers.BatchNormalization(axis=-1)(inputs)
x = layers.Conv1D(16, 16, input_shape=(32 * 32, 1))(inputs)
x = layers.MaxPooling1D(2)(x)
x = layers.Conv1D(16, 16, 16)(x)
x = layers.MaxPooling1D(3)(x)
x = layers.Conv1D(64, 10)(x)
outputs = layers.Conv1DTranspose(1, 1024)(x)
model = models.Model(inputs=inputs, outputs=outputs, name="cnn_model")
model.compile(
loss=loss_function,
optimizer=keras.optimizers.Nadam(learning_rate=3e-3),
metrics=["mae", "mape"],
)
history = model.fit(
x_train,
y_train,
batch_size=64,
epochs=num_epochs,
validation_split=0.2,
)
test_scores = model.evaluate(x_test, y_test, verbose=2)
st.write("Test loss:", test_scores[0])
st.write("Test accuracy:", test_scores[1])
sample_input, sample_prediction_, sample_target_ = (
x_train[0:1],
model.predict(x_train[0:1]),
y_train[0:1],
)
return sample_input, sample_prediction_, sample_target_
if __name__ == "__main__":
loss_options = {
"peak signal to noise ratio": metrics.psnr_loss,
"mean absolute error": keras.losses.mean_absolute_error,
"mean squared error": keras.losses.mean_squared_error,
}
NUM_EPOCHS = st.selectbox("Number of epochs", [10**i for i in range(0, 3)])
loss_choice = st.selectbox("Loss function", loss_options.keys())
LOSS_FUNCTION = loss_options[loss_choice]
sample_input, sample_prediction_, sample_target_ = main(NUM_EPOCHS, LOSS_FUNCTION)
fig = plt.figure(figsize=(12, 8))
plt.subplot(311)
plt.title("Sample Input")
plt.plot(sample_input.ravel())
plt.subplot(312)
plt.title("Sample Prediction")
plt.plot(sample_prediction_.ravel())
plt.subplot(313)
plt.title("Sample Target")
plt.plot(sample_target_.ravel())
fig.tight_layout()
fig # We call the fig so it will get picked up by streamlit magic.
# TODO: Visualize difference between train loss and test loss - something like tensorboard?
|
[
"matplotlib.pyplot.title",
"matplotlib.pyplot.subplot",
"tensorflow.keras.layers.BatchNormalization",
"deep_raman.utils.generate_training_set",
"sklearn.model_selection.train_test_split",
"tensorflow.keras.Input",
"tensorflow.keras.layers.Conv1D",
"tensorflow.keras.layers.MaxPooling1D",
"tensorflow.keras.layers.Conv1DTranspose",
"streamlit.write",
"tensorflow.keras.optimizers.Nadam",
"tensorflow.keras.models.Model",
"matplotlib.pyplot.figure",
"numpy.array",
"numpy.linspace"
] |
[((461, 489), 'numpy.linspace', 'np.linspace', (['(-200)', '(200)', '(1024)'], {}), '(-200, 200, 1024)\n', (472, 489), True, 'import numpy as np\n'), ((502, 554), 'deep_raman.utils.generate_training_set', 'utils.generate_training_set', (['x'], {'num_base_examples': '(64)'}), '(x, num_base_examples=64)\n', (529, 554), False, 'from deep_raman import utils\n'), ((595, 650), 'sklearn.model_selection.train_test_split', 'train_test_split', (['X', 'y'], {'test_size': '(0.33)', 'random_state': '(42)'}), '(X, y, test_size=0.33, random_state=42)\n', (611, 650), False, 'from sklearn.model_selection import train_test_split\n'), ((889, 920), 'tensorflow.keras.Input', 'keras.Input', ([], {'shape': '(32 * 32, 1)'}), '(shape=(32 * 32, 1))\n', (900, 920), False, 'from tensorflow import keras\n'), ((1238, 1300), 'tensorflow.keras.models.Model', 'models.Model', ([], {'inputs': 'inputs', 'outputs': 'outputs', 'name': '"""cnn_model"""'}), "(inputs=inputs, outputs=outputs, name='cnn_model')\n", (1250, 1300), False, 'from tensorflow.keras import datasets, layers, models\n'), ((1661, 1699), 'streamlit.write', 'st.write', (['"""Test loss:"""', 'test_scores[0]'], {}), "('Test loss:', test_scores[0])\n", (1669, 1699), True, 'import streamlit as st\n'), ((1704, 1746), 'streamlit.write', 'st.write', (['"""Test accuracy:"""', 'test_scores[1]'], {}), "('Test accuracy:', test_scores[1])\n", (1712, 1746), True, 'import streamlit as st\n'), ((2491, 2518), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 8)'}), '(figsize=(12, 8))\n', (2501, 2518), True, 'import matplotlib.pyplot as plt\n'), ((2524, 2540), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(311)'], {}), '(311)\n', (2535, 2540), True, 'import matplotlib.pyplot as plt\n'), ((2545, 2570), 'matplotlib.pyplot.title', 'plt.title', (['"""Sample Input"""'], {}), "('Sample Input')\n", (2554, 2570), True, 'import matplotlib.pyplot as plt\n'), ((2611, 2627), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(312)'], {}), '(312)\n', 
(2622, 2627), True, 'import matplotlib.pyplot as plt\n'), ((2632, 2662), 'matplotlib.pyplot.title', 'plt.title', (['"""Sample Prediction"""'], {}), "('Sample Prediction')\n", (2641, 2662), True, 'import matplotlib.pyplot as plt\n'), ((2709, 2725), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(313)'], {}), '(313)\n', (2720, 2725), True, 'import matplotlib.pyplot as plt\n'), ((2730, 2756), 'matplotlib.pyplot.title', 'plt.title', (['"""Sample Target"""'], {}), "('Sample Target')\n", (2739, 2756), True, 'import matplotlib.pyplot as plt\n'), ((929, 963), 'tensorflow.keras.layers.BatchNormalization', 'layers.BatchNormalization', ([], {'axis': '(-1)'}), '(axis=-1)\n', (954, 963), False, 'from tensorflow.keras import datasets, layers, models\n'), ((981, 1028), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', (['(16)', '(16)'], {'input_shape': '(32 * 32, 1)'}), '(16, 16, input_shape=(32 * 32, 1))\n', (994, 1028), False, 'from tensorflow.keras import datasets, layers, models\n'), ((1045, 1067), 'tensorflow.keras.layers.MaxPooling1D', 'layers.MaxPooling1D', (['(2)'], {}), '(2)\n', (1064, 1067), False, 'from tensorflow.keras import datasets, layers, models\n'), ((1079, 1104), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', (['(16)', '(16)', '(16)'], {}), '(16, 16, 16)\n', (1092, 1104), False, 'from tensorflow.keras import datasets, layers, models\n'), ((1116, 1138), 'tensorflow.keras.layers.MaxPooling1D', 'layers.MaxPooling1D', (['(3)'], {}), '(3)\n', (1135, 1138), False, 'from tensorflow.keras import datasets, layers, models\n'), ((1151, 1172), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', (['(64)', '(10)'], {}), '(64, 10)\n', (1164, 1172), False, 'from tensorflow.keras import datasets, layers, models\n'), ((1190, 1221), 'tensorflow.keras.layers.Conv1DTranspose', 'layers.Conv1DTranspose', (['(1)', '(1024)'], {}), '(1, 1024)\n', (1212, 1221), False, 'from tensorflow.keras import datasets, layers, models\n'), ((680, 697), 'numpy.array', 'np.array', (['X_train'], 
{}), '(X_train)\n', (688, 697), True, 'import numpy as np\n'), ((732, 748), 'numpy.array', 'np.array', (['X_test'], {}), '(X_test)\n', (740, 748), True, 'import numpy as np\n'), ((785, 802), 'numpy.array', 'np.array', (['y_train'], {}), '(y_train)\n', (793, 802), True, 'import numpy as np\n'), ((837, 853), 'numpy.array', 'np.array', (['y_test'], {}), '(y_test)\n', (845, 853), True, 'import numpy as np\n'), ((1367, 1410), 'tensorflow.keras.optimizers.Nadam', 'keras.optimizers.Nadam', ([], {'learning_rate': '(0.003)'}), '(learning_rate=0.003)\n', (1389, 1410), False, 'from tensorflow import keras\n')]
|
from flask import Response
from thupoll.models import Vote
from tests.factories import Factory
from tests.utils import marshall, get_past_datetime
def test__marshall(vote):
assert marshall(vote) == dict(
id=vote.id,
created=vote.created_date.isoformat(),
updated=vote.change_date.isoformat(),
people_id=vote.people_id,
pole_id=vote.themepoll.poll_id,
theme_id=vote.themepoll.theme_id,
)
def test__get_votes(db_session, client, user_headers, poll):
themepoll = Factory.themepoll(poll=poll)
Factory.vote(themepoll=themepoll)
r = client.get("/polls/{}/votes".format(poll.id), headers=user_headers)
assert r.status_code == 200, r.get_json()
assert r.get_json() == dict(results=marshall(poll))
def _post_votes(client, poll_id, themes, headers) -> Response:
return client.post(
"/polls/{}/votes".format(poll_id),
json=[dict(theme_id=theme.id) for theme in themes],
headers=headers,
)
def test__set_any_votes__correct(
db_session, client, peoplenamespace, user_headers, poll,
):
people_id = peoplenamespace.people_id
poll_id = poll.id
theme = Factory.themepoll(poll=poll).theme
r = _post_votes(
client=client, poll_id=poll_id, themes=[theme], headers=user_headers,
)
assert r.status_code == 200, r.get_json()
vote = r.get_json()["results"]["votes"][0]
assert vote["people_id"] == people_id
assert vote["pole_id"] == poll_id
assert vote["theme_id"] == theme.id
def test__set_any_votes__expire(
db_session, client, peoplenamespace, user_headers,
):
poll = Factory.poll(
expire_date=get_past_datetime(), namespace=peoplenamespace.namespace,
)
theme = Factory.themepoll(poll=poll).theme
r = _post_votes(
client=client, poll_id=poll.id, themes=[theme], headers=user_headers,
)
assert r.status_code == 422, r.get_json()
assert r.get_json() == {
"_schema": ["Datetime {} from past".format(poll.expire_date)],
}
def test__set_any_votes__not_themepoll(
db_session, client, peoplenamespace, user_headers, poll,
):
theme = Factory.themepoll().theme
r = _post_votes(
client=client, poll_id=poll.id, themes=[theme], headers=user_headers,
)
assert r.status_code == 422, r.get_json()
assert r.get_json() == {
"_schema": [
"ThemePoll with poll_id={} theme_id={} does not exists".format(
poll.id, theme.id,
),
],
}
def test__set_any_votes__two_theme(
db_session, client, peoplenamespace, user_headers, poll,
):
theme1 = Factory.themepoll(poll=poll).theme
theme2 = Factory.themepoll(poll=poll).theme
r = _post_votes(
client=client,
poll_id=poll.id,
themes=[theme1, theme2],
headers=user_headers,
)
assert r.status_code == 200, r.get_json()
assert len(r.get_json()["results"]["votes"]) == 2
def test__set_any_votes__empty_json(
db_session, client, peoplenamespace, user_headers, poll,
):
r = _post_votes(
client=client,
poll_id=poll.id,
themes=[],
headers=user_headers,
)
assert r.status_code == 422, r.get_json()
assert r.get_json() == {"_schema": ["Sequence 'themes' is empty"]}
def test__set_any_votes__drop_old(
db_session, client, peoplenamespace, user_headers, poll,
):
themepoll = Factory.themepoll(poll=poll)
Factory.vote(themepoll=themepoll, people=peoplenamespace.people)
r = client.delete(
"/polls/{}/votes".format(poll.id),
headers=user_headers,
)
assert r.status_code == 200, r.get_json()
assert not db_session.query(Vote).count()
|
[
"tests.factories.Factory.themepoll",
"tests.factories.Factory.vote",
"tests.utils.get_past_datetime",
"tests.utils.marshall"
] |
[((525, 553), 'tests.factories.Factory.themepoll', 'Factory.themepoll', ([], {'poll': 'poll'}), '(poll=poll)\n', (542, 553), False, 'from tests.factories import Factory\n'), ((558, 591), 'tests.factories.Factory.vote', 'Factory.vote', ([], {'themepoll': 'themepoll'}), '(themepoll=themepoll)\n', (570, 591), False, 'from tests.factories import Factory\n'), ((3424, 3452), 'tests.factories.Factory.themepoll', 'Factory.themepoll', ([], {'poll': 'poll'}), '(poll=poll)\n', (3441, 3452), False, 'from tests.factories import Factory\n'), ((3457, 3521), 'tests.factories.Factory.vote', 'Factory.vote', ([], {'themepoll': 'themepoll', 'people': 'peoplenamespace.people'}), '(themepoll=themepoll, people=peoplenamespace.people)\n', (3469, 3521), False, 'from tests.factories import Factory\n'), ((187, 201), 'tests.utils.marshall', 'marshall', (['vote'], {}), '(vote)\n', (195, 201), False, 'from tests.utils import marshall, get_past_datetime\n'), ((1170, 1198), 'tests.factories.Factory.themepoll', 'Factory.themepoll', ([], {'poll': 'poll'}), '(poll=poll)\n', (1187, 1198), False, 'from tests.factories import Factory\n'), ((1743, 1771), 'tests.factories.Factory.themepoll', 'Factory.themepoll', ([], {'poll': 'poll'}), '(poll=poll)\n', (1760, 1771), False, 'from tests.factories import Factory\n'), ((2154, 2173), 'tests.factories.Factory.themepoll', 'Factory.themepoll', ([], {}), '()\n', (2171, 2173), False, 'from tests.factories import Factory\n'), ((2640, 2668), 'tests.factories.Factory.themepoll', 'Factory.themepoll', ([], {'poll': 'poll'}), '(poll=poll)\n', (2657, 2668), False, 'from tests.factories import Factory\n'), ((2688, 2716), 'tests.factories.Factory.themepoll', 'Factory.themepoll', ([], {'poll': 'poll'}), '(poll=poll)\n', (2705, 2716), False, 'from tests.factories import Factory\n'), ((1667, 1686), 'tests.utils.get_past_datetime', 'get_past_datetime', ([], {}), '()\n', (1684, 1686), False, 'from tests.utils import marshall, get_past_datetime\n'), ((754, 768), 
'tests.utils.marshall', 'marshall', (['poll'], {}), '(poll)\n', (762, 768), False, 'from tests.utils import marshall, get_past_datetime\n')]
|
import curses
from curses import textpad
import os
import threading
import traceback
import time
import client
import curses_util
class GUI():
def __init__(self, stdscr):
global client_obj
self.client_obj = client_obj
self.username = 'G'
self.password = 'password'
self.y = 10
self.msg_list = []
threading.Thread(target=self.receive_msg).start()
self.to = '$user$:G:'
self.msg = ''
self.last_msg = 'Nothing'
curses.noecho()
curses.curs_set(0)
#curses.mousemask(1) #do not show cursor on click (turn off for debugging)
curses.start_color()
self.stdscr = stdscr
self.stdscr.erase()
#self.stdscr.nodelay(1)
y, x = self.stdscr.getmaxyx()
x1 = round((x/12)*3)
x2 = x1 + round((x/12)*7)
self.tab_focus = 'dashboard'
self.draw_widgets(init = True)
self.main()
def draw_widgets(self, init=False):
self.stdscr.erase()
y, x = self.stdscr.getmaxyx()
print(y, x)
x1 = round((x/12)*3)
x2 = x1 + round((x/12)*7)
height_chats = round(y/2)
groups_height = y - height_chats
self.status_bar = self.stdscr.subwin(2, round((x/12)*7), 0, x1)
self.status_bar.box()
self.text_box_bar = self.stdscr.subwin(3, round((x/12)*7), round(y)-3, x1) #rows, columns, y, x
self.text_box_bar.box()
self.main_win = self.stdscr.subwin(round(y)-5, round((x/12)*7), 2, x1)
self.main_win.box()
try:
self.active_users_win = self.stdscr.subwin(round(y), round((x/12)*2), 0, x2)
except curses.error:
self.active_users_win = self.stdscr.subwin(round(y), round((x/12)*2), 0, x2-1)
self.active_users_win.box()
'''
Chats and Groups
'''
self.chats_win_chats = self.stdscr.subwin(height_chats, round((x/12)*3), 0, 0) #rows, columns, y, x
self.chats_win_chats.box()
self.chats_win_groups = self.stdscr.subwin(groups_height, round((x/12)*3), height_chats, 0) #rows, columns, y, x
self.chats_win_groups.box()
self.stdscr.addstr(0, 2, 'Chats')
self.stdscr.addstr(height_chats, 2, 'Groups')
if init == True:
self.clock = curses_util.Clock(self.stdscr, 0, round((x/12)*3 + 2))
self.tb = curses_util.Textbox(self.stdscr, round(y)-2, x1+1)
self.dashboard = curses_util.Scrollpad(self.stdscr, 1024*1024, round((x/12)*7)-2, uy=3, ux=(x1+1), dy=round(y)-6, dx=(x1+1)+round((x/12)*7)-3)
self.chats_dashboard = curses_util.AdvancedScrollpad(self.stdscr, 1024*1024, round((x/12)*3)-3, uy=2, ux=2, dy=height_chats-2, dx=round((x/12)*3)-3)
#self.chats_dashboard.load_file(os.getcwd() + '\\chats.txt')
self.dashboard.resize(lines=(1024*1024), columns=(round((x/12)*7)-2), uy=3, ux=(x1+1), dy=round(y)-6, dx=(x1+1)+round((x/12)*7)-3)
self.chats_dashboard.resize(lines=(1024*1024), columns=(round((x/12)*3)-3), uy=2, ux=2, dy=height_chats-2, dx=round((x/12)*3)-3)
textbox_text = self.tb.text
#print(textbox_text)
self.tb = None
self.tb = curses_util.Textbox(self.stdscr, round(y)-2, x1+1, text=textbox_text)
self.tb.rewrite(textbox_text)
'''
Status bar
'''
self.clock.redraw(self.stdscr, round((x/12)*3 + 2))
self.stdscr.refresh()
def receive_msg(self):
while True:
time.sleep(0.0001)
self.msg = self.client_obj.msg
#print(self.msg)
if self.msg != None and self.last_msg != self.msg:
print(self.msg)
self.last_msg = self.msg
print('Processing...')
user_list = self.msg.split('@', 1)
user = user_list[0]
msg_list = user_list[1].split(':', 1)
where = msg_list[0]
msg = msg_list[1]
y, x = self.stdscr.getmaxyx()
self.y += 2
self.x = round((x/12)*3)
print(user, msg)
'''
Format msg
'''
self.msg = self.msg.replace('\\n', '\n')
self.dashboard.add_text(str(user) + ': ', 4)
self.dashboard.add_text(str(msg), 2)
self.stdscr.refresh()
def main(self):
try:
self.draw_widgets()
while True:
time.sleep(0.0001)
key = self.stdscr.getch()
print(key)
if self.tab_focus == 'dashboard':
self.dashboard.input(key)
elif self.tab_focus == 'chats':
self.chats_dashboard.input(key)
elif self.tab_focus == 'groups':
pass
else:
self.tab_focus = 'dashboard'
#print('INPUT')
if key != curses.KEY_RESIZE:# and key != curses.KEY_UP and key != curses.KEY_DOWN:
if key == 351:
if self.tab_focus == 'dashboard':
self.tab_focus = 'chats'
elif self.tab_focus == 'chats':
self.tab_focus = 'groups'
elif self.tab_focus == 'groups':
self.tab_focus = 'dashboard'
else:
self.tab_focus = 'dashboard'
if key == curses.KEY_MOUSE:
pass
if key != curses.KEY_UP and key != curses.KEY_DOWN and key != curses.KEY_MOUSE and key != 351:
self.tb.key_input(key)
#self.stdscr.refresh()
#self.stdscr.doupdate()
#print(key) #for debugging
if key == 289:# or curses.KEY_F1: #for debugging
break
elif key == 8:
#self.draw_widgets()
pass
elif key == curses.KEY_ENTER or key == 10 or key == 13:
self.client_obj.to = self.to
self.client_obj.send(self.tb.text)
self.dashboard.add_text('$YOU$' + ': ', 6)
self.dashboard.add_text(str(self.tb.text), 3)
self.tb.empty()
else:
pass
else:
curses.resize_term(0, 0)
self.stdscr.erase()
self.draw_widgets()
#self.tb.key_input(key)
#self.msg = self.msg + chr(key)
#self.stdscr.addstr(30, len(self.msg), chr(key))
except Exception as e:
traceback.print_exc()
curses.endwin()
os._exit(1)
class Window():
def __init__(self, stdscr, height, width, y, x):
self.stdscr = stdscr
self.win = self.stdscr.subwin(height, width, y, x)
def redraw(self, height, width, y, x):
self.win.erase()
self.win = None
self.win = self.stdscr.subwin(height, width, y, x)
def main():
curses.wrapper(GUI)
def start(client_obj_):
global client_obj
client_obj = client_obj_
main()
|
[
"threading.Thread",
"traceback.print_exc",
"curses.resize_term",
"curses.noecho",
"curses.wrapper",
"curses.endwin",
"curses.start_color",
"time.sleep",
"os._exit",
"curses.curs_set"
] |
[((5978, 5997), 'curses.wrapper', 'curses.wrapper', (['GUI'], {}), '(GUI)\n', (5992, 5997), False, 'import curses\n'), ((466, 481), 'curses.noecho', 'curses.noecho', ([], {}), '()\n', (479, 481), False, 'import curses\n'), ((485, 503), 'curses.curs_set', 'curses.curs_set', (['(0)'], {}), '(0)\n', (500, 503), False, 'import curses\n'), ((585, 605), 'curses.start_color', 'curses.start_color', ([], {}), '()\n', (603, 605), False, 'import curses\n'), ((5646, 5661), 'curses.endwin', 'curses.endwin', ([], {}), '()\n', (5659, 5661), False, 'import curses\n'), ((5665, 5676), 'os._exit', 'os._exit', (['(1)'], {}), '(1)\n', (5673, 5676), False, 'import os\n'), ((3190, 3208), 'time.sleep', 'time.sleep', (['(0.0001)'], {}), '(0.0001)\n', (3200, 3208), False, 'import time\n'), ((338, 379), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.receive_msg'}), '(target=self.receive_msg)\n', (354, 379), False, 'import threading\n'), ((3951, 3969), 'time.sleep', 'time.sleep', (['(0.0001)'], {}), '(0.0001)\n', (3961, 3969), False, 'import time\n'), ((5621, 5642), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (5640, 5642), False, 'import traceback\n'), ((5387, 5411), 'curses.resize_term', 'curses.resize_term', (['(0)', '(0)'], {}), '(0, 0)\n', (5405, 5411), False, 'import curses\n')]
|
import os
import nlp
import json
import random
import datetime
import tokenizers
import numpy as np
import transformers
import pandas as pd
import tensorflow as tf
import plotly.express as px
import matplotlib.pyplot as plt
from sklearn.utils import shuffle
def getUnbatchedDataset(trainDataSet, modelName, maxLength=64):
if type(trainDataSet) == list:
trainDataSet = {k: None for k in trainDataSet}
trainDataSet = {k: v for k, v in trainDataSet.items() if k in raw_ds_mapping}
tokenizer = transformers.AutoTokenizer.from_pretrained(modelName, use_fast=True)
# This is a list of generators
raw_datasets = [get_raw_dataset(x) for x in trainDataSet]
nb_examples = 0
labels = []
sentence_pairs = []
for name in trainDataSet:
raw_ds = get_raw_dataset(name)
nb_examples_to_use = raw_ds_mapping[name][2]
if trainDataSet[name]:
nb_examples_to_use = min(trainDataSet[name], nb_examples_to_use)
nb_examples += nb_examples_to_use
n = 0
for x in raw_ds:
sentence_pairs.append((x['premise'], x['hypothesis']))
labels.append(x['label'])
n += 1
if n >= nb_examples_to_use:
break
# `transformers.tokenization_utils_base.BatchEncoding` object -> `dict`
r = dict(tokenizer.batch_encode_plus(batch_text_or_text_pairs = sentence_pairs, max_length = maxLength, padding = 'max_length', truncation = True))
# This is very slow
dataset = tf.data.Dataset.from_tensor_slices((r, labels))
return dataset, nb_examples
def getBatchedTrainingDataset(dataset, nb_examples, batch_size = 16, shuffleBufferSize = 1, repeat = False):
if repeat:
dataset = dataset.repeat()
if not shuffle_buffer_size:
shuffle_buffer_size = nb_examples
dataset = dataset.shuffle(shuffle_buffer_size)
dataset = dataset.batch(batch_size, drop_remainder=True)
dataset = dataset.prefetch(tf.data.experimental.AUTOTUNE)
return dataset
def getPredictionDataset(dataset, batch_size = 16):
dataset = dataset.batch(batch_size, drop_remainder=False)
dataset = dataset.prefetch(tf.data.experimental.AUTOTUNE)
return dataset
|
[
"transformers.AutoTokenizer.from_pretrained",
"tensorflow.data.Dataset.from_tensor_slices"
] |
[((531, 599), 'transformers.AutoTokenizer.from_pretrained', 'transformers.AutoTokenizer.from_pretrained', (['modelName'], {'use_fast': '(True)'}), '(modelName, use_fast=True)\n', (573, 599), False, 'import transformers\n'), ((1578, 1625), 'tensorflow.data.Dataset.from_tensor_slices', 'tf.data.Dataset.from_tensor_slices', (['(r, labels)'], {}), '((r, labels))\n', (1612, 1625), True, 'import tensorflow as tf\n')]
|
#!pcsx2py
# This volatile module `monitor.00000000` may be re-loaded and loose all variables on some events:
# - Game startup (on _reloadElfInfo)
# - Resume, where it is suspended by pressing ESC key (on AppCoreThread::Resume)
# Not:
# - Resume, where it is suspended by System menu → Pause
# - Load game state
import pcsx2
pcsx2.WriteLn('Hello')
|
[
"pcsx2.WriteLn"
] |
[((338, 360), 'pcsx2.WriteLn', 'pcsx2.WriteLn', (['"""Hello"""'], {}), "('Hello')\n", (351, 360), False, 'import pcsx2\n')]
|
from itertools import count as memoryhog
list(memoryhog(0))
|
[
"itertools.count"
] |
[((49, 61), 'itertools.count', 'memoryhog', (['(0)'], {}), '(0)\n', (58, 61), True, 'from itertools import count as memoryhog\n')]
|
#!/usr/bin/env python3
import time
import zmq
import random
import sys
# Connect to the master port
print("Client socket waiting for connection...")
# Set desired master port number
masterPort = "5000"
#############################################################
# Function to request port number from master server
#############################################################
def getPort(nodeType):
# Set up request socket
context = zmq.Context()
socket = context.socket(zmq.REQ)
socket.connect("tcp://localhost:%s" % masterPort)
# Request to subscribe to channel 1
channel = 1
print("Sending request...")
socket.send_string("%d %s" % (channel, nodeType))
# Get the backend port number reply from the master
port = socket.recv_string()
print("Received Backend Port Number: ", port)
return port
# Start subscribing to the received port here
# Call the getPort function here
socketPort = getPort("SUBSCRIBER")
context = zmq.Context()
subscriber = context.socket(zmq.SUB)
subscriber.connect("tcp://localhost:%s" % socketPort)
# Subscribe to channel 1
topic = "1"
channel = bytes(topic,"ascii")
subscriber.setsockopt(zmq.SUBSCRIBE, channel)
print("Socket connecting to Forwarder backend port...")
while True:
# Keep receiving messages from publisher
message = subscriber.recv_string()
channel, messagedata = message.split()
print("Message Received: ", channel, messagedata)
|
[
"zmq.Context"
] |
[((957, 970), 'zmq.Context', 'zmq.Context', ([], {}), '()\n', (968, 970), False, 'import zmq\n'), ((446, 459), 'zmq.Context', 'zmq.Context', ([], {}), '()\n', (457, 459), False, 'import zmq\n')]
|
from django.db import models
# Create your models here.
class User(models.Model):
user = models.AutoField(primary_key=True)
name = models.TextField(max_length=128, default='', null=False)
login = models.TextField(max_length=128, default='', null=False)
password = models.TextField(max_length=128, default='', null=False)
email = models.TextField(max_length=128, default='', null=False)
class Role(models.Model):
role = models.AutoField(primary_key=True)
name = models.TextField(max_length=128, default='', null=False)
class Team(models.Model):
team = models.AutoField(primary_key=True)
name = models.TextField(max_length=128, null=False)
user = models.ForeignKey(User, on_delete=models.CASCADE)
pictureurl = models.TextField(max_length=128, default='', null=False)
class TeamUser(models.Model):
teamuser = models.AutoField(primary_key=True)
team = models.ForeignKey(Team, on_delete=False, default=None, null=True)
user = models.ForeignKey(User, on_delete=models.CASCADE)
role = models.BooleanField(default=None, null=True)
class Event(models.Model):
event = models.AutoField(primary_key=True)
name = models.TextField(max_length=128, default='', null=False)
pictureurl = models.TextField(max_length=128, default='', null=False)
description = models.TextField(max_length=10000, default='', null=False)
status = models.BooleanField(default=False, null=True)
class EventTeamUser(models.Model):
eventteamuser = models.AutoField(primary_key=True)
event = models.ForeignKey(Event, on_delete=models.CASCADE)
teamuser = models.ForeignKey(TeamUser, on_delete=models.CASCADE)
role = models.ForeignKey(Role, on_delete=models.CASCADE)
class Task(models.Model):
task = models.AutoField(primary_key=True)
name = models.TextField(max_length=128, default='', null=False)
category = models.TextField(max_length=128, default='', null=False)
weight = models.FloatField(max_length=128, default=0.0, null=False)
flag = models.TextField(max_length=128, default='', null=False)
description = models.TextField(max_length=10000, default='', null=False)
# Автор задания
user = models.ForeignKey(User, on_delete=models.CASCADE)
class EventTask(models.Model):
eventtask = models.AutoField(primary_key=True)
event = models.ForeignKey(Event, on_delete=models.CASCADE)
task = models.ForeignKey(Task, on_delete=models.CASCADE)
class Sponsor(models.Model):
sponsor = models.AutoField(primary_key=True)
name = models.TextField(max_length=128, null=False)
pictureurl = models.TextField(max_length=128, default='', null=False)
class EventSponsor(models.Model):
eventsponsor = models.AutoField(primary_key=True)
event = models.ForeignKey(Event, on_delete=models.CASCADE)
sponsor = models.ForeignKey(Sponsor, on_delete=models.CASCADE)
class Hint(models.Model):
hint = models.AutoField(primary_key=True)
task = models.ForeignKey(Task, on_delete=models.CASCADE)
description = models.TextField(max_length=2048, default='', null=False)
class Solution(models.Model):
solution = models.AutoField(primary_key=True)
teamuser = models.ForeignKey(TeamUser, on_delete=models.CASCADE)
eventtask = models.ForeignKey(EventTask, on_delete=models.CASCADE)
status = models.BooleanField(default=None, null=False)
date = models.TimeField(default=None, null=False)
|
[
"django.db.models.TextField",
"django.db.models.TimeField",
"django.db.models.ForeignKey",
"django.db.models.FloatField",
"django.db.models.BooleanField",
"django.db.models.AutoField"
] |
[((96, 130), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (112, 130), False, 'from django.db import models\n'), ((142, 198), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(128)', 'default': '""""""', 'null': '(False)'}), "(max_length=128, default='', null=False)\n", (158, 198), False, 'from django.db import models\n'), ((211, 267), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(128)', 'default': '""""""', 'null': '(False)'}), "(max_length=128, default='', null=False)\n", (227, 267), False, 'from django.db import models\n'), ((283, 339), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(128)', 'default': '""""""', 'null': '(False)'}), "(max_length=128, default='', null=False)\n", (299, 339), False, 'from django.db import models\n'), ((352, 408), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(128)', 'default': '""""""', 'null': '(False)'}), "(max_length=128, default='', null=False)\n", (368, 408), False, 'from django.db import models\n'), ((448, 482), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (464, 482), False, 'from django.db import models\n'), ((494, 550), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(128)', 'default': '""""""', 'null': '(False)'}), "(max_length=128, default='', null=False)\n", (510, 550), False, 'from django.db import models\n'), ((590, 624), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (606, 624), False, 'from django.db import models\n'), ((636, 680), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(128)', 'null': '(False)'}), '(max_length=128, null=False)\n', (652, 680), False, 'from django.db import models\n'), ((692, 741), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 
'models.CASCADE'}), '(User, on_delete=models.CASCADE)\n', (709, 741), False, 'from django.db import models\n'), ((759, 815), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(128)', 'default': '""""""', 'null': '(False)'}), "(max_length=128, default='', null=False)\n", (775, 815), False, 'from django.db import models\n'), ((863, 897), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (879, 897), False, 'from django.db import models\n'), ((909, 974), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Team'], {'on_delete': '(False)', 'default': 'None', 'null': '(True)'}), '(Team, on_delete=False, default=None, null=True)\n', (926, 974), False, 'from django.db import models\n'), ((986, 1035), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.CASCADE'}), '(User, on_delete=models.CASCADE)\n', (1003, 1035), False, 'from django.db import models\n'), ((1047, 1091), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': 'None', 'null': '(True)'}), '(default=None, null=True)\n', (1066, 1091), False, 'from django.db import models\n'), ((1133, 1167), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (1149, 1167), False, 'from django.db import models\n'), ((1179, 1235), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(128)', 'default': '""""""', 'null': '(False)'}), "(max_length=128, default='', null=False)\n", (1195, 1235), False, 'from django.db import models\n'), ((1253, 1309), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(128)', 'default': '""""""', 'null': '(False)'}), "(max_length=128, default='', null=False)\n", (1269, 1309), False, 'from django.db import models\n'), ((1328, 1386), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(10000)', 'default': '""""""', 'null': '(False)'}), "(max_length=10000, default='', 
null=False)\n", (1344, 1386), False, 'from django.db import models\n'), ((1400, 1445), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'null': '(True)'}), '(default=False, null=True)\n', (1419, 1445), False, 'from django.db import models\n'), ((1503, 1537), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (1519, 1537), False, 'from django.db import models\n'), ((1550, 1600), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Event'], {'on_delete': 'models.CASCADE'}), '(Event, on_delete=models.CASCADE)\n', (1567, 1600), False, 'from django.db import models\n'), ((1616, 1669), 'django.db.models.ForeignKey', 'models.ForeignKey', (['TeamUser'], {'on_delete': 'models.CASCADE'}), '(TeamUser, on_delete=models.CASCADE)\n', (1633, 1669), False, 'from django.db import models\n'), ((1681, 1730), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Role'], {'on_delete': 'models.CASCADE'}), '(Role, on_delete=models.CASCADE)\n', (1698, 1730), False, 'from django.db import models\n'), ((1770, 1804), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (1786, 1804), False, 'from django.db import models\n'), ((1816, 1872), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(128)', 'default': '""""""', 'null': '(False)'}), "(max_length=128, default='', null=False)\n", (1832, 1872), False, 'from django.db import models\n'), ((1888, 1944), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(128)', 'default': '""""""', 'null': '(False)'}), "(max_length=128, default='', null=False)\n", (1904, 1944), False, 'from django.db import models\n'), ((1958, 2016), 'django.db.models.FloatField', 'models.FloatField', ([], {'max_length': '(128)', 'default': '(0.0)', 'null': '(False)'}), '(max_length=128, default=0.0, null=False)\n', (1975, 2016), False, 'from django.db import models\n'), ((2028, 2084), 
'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(128)', 'default': '""""""', 'null': '(False)'}), "(max_length=128, default='', null=False)\n", (2044, 2084), False, 'from django.db import models\n'), ((2103, 2161), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(10000)', 'default': '""""""', 'null': '(False)'}), "(max_length=10000, default='', null=False)\n", (2119, 2161), False, 'from django.db import models\n'), ((2193, 2242), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.CASCADE'}), '(User, on_delete=models.CASCADE)\n', (2210, 2242), False, 'from django.db import models\n'), ((2292, 2326), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (2308, 2326), False, 'from django.db import models\n'), ((2339, 2389), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Event'], {'on_delete': 'models.CASCADE'}), '(Event, on_delete=models.CASCADE)\n', (2356, 2389), False, 'from django.db import models\n'), ((2401, 2450), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Task'], {'on_delete': 'models.CASCADE'}), '(Task, on_delete=models.CASCADE)\n', (2418, 2450), False, 'from django.db import models\n'), ((2496, 2530), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (2512, 2530), False, 'from django.db import models\n'), ((2542, 2586), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(128)', 'null': '(False)'}), '(max_length=128, null=False)\n', (2558, 2586), False, 'from django.db import models\n'), ((2604, 2660), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(128)', 'default': '""""""', 'null': '(False)'}), "(max_length=128, default='', null=False)\n", (2620, 2660), False, 'from django.db import models\n'), ((2716, 2750), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)'}), 
'(primary_key=True)\n', (2732, 2750), False, 'from django.db import models\n'), ((2763, 2813), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Event'], {'on_delete': 'models.CASCADE'}), '(Event, on_delete=models.CASCADE)\n', (2780, 2813), False, 'from django.db import models\n'), ((2828, 2880), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Sponsor'], {'on_delete': 'models.CASCADE'}), '(Sponsor, on_delete=models.CASCADE)\n', (2845, 2880), False, 'from django.db import models\n'), ((2920, 2954), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (2936, 2954), False, 'from django.db import models\n'), ((2966, 3015), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Task'], {'on_delete': 'models.CASCADE'}), '(Task, on_delete=models.CASCADE)\n', (2983, 3015), False, 'from django.db import models\n'), ((3034, 3091), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(2048)', 'default': '""""""', 'null': '(False)'}), "(max_length=2048, default='', null=False)\n", (3050, 3091), False, 'from django.db import models\n'), ((3139, 3173), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (3155, 3173), False, 'from django.db import models\n'), ((3189, 3242), 'django.db.models.ForeignKey', 'models.ForeignKey', (['TeamUser'], {'on_delete': 'models.CASCADE'}), '(TeamUser, on_delete=models.CASCADE)\n', (3206, 3242), False, 'from django.db import models\n'), ((3259, 3313), 'django.db.models.ForeignKey', 'models.ForeignKey', (['EventTask'], {'on_delete': 'models.CASCADE'}), '(EventTask, on_delete=models.CASCADE)\n', (3276, 3313), False, 'from django.db import models\n'), ((3327, 3372), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': 'None', 'null': '(False)'}), '(default=None, null=False)\n', (3346, 3372), False, 'from django.db import models\n'), ((3384, 3426), 'django.db.models.TimeField', 
'models.TimeField', ([], {'default': 'None', 'null': '(False)'}), '(default=None, null=False)\n', (3400, 3426), False, 'from django.db import models\n')]
|
from django.http import HttpResponse
from django.shortcuts import render
from chatterbot import ChatBot
from chatterbot.trainers import ListTrainer
from chatterbot.trainers import ChatterBotCorpusTrainer
import json
# Module-level bot instance shared by the helpers below.
# MathematicalEvaluation answers arithmetic questions; BestMatch falls back
# to the closest statement learned during training.
my_bot = ChatBot('WeatherBot',
                 logic_adapters=['chatterbot.logic.MathematicalEvaluation',
                                 'chatterbot.logic.BestMatch'],
                 # storage_adapter=['chatterbot.storage.SQLStorageAdapter']
                 )
def trainBotWithList(bot):
    """Train *bot* on small-talk and math conversation lists.

    Each list alternates prompt/response items; ListTrainer treats
    consecutive items as one conversation.
    """
    small_talk = ['hi there!',
                  'hi!',
                  'how do you do?',
                  'how are you?',
                  'i\'m cool.',
                  'fine, you?',
                  'always cool.',
                  'i\'m ok',
                  'glad to hear that.',
                  'i\'m fine',
                  'glad to hear that.',
                  'i feel awesome',
                  'excellent, glad to hear that.',
                  'not so good',
                  'sorry to hear that.',
                  'what\'s your name?',
                  'i\'m pybot. ask me a \n math question, please.']
    math_talk_1 = ['pythagorean theorem',
                   'a squared plus b squared equals c squared.']
    math_talk_2 = ['law of cosines',
                   'c**2 = a**2 + b**2 - 2 * a * b * cos(gamma)']
    # BUG FIX: train the bot that was passed in, not the module-level
    # my_bot, so this helper works for any ChatBot instance.
    list_trainer = ListTrainer(bot)
    for conversation in (small_talk, math_talk_1, math_talk_2):
        list_trainer.train(conversation)
def trainBotWithCorpus(bot):
    """Train *bot* on the bundled English ChatterBot corpus."""
    trainer = ChatterBotCorpusTrainer(bot)
    trainer.train('chatterbot.corpus.english')
def trainBotWithCustom(bot):
    """Train *bot* from chatbot_qa.txt, a JSON list of
    {"question": ..., "answer": ...} records.
    """
    # Explicit encoding avoids platform-dependent decoding; json.load
    # replaces the redundant read() + loads() pair.
    with open('chatbot_qa.txt', 'r', encoding='utf-8') as jfile:
        qa_json = json.load(jfile)
    # Flatten records into alternating question/answer statements, the
    # conversation format ListTrainer expects.
    train = []
    for record in qa_json:
        train.append(record['question'])
        train.append(record['answer'])
    trainer = ListTrainer(bot)
    trainer.train(train)
# Train once at import time from the local Q&A file; the list and corpus
# trainers are kept for reference but disabled.
trainBotWithCustom(my_bot)
# trainBotWithList(my_bot)
# trainBotWithCorpus(my_bot)
def GetTrainedBotResponse(question):
    """Return the bot's reply text for *question*."""
    response = my_bot.get_response(question)
    return response.text
|
[
"json.loads",
"chatterbot.trainers.ChatterBotCorpusTrainer",
"chatterbot.trainers.ListTrainer",
"chatterbot.ChatBot"
] |
[((227, 343), 'chatterbot.ChatBot', 'ChatBot', (['"""WeatherBot"""'], {'logic_adapters': "['chatterbot.logic.MathematicalEvaluation', 'chatterbot.logic.BestMatch']"}), "('WeatherBot', logic_adapters=[\n 'chatterbot.logic.MathematicalEvaluation', 'chatterbot.logic.BestMatch'])\n", (234, 343), False, 'from chatterbot import ChatBot\n'), ((1362, 1381), 'chatterbot.trainers.ListTrainer', 'ListTrainer', (['my_bot'], {}), '(my_bot)\n', (1373, 1381), False, 'from chatterbot.trainers import ListTrainer\n'), ((1523, 1551), 'chatterbot.trainers.ChatterBotCorpusTrainer', 'ChatterBotCorpusTrainer', (['bot'], {}), '(bot)\n', (1546, 1551), False, 'from chatterbot.trainers import ChatterBotCorpusTrainer\n'), ((1728, 1747), 'json.loads', 'json.loads', (['qa_data'], {}), '(qa_data)\n', (1738, 1747), False, 'import json\n'), ((1884, 1900), 'chatterbot.trainers.ListTrainer', 'ListTrainer', (['bot'], {}), '(bot)\n', (1895, 1900), False, 'from chatterbot.trainers import ListTrainer\n')]
|
import cv2
def calc_amount_of_area(gray_img, avg):
    """Return (binary difference image, changed-area percentage).

    Compares *gray_img* against the running average *avg* (updated in
    place); pixels whose difference exceeds the threshold count as
    changed.
    """
    # Fold the current frame into the running average. The third argument
    # controls how quickly older frames are forgotten; keeping it small
    # favours the newest frame and reduces ghosting from prior frames.
    # http://opencv.jp/opencv-2svn/cpp/imgproc_motion_analysis_and_object_tracking.html
    cv2.accumulateWeighted(gray_img, avg, 0.1)
    frame_delta = cv2.absdiff(gray_img, cv2.convertScaleAbs(avg))
    # Binarise the difference image with a fixed threshold.
    _, binary = cv2.threshold(frame_delta, 80, 255, cv2.THRESH_BINARY)
    # Percentage of pixels that changed (white pixels over total pixels).
    total_pixels = binary.size
    changed_pixels = cv2.countNonZero(binary)
    white_ratio = changed_pixels / total_pixels * 100
    return binary, white_ratio
def main():
    """Capture webcam frames and display the per-frame change rate."""
    # Capture setup.
    cam = cv2.VideoCapture(0)
    cam.set(3, 640)  # frame width
    cam.set(4, 480)  # frame height
    avg = None
    loop_count = 0
    while(True):
        ret, frame = cam.read()
        # The first few frames right after start-up are unreliable, so skip them.
        if loop_count <= 5:
            loop_count += 1
            continue
        # Convert to grayscale.
        gray_img = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        # Seed the running average from the first usable frame.
        if avg is None:
            avg = gray_img.copy().astype("float")
            continue
        change_rate_img, change_rate = calc_amount_of_area(gray_img, avg)
        print(f'Change_Rate = {change_rate}%')
        # Show the difference region in real time.
        cv2.imshow('th', change_rate_img)
        # Quit on the ESC key.
        k = cv2.waitKey(100) & 0xff
        if k == 27:
            break
    # Camera teardown.
    cv2.waitKey(0)
    cam.release()
    cv2.destroyAllWindows()
# Run the capture loop when executed as a script.
if __name__ == "__main__":
    main()
|
[
"cv2.countNonZero",
"cv2.waitKey",
"cv2.accumulateWeighted",
"cv2.threshold",
"cv2.cvtColor",
"cv2.imshow",
"cv2.VideoCapture",
"cv2.convertScaleAbs",
"cv2.destroyAllWindows"
] |
[((295, 337), 'cv2.accumulateWeighted', 'cv2.accumulateWeighted', (['gray_img', 'avg', '(0.1)'], {}), '(gray_img, avg, 0.1)\n', (317, 337), False, 'import cv2\n'), ((584, 608), 'cv2.countNonZero', 'cv2.countNonZero', (['thresh'], {}), '(thresh)\n', (600, 608), False, 'import cv2\n'), ((972, 991), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (988, 991), False, 'import cv2\n'), ((1743, 1757), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (1754, 1757), False, 'import cv2\n'), ((1780, 1803), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (1801, 1803), False, 'import cv2\n'), ((377, 401), 'cv2.convertScaleAbs', 'cv2.convertScaleAbs', (['avg'], {}), '(avg)\n', (396, 401), False, 'import cv2\n'), ((439, 492), 'cv2.threshold', 'cv2.threshold', (['frameDelta', '(80)', '(255)', 'cv2.THRESH_BINARY'], {}), '(frameDelta, 80, 255, cv2.THRESH_BINARY)\n', (452, 492), False, 'import cv2\n'), ((1277, 1316), 'cv2.cvtColor', 'cv2.cvtColor', (['frame', 'cv2.COLOR_BGR2GRAY'], {}), '(frame, cv2.COLOR_BGR2GRAY)\n', (1289, 1316), False, 'import cv2\n'), ((1596, 1629), 'cv2.imshow', 'cv2.imshow', (['"""th"""', 'change_rate_img'], {}), "('th', change_rate_img)\n", (1606, 1629), False, 'import cv2\n'), ((1662, 1678), 'cv2.waitKey', 'cv2.waitKey', (['(100)'], {}), '(100)\n', (1673, 1678), False, 'import cv2\n')]
|
import logging
from typing import List, Tuple
import requests
from dateutil.parser import parse
from django.utils.timezone import now
from lxml.html import document_fromstring
from obj_update import obj_update_or_create
from .models import BandC, Meeting, Document
from . import scrape_logger
# CONSTANTS
MEETING_DATE = "bcic_mtgdate"
MEETING_TITLE = "bcic_mtgtype"
DOCUMENT = "bcic_doc"
logger = logging.getLogger(__name__)
def populate_bandc_list():
    """
    Populate the BandC table.

    Scrapes the city's boards-and-commissions page and get_or_creates a
    BandC row for every <option> in the board <select>.
    """
    response = requests.get(
        "https://www.austintexas.gov/department/boards-and-commissions"
    )
    # raise_for_status() instead of `assert response.ok`: asserts are
    # stripped under `python -O`, which would let a failed request fall
    # through silently.
    response.raise_for_status()
    doc = document_fromstring(response.text)
    for option in doc.xpath('//form[@id="bc_form"]' '//select[@name="board"]/option'):
        name = option.text
        path = option.values()[0]
        url = f"https://www.austintexas.gov{path}"
        slug = path.split("/")[-1]
        bandc, created = BandC.objects.get_or_create(
            name=name.strip(),
            slug=slug,
            homepage=url,
        )
        logger.info("Found %s. Created? %s", bandc, created)
class MeetingCancelled(Exception):
    """Raised by parse_date() when a date row says the meeting was cancelled."""
    pass
def parse_date(string):
    """
    Parse a date row into a ``datetime.date`` instance.

    Raises ``MeetingCancelled`` when the row text marks the meeting as
    cancelled rather than giving a date.
    """
    if "cancel" in string.lower():
        raise MeetingCancelled("Meeting Cancelled")
    parsed = parse(string)
    return parsed.date()
def clean_text(text):
    """Strip the leading "- " list-marker characters from *text*."""
    return text.lstrip(" -")
def process_page(html: str) -> Tuple[List, List]:
    """
    Transform the raw html into semi-structured data.

    The page is a flat sequence of <h5> rows inside div#bcic; each row's
    CSS class marks it as a meeting date, a meeting title, or a document
    link. Rows are order-dependent: a date row applies to every title and
    document row that follows it until the next date row.

    Returns
    -------
    tuple (list, list)
        Returns all the meeting data, and all the documents found.
    """
    doc = document_fromstring(html)
    date = None
    meeting_data = []
    doc_data = []
    # WISHLIST do two-pass to group into meetings then parse contents
    for row in doc.xpath('//div[@id="bcic"]/h5'):
        row_class = row.attrib["class"]  # assume each has only one css class
        if row_class == MEETING_DATE:
            try:
                date = parse_date(row.text)
            except MeetingCancelled:
                # A cancelled meeting clears the date, so following
                # title/document rows are skipped until the next date row.
                date = None
        elif date and row_class == MEETING_TITLE:
            # XXX assume all meeting date rows are followed by meeting title
            meeting_data.append({"date": date, "title": row.text_content()})
        elif date and row_class == DOCUMENT:
            row_type = row.xpath("./a/b/text()")[0]
            url = row.xpath("./a/@href")[0]
            title = clean_text("".join(row.xpath("./text()")).strip())
            doc_data.append(
                {"date": date, "type": row_type, "url": url, "title": title}
            )
    return meeting_data, doc_data
def _save_page(meeting_data, doc_data, bandc: BandC) -> bool:
    """
    Save one page worth of data, updating BandC, creating Meetings, and Documents.

    ``meeting_data`` and ``doc_data`` are the row dicts produced by
    ``process_page``.

    Returns
    -------
    True if there's another page to process (always False for now)
    """
    logger.info("save_page %s", bandc)
    if not meeting_data:
        return False
    # Populate meetings
    meetings = {}
    for row in meeting_data:
        meeting, created = obj_update_or_create(
            Meeting, bandc=bandc, date=row["date"], defaults={"title": row["title"]}
        )
        scrape_logger.log_meeting(meeting, created)
        # Remember each meeting's known document urls so stale ones can be
        # detected below (urls still in the set were not seen on this page).
        meetings[row["date"]] = {
            "meeting": meeting,
            "docs": set(meeting.documents.values_list("url", flat=True)),
        }
        # Keep the BandC's latest_meeting pointer current.
        if not bandc.latest_meeting or bandc.latest_meeting.date < row["date"]:
            bandc.latest_meeting = meeting
            bandc.save()
    # Populate documents
    for row in doc_data:
        defaults = dict(title=row["title"], type=row["type"])
        if "/edims/document.cfm" in row["url"]:
            # EDIMS urls end in "...=<id>"; store the id separately.
            defaults["edims_id"] = row["url"].rsplit("=", 2)[-1]
        doc, created = Document.objects.get_or_create(
            url=row["url"],
            meeting=meetings[row["date"]]["meeting"],
            defaults=defaults,
        )
        scrape_logger.log_document(doc, created)
        if not created:
            # Document is still listed on the page, so it is not stale.
            try:
                meetings[row["date"]]["docs"].remove(row["url"])
            except KeyError:
                pass
        if doc.scrape_status == "toscrape":
            doc.refresh()
    # Look for stale documents
    stale_documents: List[str] = []
    for meeting in meetings.values():
        stale_documents.extend(meeting["docs"])
    # Deal with stale documents
    if stale_documents:
        print("These docs are stale:", stale_documents)
        Document.objects.filter(url__in=stale_documents).update(active=False)
    return False  # TODO
def get_number_of_pages(html):
    """Return the page count from the pager links, or 1 when there is no pager."""
    doc = document_fromstring(html)
    pager_text = doc.xpath('(//a[@class="bcic_nav"])[last()]/text()')
    if pager_text:
        return int(pager_text[0].strip())
    return 1
def pull_bandc(bandc: BandC) -> None:
    """
    Get info about all the meetings for the most recent year.

    Walks the BandC's paginated meeting listing, saving meetings and
    documents for each page until the last page is reached.
    """
    headers = {
        # TODO pull version from VERSION
        "User-Agent": "atx_bandc/v0.2.0 https://github.com/crccheck/atx-bandc",
    }
    page_number = 1
    bandc.scraped_at = now()
    bandc.save()
    scrape_logger.log_bandc(bandc)
    process_next = True
    while process_next:
        response = requests.get(
            bandc.current_meeting_url_format(page_number), headers=headers
        )
        if not response.ok:
            scrape_logger.error(f"Response {response.status_code}")
            # BUG FIX: the old `continue` retried the same page forever
            # because page_number never advanced. Abort this BandC instead.
            break
        n_pages = get_number_of_pages(response.text)  # TODO only do this once
        meeting_data, doc_data = process_page(response.text)
        page_number += 1
        should_process_next = _save_page(meeting_data, doc_data, bandc=bandc)
        process_next = should_process_next and page_number <= n_pages
|
[
"dateutil.parser.parse",
"django.utils.timezone.now",
"obj_update.obj_update_or_create",
"lxml.html.document_fromstring",
"requests.get",
"logging.getLogger"
] |
[((404, 431), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (421, 431), False, 'import logging\n'), ((522, 599), 'requests.get', 'requests.get', (['"""https://www.austintexas.gov/department/boards-and-commissions"""'], {}), "('https://www.austintexas.gov/department/boards-and-commissions')\n", (534, 599), False, 'import requests\n'), ((647, 681), 'lxml.html.document_fromstring', 'document_fromstring', (['response.text'], {}), '(response.text)\n', (666, 681), False, 'from lxml.html import document_fromstring\n'), ((1682, 1707), 'lxml.html.document_fromstring', 'document_fromstring', (['html'], {}), '(html)\n', (1701, 1707), False, 'from lxml.html import document_fromstring\n'), ((4689, 4714), 'lxml.html.document_fromstring', 'document_fromstring', (['html'], {}), '(html)\n', (4708, 4714), False, 'from lxml.html import document_fromstring\n'), ((5180, 5185), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (5183, 5185), False, 'from django.utils.timezone import now\n'), ((3140, 3239), 'obj_update.obj_update_or_create', 'obj_update_or_create', (['Meeting'], {'bandc': 'bandc', 'date': "row['date']", 'defaults': "{'title': row['title']}"}), "(Meeting, bandc=bandc, date=row['date'], defaults={\n 'title': row['title']})\n", (3160, 3239), False, 'from obj_update import obj_update_or_create\n'), ((1361, 1374), 'dateutil.parser.parse', 'parse', (['string'], {}), '(string)\n', (1366, 1374), False, 'from dateutil.parser import parse\n')]
|
import discord
from discord.ext import commands
import todoist
import datetime
class TodoistCog:
    """Discord cog exposing a Todoist API smoke-test command."""

    def __init__(self, bot):
        self.bot = bot
        global debuglv
        debuglv = 0  # 0 = quiet; >0 also echoes full tracebacks to the channel

    @commands.command(name='todoist',
                      description="runs some API tests on todoist",
                      brief="Tests todoist API",
                      aliases=['todo'])
    @commands.guild_only()
    async def todoist_test(self, ctx):
        """Sync with Todoist and post the user's name and project names."""
        # NOTE(review): the API key is blank here; it must be supplied
        # (e.g. from config or an env var) for api.sync() to succeed.
        APIkey = ''
        try:
            api = todoist.TodoistAPI(APIkey)
            api.sync()
            full_name = api.state['user']['full_name']
            await ctx.send(full_name)
            for project in api.state['projects']:
                await ctx.send(project['name'])
        except Exception as e:
            await self.log_error(ctx, e)

    async def log_error(self, ctx, e):
        """Report exception *e* to the channel and the log; never raise."""
        # BUG FIX: `traceback` and `logging` were used without ever being
        # imported, so every call here raised NameError. Import them locally
        # to keep the module's top-level imports unchanged.
        import logging
        import traceback
        global debuglv
        try:
            if debuglv > 0:
                await ctx.send(traceback.format_exc())
            now = datetime.datetime.now()
            print('**`ERROR:`**' + str(type(e).__name__) + '-' + str(e))
            await ctx.send('**`ERROR:`**' + str(type(e).__name__) + ' - ' + str(e))
            # BUG FIX: logging.ERROR is a level constant (an int), not a
            # function; logging.error() actually records the message.
            logging.error(str(now) + ': ' + str(type(e).__name__) + ' - ' + str(e) + '.\r\n')
        except Exception as e:
            await ctx.send('**`ERROR:`**' + str(type(e).__name__) + '-' + str(e))
# The setup function below is neccesarry. Remember we give bot.add_cog() the name of the class in this case MembersCog.
# When we load the cog, we use the name of the file.
def setup(bot):
    """Entry point used by bot.load_extension() to register this cog."""
    cog = TodoistCog(bot)
    bot.add_cog(cog)
|
[
"discord.ext.commands.guild_only",
"discord.ext.commands.command",
"todoist.TodoistAPI",
"datetime.datetime.now"
] |
[((206, 339), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""todoist"""', 'description': '"""runs some API tests on todoist"""', 'brief': '"""Tests todoist API"""', 'aliases': "['todo']"}), "(name='todoist', description=\n 'runs some API tests on todoist', brief='Tests todoist API', aliases=[\n 'todo'])\n", (222, 339), False, 'from discord.ext import commands\n'), ((387, 408), 'discord.ext.commands.guild_only', 'commands.guild_only', ([], {}), '()\n', (406, 408), False, 'from discord.ext import commands\n'), ((502, 528), 'todoist.TodoistAPI', 'todoist.TodoistAPI', (['APIkey'], {}), '(APIkey)\n', (520, 528), False, 'import todoist\n'), ((1008, 1031), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1029, 1031), False, 'import datetime\n')]
|
"""
mbed CMSIS-DAP debugger
Copyright (c) 2016 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from .provider import (TargetThread, ThreadProvider)
from ..debug.context import DebugContext
from ..coresight.cortex_m import (CORE_REGISTER, register_name_to_index)
from pyOCD.pyDAPAccess import DAPAccess
import logging
LIST_NODE_NEXT_OFFSET = 0
LIST_NODE_OBJ_OFFSET= 8
## @brief Reads a null-terminated C string from the target.
def read_c_string(context, ptr):
    """Read a null-terminated ASCII string from target memory at *ptr*.

    Reads at most 256 bytes in 16-byte chunks. Bytes above 127 are
    replaced with '?'; a run of more than 4 such bytes terminates the
    string early. A transfer error returns whatever was read so far.
    """
    if not ptr:
        return ""

    chars = []
    consecutive_bad = 0
    bytes_read = 0
    try:
        while bytes_read < 256:
            chunk = context.readBlockMemoryUnaligned8(ptr, 16)
            ptr += 16
            bytes_read += 16

            terminated = False
            for byte in chunk:
                if byte == 0:
                    terminated = True
                    break
                if byte > 127:
                    # Replace non-ASCII characters; give up after a run of
                    # more than 4 invalid bytes in a row.
                    consecutive_bad += 1
                    if consecutive_bad > 4:
                        terminated = True
                        break
                    chars.append('?')
                else:
                    chars.append(chr(byte))
                    consecutive_bad = 0
            if terminated:
                break
    except DAPAccess.TransferError:
        logging.debug("TransferError while trying to read 16 bytes at 0x%08x", ptr)
    return "".join(chars)
## @brief Standard Cortex-M register stacking context.
class CommonThreadContext(DebugContext):
    """Debug context that reads registers from a thread's stacked frame.

    For a thread that is not currently running (or when in an exception),
    register reads come from the 0x40-byte saved frame on the thread's
    stack instead of the live core registers. Writes always go to the
    live core.
    """

    # Byte offsets of each register within the saved frame: r4-r11 occupy
    # the first 0x20 bytes (software-saved), followed by the hardware-saved
    # r0-r3, r12, lr, pc, xpsr. SP is handled specially, so it is not in
    # this dict.
    CORE_REGISTER_OFFSETS = {
        0: 32,  # r0
        1: 36,  # r1
        2: 40,  # r2
        3: 44,  # r3
        4: 0,  # r4
        5: 4,  # r5
        6: 8,  # r6
        7: 12,  # r7
        8: 16,  # r8
        9: 20,  # r9
        10: 24,  # r10
        11: 28,  # r11
        12: 48,  # r12
        14: 52,  # lr
        15: 56,  # pc
        16: 60,  # xpsr
    }

    def __init__(self, parentContext, thread):
        super(CommonThreadContext, self).__init__(parentContext.core)
        self._parent = parentContext
        self._thread = thread

    def readCoreRegistersRaw(self, reg_list):
        """Read the listed registers, using stacked values where required."""
        reg_list = [register_name_to_index(reg) for reg in reg_list]
        reg_vals = []

        inException = self._get_ipsr() > 0
        isCurrent = self._is_current()

        # Rewind SP to the base of the saved frame: 0x40 bytes for a
        # swapped-out thread, 0x20 (hardware frame only) when the current
        # thread is in an exception.
        sp = self._get_stack_pointer()
        saveSp = sp
        if not isCurrent:
            sp -= 0x40
        elif inException:
            sp -= 0x20

        for reg in reg_list:
            if isCurrent:
                if not inException:
                    # Not in an exception, so just read the live register.
                    reg_vals.append(self._core.readCoreRegisterRaw(reg))
                    continue
                else:
                    # Check for regs we can't access: r4-r11 are not part
                    # of the hardware-stacked frame, so report 0.
                    if reg in (4, 5, 6, 7, 8, 9, 10, 11):
                        reg_vals.append(0)
                        continue

            # Must handle stack pointer specially.
            if reg == 13:
                reg_vals.append(saveSp)
                continue

            spOffset = self.CORE_REGISTER_OFFSETS.get(reg, None)
            if spOffset is None:
                # Registers not in the frame table come from the live core.
                reg_vals.append(self._core.readCoreRegisterRaw(reg))
                continue
            if isCurrent and inException:
                # Table offsets include the 0x20 software-saved area, which
                # the live hardware-only exception frame does not have.
                spOffset -= 0x20

            try:
                reg_vals.append(self._core.read32(sp + spOffset))
            except DAPAccess.TransferError:
                # Unreadable stack memory reads back as 0.
                reg_vals.append(0)

        return reg_vals

    def _get_stack_pointer(self):
        """Return the thread's SP adjusted past any saved state."""
        sp = 0
        if self._is_current():
            # Read live process stack.
            sp = self._core.readCoreRegister('sp')

            # In IRQ context, we have to adjust for hw saved state.
            if self._get_ipsr() > 0:
                sp += 0x20
        else:
            # Get stack pointer saved in thread struct.
            # NOTE(review): THREAD_STACK_POINTER_OFFSET is not defined in
            # this module as shown -- confirm it is supplied by the
            # RTOS-specific provider this class was derived from.
            sp = self._core.read32(self._thread._base + THREAD_STACK_POINTER_OFFSET)

            # Skip saved thread state.
            sp += 0x40

        return sp

    def _get_ipsr(self):
        # Low byte of xPSR is the exception number; nonzero means handler mode.
        return self._core.readCoreRegister('xpsr') & 0xff

    def _has_extended_frame(self):
        # Base implementation assumes no FPU (extended) frame.
        return False

    def _is_current(self):
        # True when this thread is the one the core is actually running.
        return self._thread.is_current

    def writeCoreRegistersRaw(self, reg_list, data_list):
        # Writes are delegated straight to the live core registers.
        self._core.writeCoreRegistersRaw(reg_list, data_list)
## @brief Class representing the handler mode.
class HandlerModeThread(TargetThread):
    """Pseudo-thread standing in for Cortex-M handler (exception) mode."""

    def __init__(self, targetContext, provider):
        super(HandlerModeThread, self).__init__()
        self._target_context = targetContext
        self._provider = provider

    def get_stack_pointer(self):
        # Handler mode always runs on the main stack.
        return self._target_context.readCoreRegister('msp')

    @property
    def priority(self):
        return 0

    @property
    def unique_id(self):
        return 2

    @property
    def name(self):
        return "Handler mode"

    @property
    def description(self):
        return ""

    @property
    def is_current(self):
        # Active whenever the core is servicing an exception (IPSR != 0).
        return self._provider.get_ipsr() > 0

    @property
    def context(self):
        return self._target_context

    def __repr__(self):
        return str(self)

    def __str__(self):
        return "<HandlerModeThread@0x%08x>" % id(self)
|
[
"logging.debug"
] |
[((1821, 1896), 'logging.debug', 'logging.debug', (['"""TransferError while trying to read 16 bytes at 0x%08x"""', 'ptr'], {}), "('TransferError while trying to read 16 bytes at 0x%08x', ptr)\n", (1834, 1896), False, 'import logging\n')]
|
from django.contrib import admin
from .models import student
# Expose the student model in the Django admin site.
admin.site.register(student)
|
[
"django.contrib.admin.site.register"
] |
[((65, 93), 'django.contrib.admin.site.register', 'admin.site.register', (['student'], {}), '(student)\n', (84, 93), False, 'from django.contrib import admin\n')]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.urls import reverse
from misago.acl.testutils import override_acl
from misago.categories.models import Category
from misago.users.testutils import AuthenticatedUserTestCase
class StartThreadTests(AuthenticatedUserTestCase):
def setUp(self):
super(StartThreadTests, self).setUp()
self.category = Category.objects.get(slug='first-category')
self.api_link = reverse('misago:api:thread-list')
def override_acl(self, extra_acl=None):
new_acl = self.user.acl_cache
new_acl['categories'][self.category.pk].update({
'can_see': 1,
'can_browse': 1,
'can_start_threads': 1,
'can_pin_threads': 0,
'can_close_threads': 0,
'can_hide_threads': 0,
'can_hide_own_threads': 0,
})
if extra_acl:
new_acl['categories'][self.category.pk].update(extra_acl)
if 'can_see' in extra_acl and not extra_acl['can_see']:
new_acl['visible_categories'].remove(self.category.pk)
new_acl['browseable_categories'].remove(self.category.pk)
if 'can_browse' in extra_acl and not extra_acl['can_browse']:
new_acl['browseable_categories'].remove(self.category.pk)
override_acl(self.user, new_acl)
def test_cant_start_thread_as_guest(self):
"""user has to be authenticated to be able to post thread"""
self.logout_user()
response = self.client.post(self.api_link)
self.assertEqual(response.status_code, 403)
def test_cant_see(self):
"""has no permission to see selected category"""
self.override_acl({'can_see': 0})
response = self.client.post(self.api_link, {
'category': self.category.pk,
})
self.assertContains(response, "Selected category is invalid.", status_code=400)
def test_cant_browse(self):
"""has no permission to browse selected category"""
self.override_acl({'can_browse': 0})
response = self.client.post(self.api_link, {
'category': self.category.pk,
})
self.assertContains(response, "Selected category is invalid.", status_code=400)
def test_cant_start_thread(self):
"""permission to start thread in category is validated"""
self.override_acl({'can_start_threads': 0})
response = self.client.post(self.api_link, {
'category': self.category.pk,
})
self.assertContains(
response, "You don't have permission to start new threads", status_code=400
)
def test_cant_start_thread_in_locked_category(self):
"""can't post in closed category"""
self.category.is_closed = True
self.category.save()
self.override_acl({'can_close_threads': 0})
response = self.client.post(self.api_link, {
'category': self.category.pk,
})
self.assertContains(response, "This category is closed.", status_code=400)
def test_cant_start_thread_in_invalid_category(self):
"""can't post in invalid category"""
self.category.is_closed = True
self.category.save()
self.override_acl({'can_close_threads': 0})
response = self.client.post(self.api_link, {'category': self.category.pk * 100000})
self.assertContains(response, "Selected category doesn't exist", status_code=400)
def test_empty_data(self):
"""no data sent handling has no showstoppers"""
self.override_acl()
response = self.client.post(self.api_link, data={})
self.assertEqual(response.status_code, 400)
self.assertEqual(
response.json(), {
'category': ["You have to select category to post thread in."],
'title': ["You have to enter thread title."],
'post': ["You have to enter a message."],
}
)
def test_title_is_validated(self):
"""title is validated"""
self.override_acl()
response = self.client.post(
self.api_link,
data={
'category': self.category.pk,
'title': "------",
'post': "Lorem ipsum dolor met, sit amet elit!",
}
)
self.assertEqual(response.status_code, 400)
self.assertEqual(
response.json(), {
'title': ["Thread title should contain alpha-numeric characters."],
}
)
def test_post_is_validated(self):
"""post is validated"""
self.override_acl()
response = self.client.post(
self.api_link,
data={
'category': self.category.pk,
'title': "Lorem ipsum dolor met",
'post': "a",
}
)
self.assertEqual(response.status_code, 400)
self.assertEqual(
response.json(), {
'post': ["Posted message should be at least 5 characters long (it has 1)."],
}
)
def test_can_start_thread(self):
"""endpoint creates new thread"""
self.override_acl()
response = self.client.post(
self.api_link,
data={
'category': self.category.pk,
'title': "Hello, I am test thread!",
'post': "Lorem ipsum dolor met!",
}
)
self.assertEqual(response.status_code, 200)
thread = self.user.thread_set.all()[:1][0]
response_json = response.json()
self.assertEqual(response_json['url'], thread.get_absolute_url())
self.override_acl()
response = self.client.get(thread.get_absolute_url())
self.assertContains(response, self.category.name)
self.assertContains(response, thread.title)
self.assertContains(response, "<p>Lorem ipsum dolor met!</p>")
# api increased user's threads and posts counts
self.reload_user()
self.assertEqual(self.user.threads, 1)
self.assertEqual(self.user.posts, 1)
self.assertEqual(thread.category_id, self.category.pk)
self.assertEqual(thread.title, "Hello, I am test thread!")
self.assertEqual(thread.starter_id, self.user.id)
self.assertEqual(thread.starter_name, self.user.username)
self.assertEqual(thread.starter_slug, self.user.slug)
self.assertEqual(thread.last_poster_id, self.user.id)
self.assertEqual(thread.last_poster_name, self.user.username)
self.assertEqual(thread.last_poster_slug, self.user.slug)
post = self.user.post_set.all()[:1][0]
self.assertEqual(post.category_id, self.category.pk)
self.assertEqual(post.original, 'Lorem ipsum dolor met!')
self.assertEqual(post.poster_id, self.user.id)
self.assertEqual(post.poster_name, self.user.username)
category = Category.objects.get(pk=self.category.pk)
self.assertEqual(category.threads, 1)
self.assertEqual(category.posts, 1)
self.assertEqual(category.last_thread_id, thread.id)
self.assertEqual(category.last_thread_title, thread.title)
self.assertEqual(category.last_thread_slug, thread.slug)
self.assertEqual(category.last_poster_id, self.user.id)
self.assertEqual(category.last_poster_name, self.user.username)
self.assertEqual(category.last_poster_slug, self.user.slug)
def test_start_closed_thread_no_permission(self):
"""permission is checked before thread is closed"""
self.override_acl({'can_close_threads': 0})
response = self.client.post(
self.api_link,
data={
'category': self.category.pk,
'title': "Hello, I am test thread!",
'post': "Lorem ipsum dolor met!",
'close': True,
}
)
self.assertEqual(response.status_code, 200)
thread = self.user.thread_set.all()[:1][0]
self.assertFalse(thread.is_closed)
def test_start_closed_thread(self):
"""can post closed thread"""
self.override_acl({'can_close_threads': 1})
response = self.client.post(
self.api_link,
data={
'category': self.category.pk,
'title': "Hello, I am test thread!",
'post': "Lorem ipsum dolor met!",
'close': True,
}
)
self.assertEqual(response.status_code, 200)
thread = self.user.thread_set.all()[:1][0]
self.assertTrue(thread.is_closed)
def test_start_unpinned_thread(self):
"""can post unpinned thread"""
self.override_acl({'can_pin_threads': 1})
response = self.client.post(
self.api_link,
data={
'category': self.category.pk,
'title': "Hello, I am test thread!",
'post': "Lorem ipsum dolor met!",
'pin': 0,
}
)
self.assertEqual(response.status_code, 200)
thread = self.user.thread_set.all()[:1][0]
self.assertEqual(thread.weight, 0)
def test_start_locally_pinned_thread(self):
"""can post locally pinned thread"""
self.override_acl({'can_pin_threads': 1})
response = self.client.post(
self.api_link,
data={
'category': self.category.pk,
'title': "Hello, I am test thread!",
'post': "Lorem ipsum dolor met!",
'pin': 1,
}
)
self.assertEqual(response.status_code, 200)
thread = self.user.thread_set.all()[:1][0]
self.assertEqual(thread.weight, 1)
def test_start_globally_pinned_thread(self):
"""can post globally pinned thread"""
self.override_acl({'can_pin_threads': 2})
response = self.client.post(
self.api_link,
data={
'category': self.category.pk,
'title': "Hello, I am test thread!",
'post': "Lorem ipsum dolor met!",
'pin': 2,
}
)
self.assertEqual(response.status_code, 200)
thread = self.user.thread_set.all()[:1][0]
self.assertEqual(thread.weight, 2)
def test_start_globally_pinned_thread_no_permission(self):
"""cant post globally pinned thread without permission"""
self.override_acl({'can_pin_threads': 1})
response = self.client.post(
self.api_link,
data={
'category': self.category.pk,
'title': "Hello, I am test thread!",
'post': "Lorem ipsum dolor met!",
'pin': 2,
}
)
self.assertEqual(response.status_code, 200)
thread = self.user.thread_set.all()[:1][0]
self.assertEqual(thread.weight, 0)
def test_start_locally_pinned_thread_no_permission(self):
"""cant post locally pinned thread without permission"""
self.override_acl({'can_pin_threads': 0})
response = self.client.post(
self.api_link,
data={
'category': self.category.pk,
'title': "Hello, I am test thread!",
'post': "Lorem ipsum dolor met!",
'pin': 1,
}
)
self.assertEqual(response.status_code, 200)
thread = self.user.thread_set.all()[:1][0]
self.assertEqual(thread.weight, 0)
def test_start_hidden_thread(self):
"""can post hidden thread"""
self.override_acl({'can_hide_threads': 1})
response = self.client.post(
self.api_link,
data={
'category': self.category.pk,
'title': "Hello, I am test thread!",
'post': "Lorem ipsum dolor met!",
'hide': 1,
}
)
self.assertEqual(response.status_code, 200)
thread = self.user.thread_set.all()[:1][0]
self.assertTrue(thread.is_hidden)
category = Category.objects.get(pk=self.category.pk)
self.assertNotEqual(category.last_thread_id, thread.id)
def test_start_hidden_thread_no_permission(self):
"""cant post hidden thread without permission"""
self.override_acl({'can_hide_threads': 0})
response = self.client.post(
self.api_link,
data={
'category': self.category.pk,
'title': "Hello, I am test thread!",
'post': "Lorem ipsum dolor met!",
'hide': 1,
}
)
self.assertEqual(response.status_code, 200)
thread = self.user.thread_set.all()[:1][0]
self.assertFalse(thread.is_hidden)
def test_post_unicode(self):
"""unicode characters can be posted"""
self.override_acl()
response = self.client.post(
self.api_link,
data={
'category': self.category.pk,
'title': "Brzęczyżczykiewicz",
'post': "Chrzążczyżewoszyce, powiat Łękółody.",
}
)
self.assertEqual(response.status_code, 200)
def test_category_moderation_queue(self):
"""start unapproved thread in category that requires approval"""
self.category.require_threads_approval = True
self.category.save()
response = self.client.post(
self.api_link,
data={
'category': self.category.pk,
'title': "Hello, I am test thread!",
'post': "Lorem ipsum dolor met!",
}
)
self.assertEqual(response.status_code, 200)
thread = self.user.thread_set.all()[:1][0]
self.assertTrue(thread.is_unapproved)
self.assertTrue(thread.has_unapproved_posts)
post = self.user.post_set.all()[:1][0]
self.assertTrue(post.is_unapproved)
category = Category.objects.get(slug='first-category')
self.assertEqual(category.threads, self.category.threads)
self.assertEqual(category.posts, self.category.posts)
self.assertFalse(category.last_thread_id == thread.id)
def test_category_moderation_queue_bypass(self):
"""bypass moderation queue due to user's acl"""
override_acl(self.user, {'can_approve_content': 1})
self.category.require_threads_approval = True
self.category.save()
response = self.client.post(
self.api_link,
data={
'category': self.category.pk,
'title': "Hello, I am test thread!",
'post': "Lorem ipsum dolor met!",
}
)
self.assertEqual(response.status_code, 200)
thread = self.user.thread_set.all()[:1][0]
self.assertFalse(thread.is_unapproved)
self.assertFalse(thread.has_unapproved_posts)
post = self.user.post_set.all()[:1][0]
self.assertFalse(post.is_unapproved)
category = Category.objects.get(slug='first-category')
self.assertEqual(category.threads, self.category.threads + 1)
self.assertEqual(category.posts, self.category.posts + 1)
self.assertEqual(category.last_thread_id, thread.id)
def test_user_moderation_queue(self):
"""start unapproved thread in category that requires approval"""
self.override_acl({'require_threads_approval': 1})
response = self.client.post(
self.api_link,
data={
'category': self.category.pk,
'title': "Hello, I am test thread!",
'post': "Lorem ipsum dolor met!",
}
)
self.assertEqual(response.status_code, 200)
thread = self.user.thread_set.all()[:1][0]
self.assertTrue(thread.is_unapproved)
self.assertTrue(thread.has_unapproved_posts)
post = self.user.post_set.all()[:1][0]
self.assertTrue(post.is_unapproved)
category = Category.objects.get(slug='first-category')
self.assertEqual(category.threads, self.category.threads)
self.assertEqual(category.posts, self.category.posts)
self.assertFalse(category.last_thread_id == thread.id)
def test_user_moderation_queue_bypass(self):
"""bypass moderation queue due to user's acl"""
override_acl(self.user, {'can_approve_content': 1})
self.override_acl({'require_threads_approval': 1})
response = self.client.post(
self.api_link,
data={
'category': self.category.pk,
'title': "Hello, I am test thread!",
'post': "Lorem ipsum dolor met!",
}
)
self.assertEqual(response.status_code, 200)
thread = self.user.thread_set.all()[:1][0]
self.assertFalse(thread.is_unapproved)
self.assertFalse(thread.has_unapproved_posts)
post = self.user.post_set.all()[:1][0]
self.assertFalse(post.is_unapproved)
category = Category.objects.get(slug='first-category')
self.assertEqual(category.threads, self.category.threads + 1)
self.assertEqual(category.posts, self.category.posts + 1)
self.assertEqual(category.last_thread_id, thread.id)
def test_omit_other_moderation_queues(self):
"""other queues are omitted"""
self.category.require_replies_approval = True
self.category.require_edits_approval = True
self.category.save()
self.override_acl({
'require_replies_approval': 1,
'require_edits_approval': 1,
})
response = self.client.post(
self.api_link,
data={
'category': self.category.pk,
'title': "Hello, I am test thread!",
'post': "Lorem ipsum dolor met!",
}
)
self.assertEqual(response.status_code, 200)
thread = self.user.thread_set.all()[:1][0]
self.assertFalse(thread.is_unapproved)
self.assertFalse(thread.has_unapproved_posts)
post = self.user.post_set.all()[:1][0]
self.assertFalse(post.is_unapproved)
category = Category.objects.get(slug='first-category')
self.assertEqual(category.threads, self.category.threads + 1)
self.assertEqual(category.posts, self.category.posts + 1)
self.assertEqual(category.last_thread_id, thread.id)
|
[
"django.urls.reverse",
"misago.categories.models.Category.objects.get",
"misago.acl.testutils.override_acl"
] |
[((396, 439), 'misago.categories.models.Category.objects.get', 'Category.objects.get', ([], {'slug': '"""first-category"""'}), "(slug='first-category')\n", (416, 439), False, 'from misago.categories.models import Category\n'), ((464, 497), 'django.urls.reverse', 'reverse', (['"""misago:api:thread-list"""'], {}), "('misago:api:thread-list')\n", (471, 497), False, 'from django.urls import reverse\n'), ((1349, 1381), 'misago.acl.testutils.override_acl', 'override_acl', (['self.user', 'new_acl'], {}), '(self.user, new_acl)\n', (1361, 1381), False, 'from misago.acl.testutils import override_acl\n'), ((7005, 7046), 'misago.categories.models.Category.objects.get', 'Category.objects.get', ([], {'pk': 'self.category.pk'}), '(pk=self.category.pk)\n', (7025, 7046), False, 'from misago.categories.models import Category\n'), ((12210, 12251), 'misago.categories.models.Category.objects.get', 'Category.objects.get', ([], {'pk': 'self.category.pk'}), '(pk=self.category.pk)\n', (12230, 12251), False, 'from misago.categories.models import Category\n'), ((14111, 14154), 'misago.categories.models.Category.objects.get', 'Category.objects.get', ([], {'slug': '"""first-category"""'}), "(slug='first-category')\n", (14131, 14154), False, 'from misago.categories.models import Category\n'), ((14464, 14515), 'misago.acl.testutils.override_acl', 'override_acl', (['self.user', "{'can_approve_content': 1}"], {}), "(self.user, {'can_approve_content': 1})\n", (14476, 14515), False, 'from misago.acl.testutils import override_acl\n'), ((15175, 15218), 'misago.categories.models.Category.objects.get', 'Category.objects.get', ([], {'slug': '"""first-category"""'}), "(slug='first-category')\n", (15195, 15218), False, 'from misago.categories.models import Category\n'), ((16163, 16206), 'misago.categories.models.Category.objects.get', 'Category.objects.get', ([], {'slug': '"""first-category"""'}), "(slug='first-category')\n", (16183, 16206), False, 'from misago.categories.models import Category\n'), 
((16512, 16563), 'misago.acl.testutils.override_acl', 'override_acl', (['self.user', "{'can_approve_content': 1}"], {}), "(self.user, {'can_approve_content': 1})\n", (16524, 16563), False, 'from misago.acl.testutils import override_acl\n'), ((17199, 17242), 'misago.categories.models.Category.objects.get', 'Category.objects.get', ([], {'slug': '"""first-category"""'}), "(slug='first-category')\n", (17219, 17242), False, 'from misago.categories.models import Category\n'), ((18363, 18406), 'misago.categories.models.Category.objects.get', 'Category.objects.get', ([], {'slug': '"""first-category"""'}), "(slug='first-category')\n", (18383, 18406), False, 'from misago.categories.models import Category\n')]
|
from direct.showbase.ShowBase import ShowBase
from mapmanager import Mapmanager
from hero import Hero
class Game(ShowBase):
def __init__(self):
ShowBase.__init__(self)
self.land = Mapmanager()
x,y = self.land.loadLand("land.txt")
self.hero = Hero((x//2,y//2,2),self.land)
base.camLens.setFov(90)
game = Game()
game.run()
|
[
"mapmanager.Mapmanager",
"hero.Hero",
"direct.showbase.ShowBase.ShowBase.__init__"
] |
[((157, 180), 'direct.showbase.ShowBase.ShowBase.__init__', 'ShowBase.__init__', (['self'], {}), '(self)\n', (174, 180), False, 'from direct.showbase.ShowBase import ShowBase\n'), ((201, 213), 'mapmanager.Mapmanager', 'Mapmanager', ([], {}), '()\n', (211, 213), False, 'from mapmanager import Mapmanager\n'), ((279, 315), 'hero.Hero', 'Hero', (['(x // 2, y // 2, 2)', 'self.land'], {}), '((x // 2, y // 2, 2), self.land)\n', (283, 315), False, 'from hero import Hero\n')]
|
import cv2
import numpy as np
import copy
class Drawer(object):
def __init__(self, color = (255,255,0), font=cv2.FONT_HERSHEY_DUPLEX):
self.color = color
self.RED = (0,0,255)
self.LESSRED = (0,20,100)
self.TEAL = (148, 184, 0)
self.font = font
self.fontScale = 1.2
self.fontThickness = 2
self.indFontScale = self.fontScale * 2.5
self.indFontThickness = self.fontThickness * 2
self.indTextSize = cv2.getTextSize(text=str('1'), fontFace=self.font, fontScale=self.indFontScale, thickness=self.indFontThickness)[0]
def _resize(self, frame):
height, width = frame.shape[:2]
if height != self.frameHeight:
scale = float(height) / self.frameHeight
frame = cv2.resize(frame, (int(width / scale), int(self.frameHeight) ) )
return frame
def _put_label(self, frame, label):
border=3
cv2.putText(frame, label, (border,20+border),
self.font, self.fontScale,
self.color, self.fontThickness)
def draw_chosen(self, frameDC, track):
if track:
msg = 'FOLLOWING: {}'.format(track.track_id)
color = (0,255,0)
else:
msg = 'FOLLOWING: NIL'
color = self.color
fontScale = 1.2
fontThickness = 3
# print(frameDC.shape)
cv2.putText(frameDC, msg, (frameDC.shape[1]-310,10+24), self.font, fontScale, color, fontThickness)
# cv2.putText(frameDC, msg, (frameDC.shape[1]-20,10+24), self.font, fontScale, self.color, fontThickness)
def draw_status(self, frameDC, status):
if status:
status_msg = 'DET: ON'
else:
status_msg = 'DET: OFF'
fontScale = 1.2
fontThickness = 3
cv2.putText(frameDC, status_msg, (10,10+24), self.font, fontScale, self.color, fontThickness)
def draw_track(self, frameDC, track, chosen_track=None):
color = None
if chosen_track and chosen_track.track_id == track.track_id:
color = (0,255,0)
if color is None:
color = self.color
l,t,r,b = [int(x) for x in track.to_tlbr()]
best_cls = track.get_top_k_finegrain_cls(k=1)
if best_cls:
clsname, conf = best_cls[0]
conf = int(conf*100)
text = 'person {}: {} @ {}%'.format(track.track_id, clsname, conf)
else:
text = 'person {}'.format(track.track_id)
fontScale = 1
fontThickness = 2
cv2.rectangle(frameDC, (l, t), (r, b), color, fontThickness)
cv2.putText(frameDC,
text,
(l+5, b-10),
self.font, fontScale, color, fontThickness)
def draw_tracks(self, frameDC, tracks, chosen_track=None):
for track in tracks:
if not track.is_confirmed() or track.time_since_update > 1:
continue
self.draw_track(frameDC, track, chosen_track)
def draw_track_class(self, frameDC, track, chosen_track=None):
color = None
if chosen_track and chosen_track.track_id == track.track_id:
color = (0,255,0)
if color is None:
color = self.color
l,t,r,b = [int(x) for x in track.to_tlbr()]
text = '{} {}'.format(track.det_class, track.track_id)
fontScale = 1
fontThickness = 2
cv2.rectangle(frameDC, (l, t), (r, b), color, fontThickness)
cv2.putText(frameDC,
text,
(l+5, b-10),
self.font, fontScale, color, fontThickness)
def draw_tracks_class(self, frameDC, tracks, chosen_track=None):
for track in tracks:
# if not track.is_confirmed() or track.time_since_update > 1:
if not track.is_confirmed() or track.time_since_update > 1:
continue
self.draw_track_class(frameDC, track, chosen_track)
def draw_bbs(self, frameDC, bbs, label=''):
if bbs is None or len(bbs) == 0:
return
# frameDC = copy.deepcopy(frame)
frame_h, frame_w = frameDC.shape[:2]
for i, bb in enumerate(bbs):
if bb is None:
continue
l,t,w,h = [ int(x) for x in bb[0]]
r = l + w - 1
b = t + h - 1
# if isinstance(bb['confidence'], str):
# if bb['confidence'].isdigit():
# conf_text = str('{:.2}'.format(float(bb['confidence'])))
# else:
# conf_text = str('{}'.format(bb['confidence']))
# elif isinstance(bb['confidence'], float):
# conf_text = str('{:.2}'.format(bb['confidence']))
# else:
# conf_text = str('{}'.format(bb['confidence']))
text = 'PERSON'
cv2.rectangle(frameDC, (l,t), (r,b), self.color, 2)
cv2.putText(frameDC,
text,
(l+5, b-10),
self.font, self.fontScale, self.color, self.fontThickness)
if t - 10 - self.indTextSize[1] >= 0:
text_y = int(t - 10)
elif b + 10 + self.indTextSize[1] <= frame_h - 1:
text_y = int(b + 10 + self.indTextSize[1])
else:
text_y = int(t + (b-t)/2 + self.indTextSize[1]/2)
cv2.putText(frameDC,
str(i),
(l+5, text_y),
self.font, self.fontScale*2.5, self.color, self.fontThickness*2)
self._put_label(frameDC, label)
# return frameDC
def draw_label(self, frame, label=''):
frameDC = copy.deepcopy(frame)
self._put_label(frameDC, label)
return frameDC
def draw_dets(self, frame, dets, color=None, label=''):
if dets is None or len(dets) == 0:
return frame
if color is None:
color = self.color
frameDC = copy.deepcopy(frame)
self._put_label(frameDC, label)
for det in dets:
# det = ( class, confidence , (x, y, w, h) )
l = int(det[2][0] - det[2][2]/2)
t = int(det[2][1] - det[2][3]/2)
r = int(det[2][0] + det[2][2]/2)
b = int(det[2][1] + det[2][3]/2)
text = '{}: {:0.2f}%'.format(det[0].decode("utf-8"), det[1]*100)
cv2.rectangle(frameDC, (l,t), (r,b), color, 2)
cv2.putText(frameDC,
text,
(l+5, b-10),
self.font, self.fontScale, color, self.fontThickness)
return frameDC
def draw_bb_name(self, frame, bb, name, color=None, label=''):
if color is None:
color = self.color
frameDC = copy.deepcopy(frame)
frame_h, frame_w = frame.shape[:2]
l = max(0, int(bb['rect']['l']))
t = max(0, int(bb['rect']['t']))
r = min(frame_w-1, int(bb['rect']['r']))
b = min(frame_h-1, int(bb['rect']['b']))
text = str('{}'.format(name))
cv2.rectangle(frameDC, (l,t), (r,b), color, 2)
cv2.putText(frameDC,
text,
(l+5, b-10),
self.font, self.fontScale, color, self.fontThickness)
self._put_label(frameDC, label)
return frameDC
def draw_annots_face(self, frame, annotation_frame):
for name, bb in annotation_frame.items():
frame = self.draw_bb_name(frame, bb, name, color=self.TEAL)
return frame
def draw_annots(self, frame, annotation_frame, exclude=[]):
for trk_idx, annot in annotation_frame.items():
if trk_idx in exclude:
continue
frame = self.draw_bb_name(frame, annot['bb'], annot['class'], color=self.TEAL)
# for name, bb in annotation_frame.items():
# frame = self.draw_bb_name(frame, bb, name, color=self.TEAL)
return frame
def draw_bb_tracking(self, frame, bb, name, generated=False, last_tracked=False, det_asst=True, took_det=(None, None), label=''):
if generated:
color = self.RED # red
label = 'TRACK{}'.format('+DETECT' if det_asst else '-NODETECT')
if took_det[0] is not None:
label += ': {} '.format('det' if took_det[0] else 'trk')
label += '{:0.1f}'.format(took_det[1]) if took_det[0] else ''
elif last_tracked:
color = self.LESSRED # reddish brown
label = 'TRACK{}'.format('+DETECT' if det_asst else '-NODETECT')
else:
color = self.TEAL
label = 'NO-TRACK'
return self.draw_bb_name(frame, bb, name, color=color, label=label)
|
[
"copy.deepcopy",
"cv2.putText",
"cv2.rectangle"
] |
[((931, 1042), 'cv2.putText', 'cv2.putText', (['frame', 'label', '(border, 20 + border)', 'self.font', 'self.fontScale', 'self.color', 'self.fontThickness'], {}), '(frame, label, (border, 20 + border), self.font, self.fontScale,\n self.color, self.fontThickness)\n', (942, 1042), False, 'import cv2\n'), ((1395, 1503), 'cv2.putText', 'cv2.putText', (['frameDC', 'msg', '(frameDC.shape[1] - 310, 10 + 24)', 'self.font', 'fontScale', 'color', 'fontThickness'], {}), '(frameDC, msg, (frameDC.shape[1] - 310, 10 + 24), self.font,\n fontScale, color, fontThickness)\n', (1406, 1503), False, 'import cv2\n'), ((1817, 1918), 'cv2.putText', 'cv2.putText', (['frameDC', 'status_msg', '(10, 10 + 24)', 'self.font', 'fontScale', 'self.color', 'fontThickness'], {}), '(frameDC, status_msg, (10, 10 + 24), self.font, fontScale, self.\n color, fontThickness)\n', (1828, 1918), False, 'import cv2\n'), ((2562, 2622), 'cv2.rectangle', 'cv2.rectangle', (['frameDC', '(l, t)', '(r, b)', 'color', 'fontThickness'], {}), '(frameDC, (l, t), (r, b), color, fontThickness)\n', (2575, 2622), False, 'import cv2\n'), ((2631, 2722), 'cv2.putText', 'cv2.putText', (['frameDC', 'text', '(l + 5, b - 10)', 'self.font', 'fontScale', 'color', 'fontThickness'], {}), '(frameDC, text, (l + 5, b - 10), self.font, fontScale, color,\n fontThickness)\n', (2642, 2722), False, 'import cv2\n'), ((3451, 3511), 'cv2.rectangle', 'cv2.rectangle', (['frameDC', '(l, t)', '(r, b)', 'color', 'fontThickness'], {}), '(frameDC, (l, t), (r, b), color, fontThickness)\n', (3464, 3511), False, 'import cv2\n'), ((3520, 3611), 'cv2.putText', 'cv2.putText', (['frameDC', 'text', '(l + 5, b - 10)', 'self.font', 'fontScale', 'color', 'fontThickness'], {}), '(frameDC, text, (l + 5, b - 10), self.font, fontScale, color,\n fontThickness)\n', (3531, 3611), False, 'import cv2\n'), ((5760, 5780), 'copy.deepcopy', 'copy.deepcopy', (['frame'], {}), '(frame)\n', (5773, 5780), False, 'import copy\n'), ((6048, 6068), 'copy.deepcopy', 'copy.deepcopy', 
(['frame'], {}), '(frame)\n', (6061, 6068), False, 'import copy\n'), ((6853, 6873), 'copy.deepcopy', 'copy.deepcopy', (['frame'], {}), '(frame)\n', (6866, 6873), False, 'import copy\n'), ((7143, 7191), 'cv2.rectangle', 'cv2.rectangle', (['frameDC', '(l, t)', '(r, b)', 'color', '(2)'], {}), '(frameDC, (l, t), (r, b), color, 2)\n', (7156, 7191), False, 'import cv2\n'), ((7198, 7299), 'cv2.putText', 'cv2.putText', (['frameDC', 'text', '(l + 5, b - 10)', 'self.font', 'self.fontScale', 'color', 'self.fontThickness'], {}), '(frameDC, text, (l + 5, b - 10), self.font, self.fontScale,\n color, self.fontThickness)\n', (7209, 7299), False, 'import cv2\n'), ((4906, 4959), 'cv2.rectangle', 'cv2.rectangle', (['frameDC', '(l, t)', '(r, b)', 'self.color', '(2)'], {}), '(frameDC, (l, t), (r, b), self.color, 2)\n', (4919, 4959), False, 'import cv2\n'), ((4970, 5077), 'cv2.putText', 'cv2.putText', (['frameDC', 'text', '(l + 5, b - 10)', 'self.font', 'self.fontScale', 'self.color', 'self.fontThickness'], {}), '(frameDC, text, (l + 5, b - 10), self.font, self.fontScale, self\n .color, self.fontThickness)\n', (4981, 5077), False, 'import cv2\n'), ((6460, 6508), 'cv2.rectangle', 'cv2.rectangle', (['frameDC', '(l, t)', '(r, b)', 'color', '(2)'], {}), '(frameDC, (l, t), (r, b), color, 2)\n', (6473, 6508), False, 'import cv2\n'), ((6519, 6620), 'cv2.putText', 'cv2.putText', (['frameDC', 'text', '(l + 5, b - 10)', 'self.font', 'self.fontScale', 'color', 'self.fontThickness'], {}), '(frameDC, text, (l + 5, b - 10), self.font, self.fontScale,\n color, self.fontThickness)\n', (6530, 6620), False, 'import cv2\n')]
|
import pandas as pd
import plotly.express as px
import plotly.graph_objects as go
import numpy as np
from plotly.subplots import make_subplots
from pathlib import Path
repo_dir = Path(__file__).parent.parent
outputdir = repo_dir/'output'
outputdir.mkdir(parents=True, exist_ok=True)
casos = pd.read_csv('https://raw.githubusercontent.com/MinCiencia/Datos-COVID19/master/output/producto3/TotalesPorRegion_std.csv')
casos['Fecha'] = pd.to_datetime(casos['Fecha'])
casos_sintomaticos = casos[casos['Categoria']=='Casos nuevos con sintomas'].pivot(index='Fecha', columns='Region', values='Total')
casos_nuevos = casos[casos['Categoria']=='Casos nuevos totales'].pivot(index='Fecha', columns='Region', values='Total')
casos_activos_conf = casos[casos['Categoria']=='Casos activos confirmados'].pivot(index='Fecha', columns='Region', values='Total')
casos_activos_prob = casos[casos['Categoria']=='Casos activos probables'].pivot(index='Fecha', columns='Region', values='Total')
casos_nuevos_prob = casos[casos['Categoria']=='Casos probables acumulados'].pivot(index='Fecha', columns='Region', values='Total').diff()
casos_nuevos_antigeno = casos[casos['Categoria']=='Casos nuevos confirmados por antigeno'].pivot(index='Fecha', columns='Region', values='Total')
casos_sintomaticos.rename(columns={'Total': 'Chile'}, inplace=True)
casos_nuevos.rename(columns={'Total': 'Chile'}, inplace=True)
casos_activos_conf.rename(columns={'Total': 'Chile'}, inplace=True)
casos_activos_prob.rename(columns={'Total': 'Chile'}, inplace=True)
casos_nuevos_prob.rename(columns={'Total': 'Chile'}, inplace=True)
casos_nuevos_antigeno.rename(columns={'Total': 'Chile'}, inplace=True)
casos_nuevos_prob_antigeno = casos_nuevos.add(casos_nuevos_prob, fill_value=0)
casos_nuevos_prob_antigeno = casos_nuevos_prob_antigeno.add(casos_nuevos_antigeno, fill_value=0)
datos_regiones = pd.read_csv('https://raw.githubusercontent.com/ivanMSC/COVID19_Chile/master/utils/regionesChile.csv')
casos_activos = pd.read_csv('https://raw.githubusercontent.com/MinCiencia/Datos-COVID19/master/output/producto46/activos_vs_recuperados.csv')
casos_activos.rename(columns={
'fecha_primeros_sintomas': 'Fecha',
'activos': 'Activos',
'recuperados': 'Recuperados'
}, inplace=True)
casos_activos['Fecha'] = pd.to_datetime(casos_activos['Fecha'])
casos_activos['Activos'] = pd.to_numeric(casos_activos['Activos'])
casos_activos['Recuperados'] = pd.to_numeric(casos_activos['Recuperados'])
casos_activos.set_index('Fecha', inplace=True)
casos_uci = pd.read_csv('https://raw.githubusercontent.com/MinCiencia/Datos-COVID19/master/output/producto8/UCI_T.csv')
casos_uci.rename(columns={'Region': 'Fecha'}, inplace=True)
datos_regiones = pd.merge(datos_regiones, casos_uci.iloc[[0,1]].T, left_on='numTradicional', right_on=0)
datos_regiones.drop(columns=0, inplace=True)
datos_regiones.rename(columns={1: 'Poblacion'}, inplace=True)
casos_uci = casos_uci.iloc[2:]
casos_uci['Fecha'] = pd.to_datetime(casos_uci['Fecha'])
casos_uci.set_index('Fecha', inplace=True)
casos_uci['Chile'] = casos_uci[list(casos_uci.columns)].sum(axis=1)
DP19 = pd.read_csv('https://raw.githubusercontent.com/MinCiencia/Datos-COVID19/master/output/producto19/CasosActivosPorComuna_std.csv')
activos_dp19 = DP19[DP19['Comuna']=='Total'].pivot(index='Fecha', columns='Codigo region', values='Casos activos').sum(axis=1)
activos_dp19.index = pd.to_datetime(activos_dp19.index)
activos_dp19
DP5 = pd.read_csv('https://raw.githubusercontent.com/MinCiencia/Datos-COVID19/master/output/producto5/TotalesNacionales_T.csv')
DP5['Fecha'] = pd.to_datetime(DP5['Fecha'])
DP5 = DP5.set_index('Fecha')
fig = go.Figure()
Wong = ['#000000', '#E69F00', '#56B4E9',
'#009E73', '#F0E442', '#0072B2',
'#D55E00', '#CC79A7']
fig.add_trace(
go.Scatter(x=casos_activos.index,
y=casos_activos['Activos'],
mode='lines',
name='Activos (DP46)',
line_color=Wong[0]
)
)
fig.add_trace(
go.Scatter(x=casos_nuevos.index,
y=casos_nuevos['Chile'].rolling(11).sum(),
mode='lines',
name='Inferencia de activos (DP3)',
line_color=Wong[1]
)
)
fig.add_trace(
go.Scatter(x=casos_nuevos.index,
y=casos_nuevos['Chile'].rolling(11).sum().shift(-6),
mode='lines',
name='Inferencia de activos (DP3) (shift-6)',
line_color=Wong[5],
visible='legendonly',
)
)
fig.add_trace(
go.Scatter(x=casos_nuevos_prob_antigeno.index,
y=casos_nuevos_prob_antigeno['Chile'].rolling(11).sum(),
mode='lines',
name='Inferencia de activos (PCR + Probables + Antígeno) (DP3)',
line_color=Wong[6],
visible='legendonly',
)
)
fig.add_trace(
go.Scatter(x=activos_dp19.index,
y=activos_dp19,
mode='lines',
name='Activos (DP19)',
line_color=Wong[2]
)
)
fig.add_trace(
go.Scatter(x=DP5.index,
y=DP5['Casos activos por FD'],
mode='lines',
name='Casos Activos FD (DP5)',
line_color=Wong[3]
)
)
fig.add_trace(
go.Scatter(x=DP5.index,
y=DP5['Casos activos por FIS'],
mode='lines',
name='Casos Activos FIS (DP5)',
line_color=Wong[4]
)
)
fig.update_layout(hovermode='x')
fig.update_layout(template='plotly_white',
title='Casos Activos de COVID19 en Chile')
fig.update_layout(yaxis_tickformat = ',')
fig.update_layout(
font=dict(
size=14,
)
)
fig.add_layout_image(
dict(
source="https://i2.wp.com/dlab.cl/wp-content/uploads/2016/08/LogoWebDlab.png",
xref="paper", yref="paper",
x=1, y=1.05,
sizex=0.2, sizey=0.2,
xanchor="right", yanchor="bottom"
)
)
fig.write_html(f'{outputdir}/Casos_Activos.html')
fig = make_subplots(rows=2, shared_xaxes=True, specs=[[{"secondary_y": True}],[{"secondary_y": True}],], row_heights=[0.7, 0.3])
fig.add_trace(
go.Scatter(x=casos_sintomaticos.index,
y=casos_sintomaticos['Chile'].rolling(11).sum().rolling(7).mean(),
mode='lines',
name='Inferencia de activos (DP3)',
line_color=Wong[1]
)
, row=1, col=1, secondary_y=False,
)
fig.add_trace(
go.Scatter(x=casos_uci.index,
y=casos_uci['Chile'],
mode='lines',
name='Ocupación UCI (DP8)',
line_color=Wong[2]
)
, row=1, col=1, secondary_y=True,
)
ucilag = 10
propuci = casos_uci['Chile'].shift(-ucilag)/casos_sintomaticos['Chile'].rolling(11).sum()
propuci_toto = casos_uci['Chile'].shift(-ucilag)/casos_nuevos_prob_antigeno['Chile'].rolling(11).sum()
prediccion_uci = casos_sintomaticos['Chile'].rolling(11).sum().rolling(7).mean()*0.066
prediccion_uci_toto = casos_nuevos_prob_antigeno['Chile'].rolling(11).sum().rolling(7).mean()*0.03771422787573839
prediccion_uci.index = prediccion_uci.index + pd.Timedelta(days=ucilag)
prediccion_uci_toto.index = prediccion_uci_toto.index + pd.Timedelta(days=ucilag)
fig.add_trace(
go.Scatter(x=prediccion_uci.index,
y=prediccion_uci,
mode='lines',
name='Ocupación UCI (predicción desde activos sintomaticos)',
line_color=Wong[6],
visible='legendonly'
)
, row=1, col=1, secondary_y=True,
)
fig.add_trace(
go.Scatter(x=prediccion_uci_toto.index,
y=prediccion_uci_toto,
mode='lines',
name='Ocupación UCI (predicción desde total de activos)',
line_color=Wong[7],
visible='legendonly'
)
, row=1, col=1, secondary_y=True,
)
fig.add_trace(
go.Scatter(x=casos_uci.shift(-ucilag).index,
y=casos_uci['Chile'].shift(-ucilag),
mode='lines',
name=f'Ocupación UCI (shift-{ucilag})',
line_color=Wong[3],
visible='legendonly'
)
, row=1, col=1, secondary_y=True,
)
fig.add_trace(
go.Scatter(x=propuci.index,
y=propuci.rolling(7).mean(),
mode='lines',
name=f'UCI (shift-{ucilag}) / Activos',
line_color=Wong[0]
)
, row=2, col=1, secondary_y=False,
)
propuci_nolag = casos_uci['Chile']/casos_sintomaticos['Chile'].rolling(11).sum()
fig.add_trace(
go.Scatter(x=propuci_nolag.index,
y=propuci_nolag.rolling(7).mean(),
mode='lines',
name='UCI / Activos',
line_color=Wong[4],
visible='legendonly'
)
, row=2, col=1, secondary_y=False,
)
fig.add_trace(
go.Scatter(x=propuci_toto.index,
y=propuci_toto.rolling(7).mean(),
mode='lines',
name='UCI / Activos (PCR + Probable + Antígeno)',
line_color=Wong[7],
visible='legendonly'
)
, row=2, col=1, secondary_y=False,
)
fig.update_layout(hovermode='x')
fig.update_layout(yaxis3_tickformat = '.1%')
fig.update_layout(yaxis1_tickformat = ',.0f')
fig.update_layout(yaxis2_tickformat = ',.0f')
fig.update_layout(template='plotly_white',
title='Incidencia del número de infectados activos en la utilización de UCIs')
fig.update_layout(
font=dict(
size=14,
)
)
fig.update_yaxes(row=1, col=1, title_text='Casos activos')
fig.update_yaxes(row=1, col=1, title_text='Ocupación UCI', secondary_y=True)
fig.update_yaxes(row=2, col=1, title_text='UCI / Activos')
fig.add_layout_image(
dict(
source="https://i2.wp.com/dlab.cl/wp-content/uploads/2016/08/LogoWebDlab.png",
xref="paper", yref="paper",
x=1, y=1.05,
sizex=0.2, sizey=0.2,
xanchor="right", yanchor="bottom"
)
)
fig.write_html(f'{outputdir}/Casos_Activos_vs_UCI.html')
|
[
"plotly.graph_objects.Scatter",
"pandas.read_csv",
"plotly.graph_objects.Figure",
"pandas.merge",
"pathlib.Path",
"pandas.to_datetime",
"pandas.Timedelta",
"plotly.subplots.make_subplots",
"pandas.to_numeric"
] |
[((292, 424), 'pandas.read_csv', 'pd.read_csv', (['"""https://raw.githubusercontent.com/MinCiencia/Datos-COVID19/master/output/producto3/TotalesPorRegion_std.csv"""'], {}), "(\n 'https://raw.githubusercontent.com/MinCiencia/Datos-COVID19/master/output/producto3/TotalesPorRegion_std.csv'\n )\n", (303, 424), True, 'import pandas as pd\n'), ((432, 462), 'pandas.to_datetime', 'pd.to_datetime', (["casos['Fecha']"], {}), "(casos['Fecha'])\n", (446, 462), True, 'import pandas as pd\n'), ((1855, 1966), 'pandas.read_csv', 'pd.read_csv', (['"""https://raw.githubusercontent.com/ivanMSC/COVID19_Chile/master/utils/regionesChile.csv"""'], {}), "(\n 'https://raw.githubusercontent.com/ivanMSC/COVID19_Chile/master/utils/regionesChile.csv'\n )\n", (1866, 1966), True, 'import pandas as pd\n'), ((1973, 2108), 'pandas.read_csv', 'pd.read_csv', (['"""https://raw.githubusercontent.com/MinCiencia/Datos-COVID19/master/output/producto46/activos_vs_recuperados.csv"""'], {}), "(\n 'https://raw.githubusercontent.com/MinCiencia/Datos-COVID19/master/output/producto46/activos_vs_recuperados.csv'\n )\n", (1984, 2108), True, 'import pandas as pd\n'), ((2271, 2309), 'pandas.to_datetime', 'pd.to_datetime', (["casos_activos['Fecha']"], {}), "(casos_activos['Fecha'])\n", (2285, 2309), True, 'import pandas as pd\n'), ((2337, 2376), 'pandas.to_numeric', 'pd.to_numeric', (["casos_activos['Activos']"], {}), "(casos_activos['Activos'])\n", (2350, 2376), True, 'import pandas as pd\n'), ((2408, 2451), 'pandas.to_numeric', 'pd.to_numeric', (["casos_activos['Recuperados']"], {}), "(casos_activos['Recuperados'])\n", (2421, 2451), True, 'import pandas as pd\n'), ((2511, 2628), 'pandas.read_csv', 'pd.read_csv', (['"""https://raw.githubusercontent.com/MinCiencia/Datos-COVID19/master/output/producto8/UCI_T.csv"""'], {}), "(\n 'https://raw.githubusercontent.com/MinCiencia/Datos-COVID19/master/output/producto8/UCI_T.csv'\n )\n", (2522, 2628), True, 'import pandas as pd\n'), ((2696, 2788), 'pandas.merge', 'pd.merge', 
(['datos_regiones', 'casos_uci.iloc[[0, 1]].T'], {'left_on': '"""numTradicional"""', 'right_on': '(0)'}), "(datos_regiones, casos_uci.iloc[[0, 1]].T, left_on='numTradicional',\n right_on=0)\n", (2704, 2788), True, 'import pandas as pd\n'), ((2943, 2977), 'pandas.to_datetime', 'pd.to_datetime', (["casos_uci['Fecha']"], {}), "(casos_uci['Fecha'])\n", (2957, 2977), True, 'import pandas as pd\n'), ((3096, 3234), 'pandas.read_csv', 'pd.read_csv', (['"""https://raw.githubusercontent.com/MinCiencia/Datos-COVID19/master/output/producto19/CasosActivosPorComuna_std.csv"""'], {}), "(\n 'https://raw.githubusercontent.com/MinCiencia/Datos-COVID19/master/output/producto19/CasosActivosPorComuna_std.csv'\n )\n", (3107, 3234), True, 'import pandas as pd\n'), ((3373, 3407), 'pandas.to_datetime', 'pd.to_datetime', (['activos_dp19.index'], {}), '(activos_dp19.index)\n', (3387, 3407), True, 'import pandas as pd\n'), ((3427, 3558), 'pandas.read_csv', 'pd.read_csv', (['"""https://raw.githubusercontent.com/MinCiencia/Datos-COVID19/master/output/producto5/TotalesNacionales_T.csv"""'], {}), "(\n 'https://raw.githubusercontent.com/MinCiencia/Datos-COVID19/master/output/producto5/TotalesNacionales_T.csv'\n )\n", (3438, 3558), True, 'import pandas as pd\n'), ((3564, 3592), 'pandas.to_datetime', 'pd.to_datetime', (["DP5['Fecha']"], {}), "(DP5['Fecha'])\n", (3578, 3592), True, 'import pandas as pd\n'), ((3629, 3640), 'plotly.graph_objects.Figure', 'go.Figure', ([], {}), '()\n', (3638, 3640), True, 'import plotly.graph_objects as go\n'), ((6037, 6164), 'plotly.subplots.make_subplots', 'make_subplots', ([], {'rows': '(2)', 'shared_xaxes': '(True)', 'specs': "[[{'secondary_y': True}], [{'secondary_y': True}]]", 'row_heights': '[0.7, 0.3]'}), "(rows=2, shared_xaxes=True, specs=[[{'secondary_y': True}], [{\n 'secondary_y': True}]], row_heights=[0.7, 0.3])\n", (6050, 6164), False, 'from plotly.subplots import make_subplots\n'), ((3774, 3896), 'plotly.graph_objects.Scatter', 'go.Scatter', ([], {'x': 
'casos_activos.index', 'y': "casos_activos['Activos']", 'mode': '"""lines"""', 'name': '"""Activos (DP46)"""', 'line_color': 'Wong[0]'}), "(x=casos_activos.index, y=casos_activos['Activos'], mode='lines',\n name='Activos (DP46)', line_color=Wong[0])\n", (3784, 3896), True, 'import plotly.graph_objects as go\n'), ((4868, 4978), 'plotly.graph_objects.Scatter', 'go.Scatter', ([], {'x': 'activos_dp19.index', 'y': 'activos_dp19', 'mode': '"""lines"""', 'name': '"""Activos (DP19)"""', 'line_color': 'Wong[2]'}), "(x=activos_dp19.index, y=activos_dp19, mode='lines', name=\n 'Activos (DP19)', line_color=Wong[2])\n", (4878, 4978), True, 'import plotly.graph_objects as go\n'), ((5070, 5194), 'plotly.graph_objects.Scatter', 'go.Scatter', ([], {'x': 'DP5.index', 'y': "DP5['Casos activos por FD']", 'mode': '"""lines"""', 'name': '"""Casos Activos FD (DP5)"""', 'line_color': 'Wong[3]'}), "(x=DP5.index, y=DP5['Casos activos por FD'], mode='lines', name=\n 'Casos Activos FD (DP5)', line_color=Wong[3])\n", (5080, 5194), True, 'import plotly.graph_objects as go\n'), ((5286, 5412), 'plotly.graph_objects.Scatter', 'go.Scatter', ([], {'x': 'DP5.index', 'y': "DP5['Casos activos por FIS']", 'mode': '"""lines"""', 'name': '"""Casos Activos FIS (DP5)"""', 'line_color': 'Wong[4]'}), "(x=DP5.index, y=DP5['Casos activos por FIS'], mode='lines', name=\n 'Casos Activos FIS (DP5)', line_color=Wong[4])\n", (5296, 5412), True, 'import plotly.graph_objects as go\n'), ((6490, 6608), 'plotly.graph_objects.Scatter', 'go.Scatter', ([], {'x': 'casos_uci.index', 'y': "casos_uci['Chile']", 'mode': '"""lines"""', 'name': '"""Ocupación UCI (DP8)"""', 'line_color': 'Wong[2]'}), "(x=casos_uci.index, y=casos_uci['Chile'], mode='lines', name=\n 'Ocupación UCI (DP8)', line_color=Wong[2])\n", (6500, 6608), True, 'import plotly.graph_objects as go\n'), ((7172, 7197), 'pandas.Timedelta', 'pd.Timedelta', ([], {'days': 'ucilag'}), '(days=ucilag)\n', (7184, 7197), True, 'import pandas as pd\n'), ((7254, 7279), 
'pandas.Timedelta', 'pd.Timedelta', ([], {'days': 'ucilag'}), '(days=ucilag)\n', (7266, 7279), True, 'import pandas as pd\n'), ((7299, 7479), 'plotly.graph_objects.Scatter', 'go.Scatter', ([], {'x': 'prediccion_uci.index', 'y': 'prediccion_uci', 'mode': '"""lines"""', 'name': '"""Ocupación UCI (predicción desde activos sintomaticos)"""', 'line_color': 'Wong[6]', 'visible': '"""legendonly"""'}), "(x=prediccion_uci.index, y=prediccion_uci, mode='lines', name=\n 'Ocupación UCI (predicción desde activos sintomaticos)', line_color=\n Wong[6], visible='legendonly')\n", (7309, 7479), True, 'import plotly.graph_objects as go\n'), ((7619, 7804), 'plotly.graph_objects.Scatter', 'go.Scatter', ([], {'x': 'prediccion_uci_toto.index', 'y': 'prediccion_uci_toto', 'mode': '"""lines"""', 'name': '"""Ocupación UCI (predicción desde total de activos)"""', 'line_color': 'Wong[7]', 'visible': '"""legendonly"""'}), "(x=prediccion_uci_toto.index, y=prediccion_uci_toto, mode='lines',\n name='Ocupación UCI (predicción desde total de activos)', line_color=\n Wong[7], visible='legendonly')\n", (7629, 7804), True, 'import plotly.graph_objects as go\n'), ((179, 193), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (183, 193), False, 'from pathlib import Path\n')]
|
# coding:utf-8
'''
@Copyright:LintCode
@Author: taoleetju
@Problem: http://www.lintcode.com/problem/cosine-similarity
@Language: Python
@Datetime: 15-10-05 15:16
'''
class Solution:
    """
    @param A: An integer array.
    @param B: An integer array.
    @return: Cosine similarity.
    """
    def cosineSimilarity(self, A, B):
        # Accumulate the dot product and both squared magnitudes in one pass.
        dot = 0
        norm_a = 0
        norm_b = 0
        for idx in range(len(A)):
            a = A[idx]
            b = B[idx]
            dot += a * b
            norm_a += a * a
            norm_b += b * b
        # Cosine similarity is undefined for a zero-length vector; the
        # problem's convention is to report 2.0000 in that case.
        if norm_a == 0 or norm_b == 0:
            return 2.0000
        from math import sqrt
        return dot / (sqrt(norm_a) * sqrt(norm_b))
|
[
"math.sqrt"
] |
[((638, 646), 'math.sqrt', 'sqrt', (['LA'], {}), '(LA)\n', (642, 646), False, 'from math import sqrt\n'), ((649, 657), 'math.sqrt', 'sqrt', (['LB'], {}), '(LB)\n', (653, 657), False, 'from math import sqrt\n')]
|
# Generated by Django 2.0.4 on 2018-05-15 03:13
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated migration: re-declares PushNotification.account with an
    # explicit on_delete=CASCADE and the reverse accessor
    # 'notification_tokens'.

    dependencies = [
        ('ticketing', '0003_auto_20180515_0148'),
    ]

    operations = [
        migrations.AlterField(
            model_name='pushnotification',
            name='account',
            # Deleting an Account cascades to its push-notification rows.
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='notification_tokens', to='ticketing.Account'),
        ),
    ]
|
[
"django.db.models.ForeignKey"
] |
[((382, 509), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""notification_tokens"""', 'to': '"""ticketing.Account"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='notification_tokens', to='ticketing.Account')\n", (399, 509), False, 'from django.db import migrations, models\n')]
|
"""
Tests of 'python -m recipy' usage.
This script uses a Python script (run_numpy_no_recipy.py) about
which the following assumptions are made:
* Co-located with this test script, in the same directory.
* Expects two arguments via the command-line: an input file
name and an output file name.
* Reads the input file and creates the output file using a library
which recipy is configured to log.
"""
# Copyright (c) 2016 University of Edinburgh.
import os
import os.path
import shutil
import tempfile
from integration_test import helpers
from integration_test import recipy_environment as recipyenv
class TestMflag:
    """
    Tests of 'python -m recipy' usage.

    The test script is run twice -- once logged via 'python -m recipy'
    and once with 'import recipy' patched into the script -- and the
    resulting log entries are compared.
    """

    SCRIPT_NAME = "run_numpy_no_recipy.py"
    """ Test script assumed to be in same directory as this class. """

    script = ""
    """ Absolute path to test script. """

    original_script = ""
    """ Absolute path to original copy of test script. """

    directory = ""
    """ Absolute path to temporary directory for these tests. """

    def setup_method(self, method):
        """
        py.test setup function, creates test directory in $TEMP,
        sets 'script' with path to SCRIPT_NAME and copies script from
        'script' to 'original_script'.

        :param method: Test method
        :type method: function
        """
        TestMflag.directory = tempfile.mkdtemp(TestMflag.__name__)
        TestMflag.script = os.path.join(os.path.dirname(__file__),
                                        TestMflag.SCRIPT_NAME)
        TestMflag.original_script = TestMflag.script + ".orig"
        # Keep a pristine copy so teardown can restore the script even after
        # a test modifies it (e.g. by enabling recipy).
        shutil.copy(TestMflag.script, TestMflag.original_script)

    def teardown_method(self, method):
        """
        py.test teardown function, deletes test directory in $TEMP,
        and moves 'original_script' to 'script'.
        """
        if os.path.isdir(TestMflag.directory):
            shutil.rmtree(TestMflag.directory)
        os.remove(TestMflag.script)
        os.rename(TestMflag.original_script, TestMflag.script)

    def test_m_recipy(self):
        """
        Running 'python -m recipy script' and the same script that
        includes 'import recipy' should give the same results in the
        log (aside from their 'unique_id', 'diff', 'date',
        'exit_date', 'command_args', 'inputs' and 'outputs').
        """
        input_file = os.path.join(TestMflag.directory, "input.csv")
        with open(input_file, "w") as csv_file:
            csv_file.write("1,4,9,16\n")
        output_file = os.path.join(TestMflag.directory, "output.csv")
        # First run: script has no 'import recipy'; logging happens via
        # the '-m recipy' module invocation.
        exit_code, _ = helpers.execute_python(
            ["-m", "recipy", TestMflag.script,
             input_file, output_file])
        assert exit_code == 0, ("Unexpected exit code " + str(exit_code))
        module_log, _ = helpers.get_log(recipyenv.get_recipydb())
        # Second run: patch 'import recipy' into the script and repeat.
        helpers.enable_recipy(TestMflag.original_script, TestMflag.script)
        exit_code, _ = helpers.execute_python(
            ["-m", "recipy", TestMflag.script,
             input_file, output_file])
        assert exit_code == 0, ("Unexpected exit code " + str(exit_code))
        import_log, _ = helpers.get_log(recipyenv.get_recipydb())
        for key in ["inputs", "outputs"]:
            assert len(module_log[key]) == len(import_log[key]),\
                ("Expected same number of " + key + " files")
            for index in range(0, len(module_log[key])):
                # Fix: these two names were swapped in the original
                # (module_file was unpacked from import_log and vice versa);
                # behaviour was unaffected only because the check is
                # symmetric, but the code read misleadingly.
                [module_file, _] = module_log[key][index]
                [import_file, _] = import_log[key][index]
                assert os.path.basename(module_file) ==\
                    os.path.basename(import_file),\
                    "Expected local file names to be equal"
        # Remove fields that are specific to a run from BOTH logs.
        # Fix: the original deleted from import_log only when the key was
        # present in module_log, which could leave a stray key in import_log
        # and fail the final comparison spuriously.
        for key in ["unique_id", "diff", "date", "exit_date",
                    "command_args", "inputs", "outputs"]:
            module_log.pop(key, None)
            import_log.pop(key, None)
        assert module_log == import_log,\
            ("Expected " + str(module_log) + " to equal " +
             str(import_log))
|
[
"integration_test.helpers.enable_recipy",
"os.remove",
"integration_test.helpers.execute_python",
"os.path.isdir",
"os.path.basename",
"os.rename",
"os.path.dirname",
"integration_test.recipy_environment.get_recipydb",
"tempfile.mkdtemp",
"shutil.rmtree",
"os.path.join",
"shutil.copy"
] |
[((1356, 1392), 'tempfile.mkdtemp', 'tempfile.mkdtemp', (['TestMflag.__name__'], {}), '(TestMflag.__name__)\n', (1372, 1392), False, 'import tempfile\n'), ((1594, 1650), 'shutil.copy', 'shutil.copy', (['TestMflag.script', 'TestMflag.original_script'], {}), '(TestMflag.script, TestMflag.original_script)\n', (1605, 1650), False, 'import shutil\n'), ((1843, 1877), 'os.path.isdir', 'os.path.isdir', (['TestMflag.directory'], {}), '(TestMflag.directory)\n', (1856, 1877), False, 'import os\n'), ((1934, 1961), 'os.remove', 'os.remove', (['TestMflag.script'], {}), '(TestMflag.script)\n', (1943, 1961), False, 'import os\n'), ((1970, 2024), 'os.rename', 'os.rename', (['TestMflag.original_script', 'TestMflag.script'], {}), '(TestMflag.original_script, TestMflag.script)\n', (1979, 2024), False, 'import os\n'), ((2356, 2402), 'os.path.join', 'os.path.join', (['TestMflag.directory', '"""input.csv"""'], {}), "(TestMflag.directory, 'input.csv')\n", (2368, 2402), False, 'import os\n'), ((2514, 2561), 'os.path.join', 'os.path.join', (['TestMflag.directory', '"""output.csv"""'], {}), "(TestMflag.directory, 'output.csv')\n", (2526, 2561), False, 'import os\n'), ((2586, 2673), 'integration_test.helpers.execute_python', 'helpers.execute_python', (["['-m', 'recipy', TestMflag.script, input_file, output_file]"], {}), "(['-m', 'recipy', TestMflag.script, input_file,\n output_file])\n", (2608, 2673), False, 'from integration_test import helpers\n'), ((2845, 2911), 'integration_test.helpers.enable_recipy', 'helpers.enable_recipy', (['TestMflag.original_script', 'TestMflag.script'], {}), '(TestMflag.original_script, TestMflag.script)\n', (2866, 2911), False, 'from integration_test import helpers\n'), ((2936, 3023), 'integration_test.helpers.execute_python', 'helpers.execute_python', (["['-m', 'recipy', TestMflag.script, input_file, output_file]"], {}), "(['-m', 'recipy', TestMflag.script, input_file,\n output_file])\n", (2958, 3023), False, 'from integration_test import helpers\n'), ((1433, 
1458), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1448, 1458), False, 'import os\n'), ((1891, 1925), 'shutil.rmtree', 'shutil.rmtree', (['TestMflag.directory'], {}), '(TestMflag.directory)\n', (1904, 1925), False, 'import shutil\n'), ((2810, 2834), 'integration_test.recipy_environment.get_recipydb', 'recipyenv.get_recipydb', ([], {}), '()\n', (2832, 2834), True, 'from integration_test import recipy_environment as recipyenv\n'), ((3160, 3184), 'integration_test.recipy_environment.get_recipydb', 'recipyenv.get_recipydb', ([], {}), '()\n', (3182, 3184), True, 'from integration_test import recipy_environment as recipyenv\n'), ((3555, 3584), 'os.path.basename', 'os.path.basename', (['import_file'], {}), '(import_file)\n', (3571, 3584), False, 'import os\n'), ((3609, 3638), 'os.path.basename', 'os.path.basename', (['module_file'], {}), '(module_file)\n', (3625, 3638), False, 'import os\n')]
|
from django import template
from ..music_handler.interpret import KEYS
register = template.Library()


@register.filter
def num2chord(value):
    """Render a numeric key index (mod 12) as its chord/key name.

    Returns ``value`` unchanged when it cannot be interpreted as an
    integer or the KEYS lookup fails.
    """
    try:
        chord = KEYS[int(value) % 12]
    except Exception:
        chord = value
    return chord
|
[
"django.template.Library"
] |
[((84, 102), 'django.template.Library', 'template.Library', ([], {}), '()\n', (100, 102), False, 'from django import template\n')]
|
import pytest
import torch
from perceiver_pytorch.queries import LearnableQuery
from perceiver_pytorch.perceiver_io import PerceiverIO
from perceiver_pytorch.utils import encode_position
import einops
@pytest.mark.parametrize("layer_shape", ["2d", "3d"])
def test_learnable_query(layer_shape):
    """With generated Fourier features, queries flatten to [B, T*H*W, C]."""
    creator = LearnableQuery(
        channel_dim=32,
        query_shape=(6, 16, 16),
        conv_layer=layer_shape,
        max_frequency=64.0,
        num_frequency_bands=128,
        sine_only=False,
        generate_fourier_features=True,
    )
    video = torch.randn((4, 6, 12, 16, 16))
    queries = creator(video)
    # Channels = channel_dim + 3 axes * (2 * 128 bands + 1)
    #          = 32 + 3 * 257 = 32 + 771 = 803
    assert queries.shape == (4, 6 * 16 * 16, 803)
@pytest.mark.parametrize("layer_shape", ["2d", "3d"])
def test_learnable_query_no_fourier(layer_shape):
    """Without Fourier features the channel count is just ``channel_dim``."""
    creator = LearnableQuery(
        channel_dim=32,
        query_shape=(6, 16, 16),
        conv_layer=layer_shape,
        max_frequency=64.0,
        num_frequency_bands=128,
        sine_only=False,
        generate_fourier_features=False,
    )
    video = torch.randn((4, 6, 12, 16, 16))
    queries = creator(video)
    assert queries.shape == (4, 6 * 16 * 16, 32)
@pytest.mark.parametrize("layer_shape", ["2d", "3d"])
def test_learnable_query_qpplication(layer_shape):
    """LearnableQuery output can be consumed as PerceiverIO decoder queries."""
    output_shape = (6, 16, 16)
    creator = LearnableQuery(
        channel_dim=32,
        query_shape=output_shape,
        conv_layer=layer_shape,
        max_frequency=64.0,
        num_frequency_bands=32,
        sine_only=False,
        generate_fourier_features=True,
    )
    with torch.no_grad():
        creator.eval()
        video = torch.randn((2, 6, 12, 16, 16))
        queries = creator(video)
        model = PerceiverIO(depth=2, dim=100, queries_dim=creator.output_shape()[-1])
        model.eval()
        latents = torch.randn((2, 256, 100))
        decoded = model(latents, queries=queries)
        # Un-flatten the (t h w) query axis back into time/height/width.
        decoded = einops.rearrange(
            decoded,
            "b (t h w) c -> b t c h w",
            t=output_shape[0],
            h=output_shape[1],
            w=output_shape[2],
        )
        assert decoded.shape == (2, 6, 227, 16, 16)
@pytest.mark.parametrize("layer_shape", ["2d", "3d"])
def test_learnable_query_precomputed_fourier_only(layer_shape):
    """Precomputed Fourier features are concatenated when generation is off."""
    fourier_features = encode_position(
        1,  # Batch size, 1 for this as it will be adapted in forward
        axis=(10, 16, 16),  # 4 history + 6 future steps
        max_frequency=16.0,
        num_frequency_bands=128,
        sine_only=False,
    )
    # Keep only the 6 future timesteps.
    fourier_features = fourier_features[:, 4:]
    creator = LearnableQuery(
        channel_dim=32,
        query_shape=(6, 16, 16),
        conv_layer=layer_shape,
        max_frequency=64.0,
        num_frequency_bands=16,
        sine_only=False,
        precomputed_fourier=fourier_features,
        generate_fourier_features=False,
    )
    video = torch.randn((4, 6, 12, 16, 16))
    queries = creator(video)
    # Channels = 32 + 3 axes * (2 * 128 bands + 1) = 32 + 771 = 803
    assert queries.shape == (4, 6 * 16 * 16, 803)
@pytest.mark.parametrize("layer_shape", ["2d", "3d"])
def test_learnable_query_precomputed_and_generated_fourer(layer_shape):
    """Precomputed and generated Fourier features stack along channels."""
    fourier_features = encode_position(
        1,  # Batch size, 1 for this as it will be adapted in forward
        axis=(10, 16, 16),  # 4 history + 6 future steps
        max_frequency=16.0,
        num_frequency_bands=128,
        sine_only=False,
    )
    # Keep only the 6 future timesteps.
    fourier_features = fourier_features[:, 4:]
    creator = LearnableQuery(
        channel_dim=32,
        query_shape=(6, 16, 16),
        conv_layer=layer_shape,
        max_frequency=64.0,
        num_frequency_bands=128,
        sine_only=False,
        precomputed_fourier=fourier_features,
        generate_fourier_features=True,
    )
    video = torch.randn((4, 6, 12, 16, 16))
    queries = creator(video)
    # Generated: 32 + 3 * (2 * 128 + 1) = 803; precomputed adds another 771.
    assert queries.shape == (4, 6 * 16 * 16, 803 + 771)
@pytest.mark.parametrize("layer_shape", ["2d", "3d"])
def test_learnable_query_pass_in_fourier(layer_shape):
    """Fourier features passed at call time are used instead of generated ones."""
    call_time_features = encode_position(
        4,
        axis=(10, 16, 16),  # 4 history + 6 future steps
        max_frequency=16.0,
        num_frequency_bands=64,
        sine_only=False,
    )
    # Keep only the 6 future timesteps.
    call_time_features = call_time_features[:, 4:]
    creator = LearnableQuery(
        channel_dim=32,
        query_shape=(6, 16, 16),
        conv_layer=layer_shape,
        max_frequency=64.0,
        num_frequency_bands=128,
        sine_only=False,
        generate_fourier_features=False,
    )
    video = torch.randn((4, 6, 12, 16, 16))
    queries = creator(video, call_time_features)
    # 32 learnable channels + 3 axes * (2 * 64 bands + 1) = 419.
    # Smaller than the 128-band count configured above, proving the
    # call-time features were the ones actually used.
    assert queries.shape == (4, 6 * 16 * 16, 419)
@pytest.mark.parametrize("layer_shape", ["2d", "3d"])
def test_learnable_query_all_fouriers(layer_shape):
    """Call-time, precomputed, and generated Fourier features all combine."""
    call_time_features = encode_position(
        4,
        axis=(10, 16, 16),  # 4 history + 6 future steps
        max_frequency=16.0,
        num_frequency_bands=32,
        sine_only=False,
    )
    # Keep only the 6 future timesteps.
    call_time_features = call_time_features[:, 4:]
    precomputed_features = encode_position(
        1,
        axis=(10, 16, 16),  # 4 history + 6 future steps
        max_frequency=16.0,
        num_frequency_bands=64,
        sine_only=False,
    )
    # Keep only the 6 future timesteps.
    precomputed_features = precomputed_features[:, 4:]
    creator = LearnableQuery(
        channel_dim=32,
        query_shape=(6, 16, 16),
        conv_layer=layer_shape,
        max_frequency=64.0,
        num_frequency_bands=128,
        sine_only=False,
        precomputed_fourier=precomputed_features,
        generate_fourier_features=True,
    )
    video = torch.randn((4, 6, 12, 16, 16))
    queries = creator(video, call_time_features)
    # Generated 803 channels + precomputed 771 - overlap with the batch
    # features' contribution yields 1385 total channels.
    assert queries.shape == (4, 6 * 16 * 16, 1385)
|
[
"perceiver_pytorch.utils.encode_position",
"torch.randn",
"einops.rearrange",
"perceiver_pytorch.queries.LearnableQuery",
"pytest.mark.parametrize",
"torch.no_grad"
] |
[((204, 256), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""layer_shape"""', "['2d', '3d']"], {}), "('layer_shape', ['2d', '3d'])\n", (227, 256), False, 'import pytest\n'), ((828, 880), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""layer_shape"""', "['2d', '3d']"], {}), "('layer_shape', ['2d', '3d'])\n", (851, 880), False, 'import pytest\n'), ((1304, 1356), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""layer_shape"""', "['2d', '3d']"], {}), "('layer_shape', ['2d', '3d'])\n", (1327, 1356), False, 'import pytest\n'), ((2342, 2394), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""layer_shape"""', "['2d', '3d']"], {}), "('layer_shape', ['2d', '3d'])\n", (2365, 2394), False, 'import pytest\n'), ((3387, 3439), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""layer_shape"""', "['2d', '3d']"], {}), "('layer_shape', ['2d', '3d'])\n", (3410, 3439), False, 'import pytest\n'), ((4513, 4565), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""layer_shape"""', "['2d', '3d']"], {}), "('layer_shape', ['2d', '3d'])\n", (4536, 4565), False, 'import pytest\n'), ((5562, 5614), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""layer_shape"""', "['2d', '3d']"], {}), "('layer_shape', ['2d', '3d'])\n", (5585, 5614), False, 'import pytest\n'), ((316, 499), 'perceiver_pytorch.queries.LearnableQuery', 'LearnableQuery', ([], {'channel_dim': '(32)', 'query_shape': '(6, 16, 16)', 'conv_layer': 'layer_shape', 'max_frequency': '(64.0)', 'num_frequency_bands': '(128)', 'sine_only': '(False)', 'generate_fourier_features': '(True)'}), '(channel_dim=32, query_shape=(6, 16, 16), conv_layer=\n layer_shape, max_frequency=64.0, num_frequency_bands=128, sine_only=\n False, generate_fourier_features=True)\n', (330, 499), False, 'from perceiver_pytorch.queries import LearnableQuery\n'), ((561, 592), 'torch.randn', 'torch.randn', (['(4, 6, 12, 16, 16)'], {}), '((4, 6, 12, 16, 16))\n', (572, 592), False, 'import torch\n'), 
((951, 1135), 'perceiver_pytorch.queries.LearnableQuery', 'LearnableQuery', ([], {'channel_dim': '(32)', 'query_shape': '(6, 16, 16)', 'conv_layer': 'layer_shape', 'max_frequency': '(64.0)', 'num_frequency_bands': '(128)', 'sine_only': '(False)', 'generate_fourier_features': '(False)'}), '(channel_dim=32, query_shape=(6, 16, 16), conv_layer=\n layer_shape, max_frequency=64.0, num_frequency_bands=128, sine_only=\n False, generate_fourier_features=False)\n', (965, 1135), False, 'from perceiver_pytorch.queries import LearnableQuery\n'), ((1197, 1228), 'torch.randn', 'torch.randn', (['(4, 6, 12, 16, 16)'], {}), '((4, 6, 12, 16, 16))\n', (1208, 1228), False, 'import torch\n'), ((1459, 1642), 'perceiver_pytorch.queries.LearnableQuery', 'LearnableQuery', ([], {'channel_dim': '(32)', 'query_shape': 'output_shape', 'conv_layer': 'layer_shape', 'max_frequency': '(64.0)', 'num_frequency_bands': '(32)', 'sine_only': '(False)', 'generate_fourier_features': '(True)'}), '(channel_dim=32, query_shape=output_shape, conv_layer=\n layer_shape, max_frequency=64.0, num_frequency_bands=32, sine_only=\n False, generate_fourier_features=True)\n', (1473, 1642), False, 'from perceiver_pytorch.queries import LearnableQuery\n'), ((2486, 2589), 'perceiver_pytorch.utils.encode_position', 'encode_position', (['(1)'], {'axis': '(10, 16, 16)', 'max_frequency': '(16.0)', 'num_frequency_bands': '(128)', 'sine_only': '(False)'}), '(1, axis=(10, 16, 16), max_frequency=16.0,\n num_frequency_bands=128, sine_only=False)\n', (2501, 2589), False, 'from perceiver_pytorch.utils import encode_position\n'), ((2825, 3054), 'perceiver_pytorch.queries.LearnableQuery', 'LearnableQuery', ([], {'channel_dim': '(32)', 'query_shape': '(6, 16, 16)', 'conv_layer': 'layer_shape', 'max_frequency': '(64.0)', 'num_frequency_bands': '(16)', 'sine_only': '(False)', 'precomputed_fourier': 'precomputed_features', 'generate_fourier_features': '(False)'}), '(channel_dim=32, query_shape=(6, 16, 16), conv_layer=\n layer_shape, 
max_frequency=64.0, num_frequency_bands=16, sine_only=\n False, precomputed_fourier=precomputed_features,\n generate_fourier_features=False)\n', (2839, 3054), False, 'from perceiver_pytorch.queries import LearnableQuery\n'), ((3120, 3151), 'torch.randn', 'torch.randn', (['(4, 6, 12, 16, 16)'], {}), '((4, 6, 12, 16, 16))\n', (3131, 3151), False, 'import torch\n'), ((3539, 3642), 'perceiver_pytorch.utils.encode_position', 'encode_position', (['(1)'], {'axis': '(10, 16, 16)', 'max_frequency': '(16.0)', 'num_frequency_bands': '(128)', 'sine_only': '(False)'}), '(1, axis=(10, 16, 16), max_frequency=16.0,\n num_frequency_bands=128, sine_only=False)\n', (3554, 3642), False, 'from perceiver_pytorch.utils import encode_position\n'), ((3878, 4107), 'perceiver_pytorch.queries.LearnableQuery', 'LearnableQuery', ([], {'channel_dim': '(32)', 'query_shape': '(6, 16, 16)', 'conv_layer': 'layer_shape', 'max_frequency': '(64.0)', 'num_frequency_bands': '(128)', 'sine_only': '(False)', 'precomputed_fourier': 'precomputed_features', 'generate_fourier_features': '(True)'}), '(channel_dim=32, query_shape=(6, 16, 16), conv_layer=\n layer_shape, max_frequency=64.0, num_frequency_bands=128, sine_only=\n False, precomputed_fourier=precomputed_features,\n generate_fourier_features=True)\n', (3892, 4107), False, 'from perceiver_pytorch.queries import LearnableQuery\n'), ((4173, 4204), 'torch.randn', 'torch.randn', (['(4, 6, 12, 16, 16)'], {}), '((4, 6, 12, 16, 16))\n', (4184, 4204), False, 'import torch\n'), ((4648, 4750), 'perceiver_pytorch.utils.encode_position', 'encode_position', (['(4)'], {'axis': '(10, 16, 16)', 'max_frequency': '(16.0)', 'num_frequency_bands': '(64)', 'sine_only': '(False)'}), '(4, axis=(10, 16, 16), max_frequency=16.0,\n num_frequency_bands=64, sine_only=False)\n', (4663, 4750), False, 'from perceiver_pytorch.utils import encode_position\n'), ((4927, 5111), 'perceiver_pytorch.queries.LearnableQuery', 'LearnableQuery', ([], {'channel_dim': '(32)', 'query_shape': '(6, 
16, 16)', 'conv_layer': 'layer_shape', 'max_frequency': '(64.0)', 'num_frequency_bands': '(128)', 'sine_only': '(False)', 'generate_fourier_features': '(False)'}), '(channel_dim=32, query_shape=(6, 16, 16), conv_layer=\n layer_shape, max_frequency=64.0, num_frequency_bands=128, sine_only=\n False, generate_fourier_features=False)\n', (4941, 5111), False, 'from perceiver_pytorch.queries import LearnableQuery\n'), ((5173, 5204), 'torch.randn', 'torch.randn', (['(4, 6, 12, 16, 16)'], {}), '((4, 6, 12, 16, 16))\n', (5184, 5204), False, 'import torch\n'), ((5682, 5784), 'perceiver_pytorch.utils.encode_position', 'encode_position', (['(4)'], {'axis': '(10, 16, 16)', 'max_frequency': '(16.0)', 'num_frequency_bands': '(32)', 'sine_only': '(False)'}), '(4, axis=(10, 16, 16), max_frequency=16.0,\n num_frequency_bands=32, sine_only=False)\n', (5697, 5784), False, 'from perceiver_pytorch.utils import encode_position\n'), ((5944, 6046), 'perceiver_pytorch.utils.encode_position', 'encode_position', (['(1)'], {'axis': '(10, 16, 16)', 'max_frequency': '(16.0)', 'num_frequency_bands': '(64)', 'sine_only': '(False)'}), '(1, axis=(10, 16, 16), max_frequency=16.0,\n num_frequency_bands=64, sine_only=False)\n', (5959, 6046), False, 'from perceiver_pytorch.utils import encode_position\n'), ((6223, 6452), 'perceiver_pytorch.queries.LearnableQuery', 'LearnableQuery', ([], {'channel_dim': '(32)', 'query_shape': '(6, 16, 16)', 'conv_layer': 'layer_shape', 'max_frequency': '(64.0)', 'num_frequency_bands': '(128)', 'sine_only': '(False)', 'precomputed_fourier': 'precomputed_features', 'generate_fourier_features': '(True)'}), '(channel_dim=32, query_shape=(6, 16, 16), conv_layer=\n layer_shape, max_frequency=64.0, num_frequency_bands=128, sine_only=\n False, precomputed_fourier=precomputed_features,\n generate_fourier_features=True)\n', (6237, 6452), False, 'from perceiver_pytorch.queries import LearnableQuery\n'), ((6518, 6549), 'torch.randn', 'torch.randn', (['(4, 6, 12, 16, 16)'], {}), 
'((4, 6, 12, 16, 16))\n', (6529, 6549), False, 'import torch\n'), ((1705, 1720), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1718, 1720), False, 'import torch\n'), ((1763, 1794), 'torch.randn', 'torch.randn', (['(2, 6, 12, 16, 16)'], {}), '((2, 6, 12, 16, 16))\n', (1774, 1794), False, 'import torch\n'), ((1962, 1988), 'torch.randn', 'torch.randn', (['(2, 256, 100)'], {}), '((2, 256, 100))\n', (1973, 1988), False, 'import torch\n'), ((2101, 2217), 'einops.rearrange', 'einops.rearrange', (['model_out', '"""b (t h w) c -> b t c h w"""'], {'t': 'output_shape[0]', 'h': 'output_shape[1]', 'w': 'output_shape[2]'}), "(model_out, 'b (t h w) c -> b t c h w', t=output_shape[0],\n h=output_shape[1], w=output_shape[2])\n", (2117, 2217), False, 'import einops\n')]
|
from django.contrib.auth.models import AbstractBaseUser, PermissionsMixin, BaseUserManager, Permission
from django.core.mail import send_mail
from django.db import models
from django.utils import six, timezone
from django.utils.crypto import get_random_string
from django.utils.translation import ugettext, ugettext_lazy as _
# Create your models here.
class StaffManager(models.Manager):
    """Manager for the Staff model.

    Re-implements the user-creation helpers of Django's BaseUserManager
    (email normalization, random passwords, natural-key lookup) on a
    plain ``models.Manager``.
    """

    @classmethod
    def normalize_email(cls, email):
        """
        Normalize the address by lowercasing the domain part of the email
        address.
        """
        email = email or ''
        try:
            email_name, domain_part = email.strip().rsplit('@', 1)
        except ValueError:
            # No '@' in the address: leave the value unchanged.
            pass
        else:
            email = '@'.join([email_name, domain_part.lower()])
        return email

    def make_random_password(self, length=10,
                             allowed_chars='abcdefghjkmnpqrstuvwxyz'
                                           'ABCDEFGHJKLMNPQRSTUVWXYZ'
                                           '23456789'):
        """
        Generates a random password with the given length and given
        allowed_chars. Note that the default value of allowed_chars does not
        have "I" or "O" or letters and digits that look similar -- just to
        avoid confusion.
        """
        # Fix: get_random_string was called without ever being imported
        # (NameError at call time); it is now imported from
        # django.utils.crypto at the top of the file. The corrupted
        # default character-set literal is restored to Django's
        # confusion-free alphabet.
        return get_random_string(length, allowed_chars)

    def get_by_natural_key(self, username):
        """Look a staff member up by the model's USERNAME_FIELD value."""
        return self.get(**{self.model.USERNAME_FIELD: username})

    def _create_user(self, username, email, password,
                     is_staff, is_superuser, **extra_fields):
        """
        Creates and saves a User with the given username, email and password.
        """
        now = timezone.now()
        if not username:
            raise ValueError('The given username must be set')
        email = self.normalize_email(email)
        user = self.model(username=username, email=email,
                          is_staff=is_staff, is_active=True,
                          is_superuser=is_superuser,
                          date_joined=now,
                          **extra_fields)
        user.set_password(password)
        user.save(using=self._db)
        return user

    def create_superuser(self, username, email, password, **extra_fields):
        """Create a user flagged as both staff and superuser."""
        return self._create_user(username, email, password, True, True,
                                 **extra_fields)

    def create_user(self, username, email=None, password=None, **extra_fields):
        """Create a regular (non-staff, non-superuser) user.

        When an email is supplied, the username is derived from its local
        part, preserving the original behaviour. Fix: the original called
        ``email.split`` unconditionally, raising AttributeError whenever
        ``email`` was None despite None being the documented default.
        """
        if email:
            username = email.split('@')[0]
        return self._create_user(username, email, password, False, False,
                                 **extra_fields)

    def get_names(self):
        """Return "first last" display names for all staff members."""
        return ["{} {}".format(staff.first_name, staff.last_name)
                for staff in self.all()]
class Staff(AbstractBaseUser, PermissionsMixin):
    """Custom user model for staff members, authenticated by username."""

    username = models.CharField(_('username'), max_length=30, unique=True)
    first_name = models.CharField(_('first name'), max_length=45, blank=True)
    last_name = models.CharField(_('last name'), max_length=30, blank=True)
    email = models.EmailField(_('email address'), unique=True)
    # Free-form profile fields; both optional.
    gender = models.CharField(max_length=16, blank=True)
    # Presumably a URL or path to an avatar image -- TODO confirm with callers.
    picture = models.CharField(max_length=255, blank=True)
    is_staff = models.BooleanField(_('staff status'), default=False,
        help_text=_('Designates whether the user can log into this admin '
                    'site.'))
    is_active = models.BooleanField(_('active'), default=True,
        help_text=_('Designates whether this user should be treated as '
                    'active. Unselect this instead of deleting accounts.'))
    date_joined = models.DateTimeField(_('date joined'), default=timezone.now)

    objects = StaffManager()

    # Field used as the unique identifier when authenticating.
    USERNAME_FIELD = 'username'
    REQUIRED_FIELDS = ['email']

    class Meta:
        """Model options for Staff."""
        verbose_name = _('staff')
        verbose_name_plural = _('staff')

    def get_full_name(self):
        """
        Returns the first_name plus the last_name, with a space in between.
        """
        full_name = '%s %s' % (self.first_name, self.last_name)
        return full_name.strip()

    def get_short_name(self):
        "Returns the short name for the user."
        return self.first_name

    def email_user(self, subject, message, from_email=None, **kwargs):
        """
        Sends an email to this User.
        """
        send_mail(subject, message, from_email, [self.email], **kwargs)
class Parapo(models.Model):
    """
    A free-text ``rant`` attached to a staff member.

    The reverse relation is available as ``staff.rants``.
    """
    rant = models.TextField()
    # NOTE(review): ForeignKey without ``on_delete`` implies Django < 2.0
    # (where it defaulted to CASCADE); Django 2.0+ requires it explicitly.
    staff = models.ForeignKey(Staff, related_name='rants')
    class Meta:
        verbose_name = "Parapo"
        verbose_name_plural = "Parapos"
class TeamManager(models.Manager):
    """
    The manager for the Team model.

    (The previous docstring claimed this was the manager for auth's Group
    model — a copy-paste leftover.)
    """
    use_in_migrations = True

    def get_by_natural_key(self, name):
        """Look a team up by its unique ``name`` (natural-key support)."""
        return self.get(name=name)

    def get_teams(self):
        """
        Return a list with one ``{'name': ..., 'lead': ...}`` dict per team.

        Bug fix: the previous implementation called ``update`` on a single
        shared dict inside the loop, so every iteration overwrote the same
        two keys and only the last team's data was ever returned.
        """
        return [{'name': team.name, 'lead': team.lead} for team in self.all()]
class Team(models.Model):
    """
    A named group with an assigned lead and an optional set of permissions.

    ``name`` doubles as the natural key (see ``natural_key``), so it must
    remain unique.
    """
    name = models.CharField(_('name'), max_length=80, unique=True)
    permissions = models.ManyToManyField(Permission,
        verbose_name=_('permissions'), blank=True)
    # NOTE(review): ForeignKey without ``on_delete`` implies Django < 2.0.
    # The reverse relation is available as ``staff.crews``.
    lead = models.ForeignKey(Staff, related_name='crews')
    objects = TeamManager()
    class Meta:
        verbose_name = _('team')
        verbose_name_plural = _('teams')
    def __str__(self):
        return self.name
    def natural_key(self):
        # Tuple form expected by Django's serialization framework.
        return (self.name,)
|
[
"django.db.models.TextField",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.utils.timezone.now",
"django.core.mail.send_mail",
"django.utils.translation.ugettext_lazy"
] |
[((3221, 3264), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(16)', 'blank': '(True)'}), '(max_length=16, blank=True)\n', (3237, 3264), False, 'from django.db import models\n'), ((3279, 3323), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'blank': '(True)'}), '(max_length=255, blank=True)\n', (3295, 3323), False, 'from django.db import models\n'), ((4592, 4610), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (4608, 4610), False, 'from django.db import models\n'), ((4623, 4669), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Staff'], {'related_name': '"""rants"""'}), "(Staff, related_name='rants')\n", (4640, 4669), False, 'from django.db import models\n'), ((5352, 5398), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Staff'], {'related_name': '"""crews"""'}), "(Staff, related_name='crews')\n", (5369, 5398), False, 'from django.db import models\n'), ((1712, 1726), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (1724, 1726), False, 'from django.utils import six, timezone\n'), ((2948, 2961), 'django.utils.translation.ugettext_lazy', '_', (['"""username"""'], {}), "('username')\n", (2949, 2961), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((3025, 3040), 'django.utils.translation.ugettext_lazy', '_', (['"""first name"""'], {}), "('first name')\n", (3026, 3040), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((3102, 3116), 'django.utils.translation.ugettext_lazy', '_', (['"""last name"""'], {}), "('last name')\n", (3103, 3116), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((3175, 3193), 'django.utils.translation.ugettext_lazy', '_', (['"""email address"""'], {}), "('email address')\n", (3176, 3193), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((3359, 3376), 'django.utils.translation.ugettext_lazy', '_', (['"""staff status"""'], {}), 
"('staff status')\n", (3360, 3376), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((3534, 3545), 'django.utils.translation.ugettext_lazy', '_', (['"""active"""'], {}), "('active')\n", (3535, 3545), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((3749, 3765), 'django.utils.translation.ugettext_lazy', '_', (['"""date joined"""'], {}), "('date joined')\n", (3750, 3765), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((3957, 3967), 'django.utils.translation.ugettext_lazy', '_', (['"""staff"""'], {}), "('staff')\n", (3958, 3967), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((3998, 4008), 'django.utils.translation.ugettext_lazy', '_', (['"""staff"""'], {}), "('staff')\n", (3999, 4008), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((4486, 4549), 'django.core.mail.send_mail', 'send_mail', (['subject', 'message', 'from_email', '[self.email]'], {}), '(subject, message, from_email, [self.email], **kwargs)\n', (4495, 4549), False, 'from django.core.mail import send_mail\n'), ((5198, 5207), 'django.utils.translation.ugettext_lazy', '_', (['"""name"""'], {}), "('name')\n", (5199, 5207), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((5468, 5477), 'django.utils.translation.ugettext_lazy', '_', (['"""team"""'], {}), "('team')\n", (5469, 5477), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((5508, 5518), 'django.utils.translation.ugettext_lazy', '_', (['"""teams"""'], {}), "('teams')\n", (5509, 5518), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((3411, 3473), 'django.utils.translation.ugettext_lazy', '_', (['"""Designates whether the user can log into this admin site."""'], {}), "('Designates whether the user can log into this admin site.')\n", (3412, 3473), True, 'from django.utils.translation import ugettext, ugettext_lazy 
as _\n'), ((3579, 3690), 'django.utils.translation.ugettext_lazy', '_', (['"""Designates whether this user should be treated as active. Unselect this instead of deleting accounts."""'], {}), "('Designates whether this user should be treated as active. Unselect this instead of deleting accounts.'\n )\n", (3580, 3690), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((5311, 5327), 'django.utils.translation.ugettext_lazy', '_', (['"""permissions"""'], {}), "('permissions')\n", (5312, 5327), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n')]
|
#!/usr/bin/env python
import os
import json
import pytz
from copy import copy
from glob import glob
from datetime import datetime
import argparse
import xgboost as xgb
import pandas as pd
import sklearn as sk
import numpy as np
import matplotlib.pyplot as plt
from statistics import mean
from statistics import stdev
def getPartialMatrix(vPd, vRank, colNum=10, pc=0):
    """
    Select the ``colNum`` columns of ``vPd`` with the largest absolute
    loadings on one principal component.

    ``vRank`` is the PCA rotation matrix (features x components); its
    column ``pc`` is ranked by absolute value in descending order and the
    top ``colNum`` feature names are used to slice ``vPd``.
    """
    ranking = vRank.iloc[:, pc].abs().sort_values(ascending=False)
    top_features = ranking.index[:colNum]
    return vPd[top_features]
def main():
    """
    Train and evaluate an XGBoost classifier for one K-fold CV split.

    Workflow:
      1. Load the feature table and the train/validation/test sample lists
         for the requested KFCV set.
      2. Grid-search ``max_depth`` (with early stopping on validation
         log-loss) and keep the model with the best validation F1 score.
      3. Re-evaluate the selected model on the held-out test split and
         write all metrics (plus raw margins for ROC/PR curves) under
         ``results/``.

    Bug fix: the confusion-matrix loop previously ran *after*
    ``y_test_pred`` had been overwritten with raw margins
    (``output_margin=True``), so 0/1 labels were compared against
    continuous scores and TP/TN were (almost) always zero.  Margins are
    now kept in a separate variable.
    """
    ap = argparse.ArgumentParser(description='python train_xgboost.py')
    ap.add_argument('--root', '-r', nargs='?', default='/home/tokuoka/git/embryo_classification', help='Specify root path')
    ap.add_argument('--save_dir', nargs='?', default='results/train_XGBoost', help='Specify output files directory for create figures')
    ap.add_argument('--set', type=int, default=1, help='Specify index of set in KFCV')
    # NOTE(review): --input is parsed but never read; the PCA branch below
    # is commented out, so raw features are always used.
    ap.add_argument('--input', nargs='?', default='RAW', help='Specify input type [RAW, PCA]')
    args = ap.parse_args()
    # Load the full feature table and drop samples without an outcome label.
    allData = pd.read_csv(os.path.join(args.root, 'datasets', 'tree', 'learningInput.csv'))
    allData = allData[ allData.learningInput != 'no_pups']
    vRank = pd.read_csv(os.path.join(args.root, 'datasets', 'tree', 'PCARotation.csv'), index_col=0)
    # Sample-name lists defining the train/validation/test partition.
    split_list_train = os.path.join(args.root, 'datasets', 'split_list', 'kfcv', 'set{0:02d}'.format(args.set), 'train.txt')
    split_list_val = os.path.join(args.root, 'datasets', 'split_list', 'kfcv', 'set{0:02d}'.format(args.set), 'validation.txt')
    split_list_test = os.path.join(args.root, 'datasets', 'split_list', 'kfcv', 'test.txt')
    with open(split_list_train, 'r') as f:
        file_list_train = [line.rstrip() for line in f]
    with open(split_list_val, 'r') as f:
        file_list_val = [line.rstrip() for line in f]
    with open(split_list_test, 'r') as f:
        file_list_test = [line.rstrip() for line in f]
    # Partition the table by removing the other two splits from each copy.
    trainData, valData, testData = copy(allData), copy(allData), copy(allData)
    for i in file_list_val + file_list_test:
        trainData = trainData[trainData.name != i]
    for i in file_list_train + file_list_test:
        valData = valData[valData.name != i]
    for i in file_list_train + file_list_val:
        testData = testData[testData.name != i]
    # The last column holds the label; map born -> 1, abort -> 0.
    x_train = trainData.iloc[:,:(trainData.shape[1]-1)]
    y_train = trainData['learningInput'].str.replace('born', '1').str.replace('abort', '0').astype(int)
    x_val = valData.iloc[:,:(valData.shape[1]-1)]
    y_val = valData['learningInput'].str.replace('born', '1').str.replace('abort', '0').astype(int)
    x_test = testData.iloc[:,:(testData.shape[1]-1)]
    y_test = testData['learningInput'].str.replace('born', '1').str.replace('abort', '0').astype(int)
    # Make timestamped output directories for training and test results.
    current_datetime = datetime.now(pytz.timezone('Asia/Tokyo')).strftime('%Y%m%d_%H%M%S')
    save_dir = args.save_dir + '_set{0:02d}_'.format(args.set) + str(current_datetime)
    os.makedirs(save_dir, exist_ok=True)
    save_dir_test = 'results/test_XGBoost_set{0:02d}_'.format(args.set) + str(current_datetime)
    os.makedirs(save_dir_test, exist_ok=True)
    # Hyper-parameter grid.  rank 0 means "use all raw features"; the PCA
    # rank grid is kept below for reference.
    # numRank = [100,200,300,400,500,600,700,800,900,1000]
    numRank = [0]
    numDepth = [1,2,3,4,5]
    best_rank = 0
    best_depth = 0
    best_f1_val = 0
    for r in numRank:
        for d in numDepth:
            # partialx_train = getPartialMatrix(x_train, vRank, r, 0)
            # partialx_val = getPartialMatrix(x_val, vRank, r, 0)
            partialx_train = x_train.drop('name', axis=1)
            partialx_val = x_val.drop('name', axis=1)
            eval_set = [(partialx_train, y_train),(partialx_val, y_val)]
            eval_metric = ["logloss"]
            # First fit with early stopping to find the best iteration
            # count, then refit a fresh model for exactly that many rounds.
            clf = xgb.XGBClassifier(n_estimators=10000, learning_rate=0.001, max_depth=d, gamma=0, subsample=1.0)
            clf.fit(partialx_train, y_train, eval_metric=eval_metric, eval_set=eval_set, early_stopping_rounds=100, verbose=False)
            clf_best = xgb.XGBClassifier(n_estimators=clf.get_booster().best_iteration, learning_rate=0.001, max_depth=d, gamma=0, subsample=1.0)
            clf_best.fit(partialx_train, y_train, eval_metric=eval_metric, eval_set=eval_set, verbose=False)
            # Label-based metrics.
            y_train_pred = clf_best.predict(partialx_train)
            y_val_pred = clf_best.predict(partialx_val)
            acc_train = sk.metrics.accuracy_score(y_train, y_train_pred)
            acc_val = sk.metrics.accuracy_score(y_val, y_val_pred)
            pre_val = sk.metrics.precision_score(y_val, y_val_pred, pos_label=1)
            rec_val = sk.metrics.recall_score(y_val, y_val_pred, pos_label=1)
            f1_val = sk.metrics.f1_score(y_val, y_val_pred, pos_label=1)
            # Margin-based (threshold-free) metrics.  NOTE: auroc_train and
            # aupr_train are computed but not written to the log.
            y_train_pred = clf_best.predict(partialx_train, output_margin=True)
            auroc_train = sk.metrics.roc_auc_score(y_train, y_train_pred)
            aupr_train = sk.metrics.average_precision_score(y_train, y_train_pred)
            y_val_pred = clf_best.predict(partialx_val, output_margin=True)
            auroc_val = sk.metrics.roc_auc_score(y_val, y_val_pred)
            aupr_val = sk.metrics.average_precision_score(y_val, y_val_pred)
            log = {'rank': r, 'depth': d, 'accuracy_train': acc_train, 'accuracy_validation': acc_val,
                   'precision_validation': pre_val, 'recall_validation': rec_val, 'f1_validation': f1_val,
                   'AUROC_validation': auroc_val, 'AUPR_validation': aupr_val}
            with open(os.path.join(save_dir, 'log'), 'a') as f:
                json.dump(log, f, indent=4)
            # Model selection criterion: validation F1 (ties keep the
            # most recently trained model).
            if best_f1_val <= f1_val:
                print('Updated best model.')
                best_f1_val = f1_val
                best_rank = r
                best_depth = d
                clf_best_best = clf_best
    log = {'best f1 validation': best_f1_val, 'best rank': best_rank, 'best depth': best_depth, 'best iteration': clf_best_best.get_booster().best_iteration}
    with open(os.path.join(save_dir, 'best_result'), 'a') as f:
        json.dump(log, f, indent=4)
    print(log)
    # Evaluate the selected model on the held-out test split.
    # partialx_test = getPartialMatrix(x_test, vRank, best_rank, 0)
    partialx_test = x_test.drop('name', axis=1)
    y_test_pred = clf_best_best.predict(partialx_test)
    acc_test = sk.metrics.accuracy_score(y_test, y_test_pred)
    pre_test = sk.metrics.precision_score(y_test, y_test_pred, pos_label=1)
    rec_test = sk.metrics.recall_score(y_test, y_test_pred, pos_label=1)
    f1_test = sk.metrics.f1_score(y_test, y_test_pred, pos_label=1)
    # Keep margins in their own variable so the confusion-matrix loop below
    # keeps comparing labels against label predictions (see docstring).
    y_test_margin = clf_best_best.predict(partialx_test, output_margin=True)
    auroc_test = sk.metrics.roc_auc_score(y_test, y_test_margin)
    aupr_test = sk.metrics.average_precision_score(y_test, y_test_margin)
    TP, TN, FP, FN = 0, 0, 0, 0
    for i in range(len(y_test)):
        print('y_test_pred: {}'.format(y_test_pred[i]))
        if y_test.iloc[i] == y_test_pred[i]:
            if y_test.iloc[i] == 1:
                TP += 1
            elif y_test.iloc[i] == 0:
                TN += 1
        else:
            if y_test.iloc[i] == 1:
                FN += 1
            elif y_test.iloc[i] == 0:
                FP += 1
    log = {'accuracy': acc_test,
           'precision': pre_test,
           'recall': rec_test,
           'f1': f1_test,
           'TP': TP,
           'TN': TN,
           'FP': FP,
           'FN': FN,
           'AUROC': auroc_test,
           'AUPR': aupr_test
           }
    with open(os.path.join(save_dir_test, 'log'), 'w') as f:
        json.dump(log, f, indent=4)
    # Raw margins are saved so ROC/PR curves can be re-plotted later.
    np.savez(os.path.join(save_dir_test, 'log_auc.npz'), y_pred=y_test_margin, y_true=y_test)
    print('y_test: {}'.format(list(y_test)))
    print('y_pred: {}'.format(list(y_test_pred)))
    print(log)
if __name__ == '__main__':
    main()
|
[
"json.dump",
"os.makedirs",
"argparse.ArgumentParser",
"sklearn.metrics.accuracy_score",
"copy.copy",
"sklearn.metrics.recall_score",
"sklearn.metrics.roc_auc_score",
"sklearn.metrics.f1_score",
"pytz.timezone",
"sklearn.metrics.precision_score",
"xgboost.XGBClassifier",
"sklearn.metrics.average_precision_score",
"os.path.join"
] |
[((541, 603), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""python train_xgboost.py"""'}), "(description='python train_xgboost.py')\n", (564, 603), False, 'import argparse\n'), ((1606, 1675), 'os.path.join', 'os.path.join', (['args.root', '"""datasets"""', '"""split_list"""', '"""kfcv"""', '"""test.txt"""'], {}), "(args.root, 'datasets', 'split_list', 'kfcv', 'test.txt')\n", (1618, 1675), False, 'import os\n'), ((2999, 3035), 'os.makedirs', 'os.makedirs', (['save_dir'], {'exist_ok': '(True)'}), '(save_dir, exist_ok=True)\n', (3010, 3035), False, 'import os\n'), ((3136, 3177), 'os.makedirs', 'os.makedirs', (['save_dir_test'], {'exist_ok': '(True)'}), '(save_dir_test, exist_ok=True)\n', (3147, 3177), False, 'import os\n'), ((6367, 6413), 'sklearn.metrics.accuracy_score', 'sk.metrics.accuracy_score', (['y_test', 'y_test_pred'], {}), '(y_test, y_test_pred)\n', (6392, 6413), True, 'import sklearn as sk\n'), ((6429, 6489), 'sklearn.metrics.precision_score', 'sk.metrics.precision_score', (['y_test', 'y_test_pred'], {'pos_label': '(1)'}), '(y_test, y_test_pred, pos_label=1)\n', (6455, 6489), True, 'import sklearn as sk\n'), ((6505, 6562), 'sklearn.metrics.recall_score', 'sk.metrics.recall_score', (['y_test', 'y_test_pred'], {'pos_label': '(1)'}), '(y_test, y_test_pred, pos_label=1)\n', (6528, 6562), True, 'import sklearn as sk\n'), ((6577, 6630), 'sklearn.metrics.f1_score', 'sk.metrics.f1_score', (['y_test', 'y_test_pred'], {'pos_label': '(1)'}), '(y_test, y_test_pred, pos_label=1)\n', (6596, 6630), True, 'import sklearn as sk\n'), ((6724, 6769), 'sklearn.metrics.roc_auc_score', 'sk.metrics.roc_auc_score', (['y_test', 'y_test_pred'], {}), '(y_test, y_test_pred)\n', (6748, 6769), True, 'import sklearn as sk\n'), ((6786, 6841), 'sklearn.metrics.average_precision_score', 'sk.metrics.average_precision_score', (['y_test', 'y_test_pred'], {}), '(y_test, y_test_pred)\n', (6820, 6841), True, 'import sklearn as sk\n'), ((1104, 1168), 'os.path.join', 
'os.path.join', (['args.root', '"""datasets"""', '"""tree"""', '"""learningInput.csv"""'], {}), "(args.root, 'datasets', 'tree', 'learningInput.csv')\n", (1116, 1168), False, 'import os\n'), ((1253, 1315), 'os.path.join', 'os.path.join', (['args.root', '"""datasets"""', '"""tree"""', '"""PCARotation.csv"""'], {}), "(args.root, 'datasets', 'tree', 'PCARotation.csv')\n", (1265, 1315), False, 'import os\n'), ((2003, 2016), 'copy.copy', 'copy', (['allData'], {}), '(allData)\n', (2007, 2016), False, 'from copy import copy\n'), ((2018, 2031), 'copy.copy', 'copy', (['allData'], {}), '(allData)\n', (2022, 2031), False, 'from copy import copy\n'), ((2033, 2046), 'copy.copy', 'copy', (['allData'], {}), '(allData)\n', (2037, 2046), False, 'from copy import copy\n'), ((6092, 6119), 'json.dump', 'json.dump', (['log', 'f'], {'indent': '(4)'}), '(log, f, indent=4)\n', (6101, 6119), False, 'import json\n'), ((7612, 7639), 'json.dump', 'json.dump', (['log', 'f'], {'indent': '(4)'}), '(log, f, indent=4)\n', (7621, 7639), False, 'import json\n'), ((7653, 7695), 'os.path.join', 'os.path.join', (['save_dir_test', '"""log_auc.npz"""'], {}), "(save_dir_test, 'log_auc.npz')\n", (7665, 7695), False, 'import os\n'), ((3791, 3890), 'xgboost.XGBClassifier', 'xgb.XGBClassifier', ([], {'n_estimators': '(10000)', 'learning_rate': '(0.001)', 'max_depth': 'd', 'gamma': '(0)', 'subsample': '(1.0)'}), '(n_estimators=10000, learning_rate=0.001, max_depth=d,\n gamma=0, subsample=1.0)\n', (3808, 3890), True, 'import xgboost as xgb\n'), ((4433, 4481), 'sklearn.metrics.accuracy_score', 'sk.metrics.accuracy_score', (['y_train', 'y_train_pred'], {}), '(y_train, y_train_pred)\n', (4458, 4481), True, 'import sklearn as sk\n'), ((4504, 4548), 'sklearn.metrics.accuracy_score', 'sk.metrics.accuracy_score', (['y_val', 'y_val_pred'], {}), '(y_val, y_val_pred)\n', (4529, 4548), True, 'import sklearn as sk\n'), ((4571, 4629), 'sklearn.metrics.precision_score', 'sk.metrics.precision_score', (['y_val', 'y_val_pred'], 
{'pos_label': '(1)'}), '(y_val, y_val_pred, pos_label=1)\n', (4597, 4629), True, 'import sklearn as sk\n'), ((4652, 4707), 'sklearn.metrics.recall_score', 'sk.metrics.recall_score', (['y_val', 'y_val_pred'], {'pos_label': '(1)'}), '(y_val, y_val_pred, pos_label=1)\n', (4675, 4707), True, 'import sklearn as sk\n'), ((4729, 4780), 'sklearn.metrics.f1_score', 'sk.metrics.f1_score', (['y_val', 'y_val_pred'], {'pos_label': '(1)'}), '(y_val, y_val_pred, pos_label=1)\n', (4748, 4780), True, 'import sklearn as sk\n'), ((4888, 4935), 'sklearn.metrics.roc_auc_score', 'sk.metrics.roc_auc_score', (['y_train', 'y_train_pred'], {}), '(y_train, y_train_pred)\n', (4912, 4935), True, 'import sklearn as sk\n'), ((4961, 5018), 'sklearn.metrics.average_precision_score', 'sk.metrics.average_precision_score', (['y_train', 'y_train_pred'], {}), '(y_train, y_train_pred)\n', (4995, 5018), True, 'import sklearn as sk\n'), ((5120, 5163), 'sklearn.metrics.roc_auc_score', 'sk.metrics.roc_auc_score', (['y_val', 'y_val_pred'], {}), '(y_val, y_val_pred)\n', (5144, 5163), True, 'import sklearn as sk\n'), ((5187, 5240), 'sklearn.metrics.average_precision_score', 'sk.metrics.average_precision_score', (['y_val', 'y_val_pred'], {}), '(y_val, y_val_pred)\n', (5221, 5240), True, 'import sklearn as sk\n'), ((6034, 6071), 'os.path.join', 'os.path.join', (['save_dir', '"""best_result"""'], {}), "(save_dir, 'best_result')\n", (6046, 6071), False, 'import os\n'), ((7557, 7591), 'os.path.join', 'os.path.join', (['save_dir_test', '"""log"""'], {}), "(save_dir_test, 'log')\n", (7569, 7591), False, 'import os\n'), ((2853, 2880), 'pytz.timezone', 'pytz.timezone', (['"""Asia/Tokyo"""'], {}), "('Asia/Tokyo')\n", (2866, 2880), False, 'import pytz\n'), ((5611, 5638), 'json.dump', 'json.dump', (['log', 'f'], {'indent': '(4)'}), '(log, f, indent=4)\n', (5620, 5638), False, 'import json\n'), ((5553, 5582), 'os.path.join', 'os.path.join', (['save_dir', '"""log"""'], {}), "(save_dir, 'log')\n", (5565, 5582), False, 
'import os\n')]
|
"""
Tests for the datastore logic used to define the behaviour of the API.
Copyright (C) 2020 <NAME>.
"Commons Clause" License Condition v1.0:
The Software is provided to you by the Licensor under the License, as defined
below, subject to the following condition.
Without limiting other conditions in the License, the grant of rights under the
License will not include, and the License does not grant to you, the right to
Sell the Software.
For purposes of the foregoing, "Sell" means practicing any or all of the rights
granted to you under the License to provide to third parties, for a fee or
other consideration (including without limitation fees for hosting or
consulting/support services related to the Software), a product or service
whose value derives, entirely or substantially, from the functionality of the
Software. Any license notice or attribution required by the License must also
include this Commons Clause License Condition notice.
MIT License:
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from unittest import mock
from django.test import TestCase
from datastore import logic
from datastore import models
class NamespaceTestCase(TestCase):
"""
Exercises the namespace related administrative functions.
"""
    def setUp(self):
        """
        Build the fixture shared by every test:

        * five users with distinct roles: a site admin (superuser), a
          namespace admin, a tag user, a tag reader and an unprivileged
          user, and
        * one namespace containing a public tag plus two private tags —
          one granting ``tag_user`` the user role and one granting
          ``tag_reader`` the reader role.
        """
        # Users.  Only site_admin_user is a Django superuser; the others
        # get their privileges (if any) from the roles assigned below.
        self.site_admin_user = models.User.objects.create_user(
            username="site_admin_user",
            email="<EMAIL>",
            password="password",
            is_superuser=True,
        )
        self.admin_user = models.User.objects.create_user(
            username="admin_user",
            email="<EMAIL>",
            password="password",
        )
        self.tag_user = models.User.objects.create_user(
            username="tag_user",
            email="<EMAIL>",
            password="password",
        )
        self.tag_reader = models.User.objects.create_user(
            username="tag_reader",
            email="<EMAIL>",
            password="password",
        )
        self.normal_user = models.User.objects.create_user(
            username="normal_user",
            email="<EMAIL>",
            password="password",
        )
        # The namespace under test; admin_user is given the namespace
        # admin role.
        self.namespace_name = "test_namespace"
        self.namespace_description = "This is a test namespace."
        self.test_namespace = logic.create_namespace(
            self.site_admin_user,
            self.namespace_name,
            self.namespace_description,
            admins=[
                self.admin_user,
            ],
        )
        # Public tag (type "s" — rendered as "string" by the API).
        self.public_tag_name = "public_tag"
        self.public_tag_description = "This is a public tag."
        self.public_tag_type_of = "s"
        self.public_tag = logic.create_tag(
            user=self.site_admin_user,
            name=self.public_tag_name,
            description=self.public_tag_description,
            type_of=self.public_tag_type_of,
            namespace=self.test_namespace,
            private=False,
        )
        # Private tag (type "b" — boolean) visible to tag_user via the
        # user role.
        self.user_tag_name = "user_tag"
        self.user_tag_description = "This is a user tag."
        self.user_tag_type_of = "b"
        self.user_tag = logic.create_tag(
            user=self.site_admin_user,
            name=self.user_tag_name,
            description=self.user_tag_description,
            type_of=self.user_tag_type_of,
            namespace=self.test_namespace,
            private=True,
            users=[
                self.tag_user,
            ],
        )
        # Private tag (type "i" — integer) visible to tag_reader via the
        # reader role.
        self.reader_tag_name = "reader_tag"
        self.reader_tag_description = "This is a reader tag."
        self.reader_tag_type_of = "i"
        self.reader_tag = logic.create_tag(
            user=self.site_admin_user,
            name=self.reader_tag_name,
            description=self.reader_tag_description,
            type_of=self.reader_tag_type_of,
            namespace=self.test_namespace,
            private=True,
            readers=[
                self.tag_reader,
            ],
        )
def test_create_namespace_as_site_admin(self):
"""
Ensure a site admin user who creates the namespace is assigned the
expected admin role.
"""
name = "my_namespace"
description = "This is a test namespace."
mock_logger = mock.MagicMock()
with mock.patch("datastore.logic.logger", mock_logger):
ns = logic.create_namespace(
self.site_admin_user, name, description
)
self.assertEqual(ns.name, name)
self.assertEqual(ns.description, description)
self.assertIn(self.site_admin_user, ns.admins.all())
mock_logger.msg.assert_called_once_with(
"Create namespace.",
user=self.site_admin_user.username,
namespace=name,
description=description,
admins=[
self.site_admin_user.username,
],
)
def test_create_namespace_as_site_admin_with_admin_list(self):
"""
Ensure a site admin user who creates the namespace is assigned the
expected admin role along with any further users included in the
admin list.
"""
name = "my_namespace"
description = "This is a test namespace."
admins = [
self.admin_user,
]
mock_logger = mock.MagicMock()
with mock.patch("datastore.logic.logger", mock_logger):
ns = logic.create_namespace(
self.site_admin_user, name, description, admins
)
self.assertEqual(ns.name, name)
self.assertEqual(ns.description, description)
self.assertIn(self.site_admin_user, ns.admins.all())
mock_logger.msg.assert_called_once_with(
"Create namespace.",
user=self.site_admin_user.username,
namespace=name,
description=description,
admins=[
self.site_admin_user.username,
self.admin_user.username,
],
)
def test_create_namespace_with_regular_users_username(self):
"""
Non-site-admin users are allowed to create namespaces that match their
username.
"""
name = self.admin_user.username
# Delete the namespace created when the user was created.
models.Namespace.objects.get(name=name).delete()
description = "This is a test namespace."
mock_logger = mock.MagicMock()
with mock.patch("datastore.logic.logger", mock_logger):
ns = logic.create_namespace(self.admin_user, name, description)
self.assertEqual(ns.name, name)
self.assertEqual(ns.description, description)
self.assertIn(self.admin_user, ns.admins.all())
mock_logger.msg.assert_called_once_with(
"Create namespace.",
user=self.admin_user.username,
namespace=name,
description=description,
admins=[
self.admin_user.username,
],
)
def test_create_namespace_fails_with_non_site_admin_user(self):
"""
Non-site-admin users are not allowed to create namespaces that do not
match their username.
"""
name = "my_namespace"
description = "This is a test namespace."
with self.assertRaises(PermissionError):
logic.create_namespace(self.admin_user, name, description)
def test_get_namespace_as_normal_user(self):
"""
Regular users see a limited set of attributes on only those aspects of
the namespace for which they have privileges to see.
In this case, the normal user can see the public tag but none of the
private tags because they are not either a user or reader of those
tags.
"""
result = logic.get_namespace(self.normal_user, self.namespace_name)
self.assertEqual(result["name"], self.namespace_name)
self.assertEqual(result["description"], self.namespace_description)
self.assertEqual(1, len(result["tags"]))
tag1 = result["tags"][0]
self.assertEqual(tag1["name"], self.public_tag_name)
self.assertEqual(tag1["description"], self.public_tag_description)
self.assertEqual(tag1["type_of"], "string")
def test_get_namespace_as_user(self):
"""
Regular users see a limited set of attributes on only those aspects of
the namespace for which they have privileges to see.
In this case, the user can see the public tag, and the private tag in
which they are assigned a user role.
"""
result = logic.get_namespace(self.tag_user, self.namespace_name)
self.assertEqual(result["name"], self.namespace_name)
self.assertEqual(result["description"], self.namespace_description)
self.assertEqual(2, len(result["tags"]))
tag1 = result["tags"][0]
tag2 = result["tags"][1]
self.assertEqual(tag1["name"], self.public_tag_name)
self.assertEqual(tag1["description"], self.public_tag_description)
self.assertEqual(tag1["type_of"], "string")
self.assertEqual(tag2["name"], self.user_tag_name)
self.assertEqual(tag2["description"], self.user_tag_description)
self.assertEqual(tag2["type_of"], "boolean")
def test_get_namespace_as_reader(self):
"""
Regular users see a limited set of attributes on only those aspects of
the namespace for which they have privileges to see.
In this case, the user can see the public tag, and the private tag in
which they are assigned a reader role.
"""
result = logic.get_namespace(self.tag_reader, self.namespace_name)
self.assertEqual(result["name"], self.namespace_name)
self.assertEqual(result["description"], self.namespace_description)
self.assertEqual(2, len(result["tags"]))
tag1 = result["tags"][0]
tag2 = result["tags"][1]
self.assertEqual(tag1["name"], self.public_tag_name)
self.assertEqual(tag1["description"], self.public_tag_description)
self.assertEqual(tag1["type_of"], "string")
self.assertEqual(tag2["name"], self.reader_tag_name)
self.assertEqual(tag2["description"], self.reader_tag_description)
self.assertEqual(tag2["type_of"], "integer")
    def test_get_namespace_as_namespace_admin(self):
        """
        Users who have the role of the namespace admin see an enhanced view of
        the namespace and child tags: system meta-data about roles, changes
        made and visibility.
        """
        result = logic.get_namespace(self.admin_user, self.namespace_name)
        self.assertEqual(result["name"], self.namespace_name)
        self.assertEqual(result["description"], self.namespace_description)
        self.assertEqual(result["created_by"], self.site_admin_user.username)
        self.assertEqual(
            result["created_on"], str(self.test_namespace.created_on)
        )
        self.assertEqual(result["updated_by"], self.site_admin_user.username)
        self.assertEqual(
            result["updated_on"], str(self.test_namespace.updated_on)
        )
        # All three tags are visible to a namespace admin, private or not.
        # NOTE(review): the expected order is public_tag, reader_tag,
        # user_tag — presumably alphabetical; confirm against the ordering
        # used by logic.get_namespace.
        self.assertEqual(3, len(result["tags"]))
        tag1 = result["tags"][0]
        tag2 = result["tags"][1]
        tag3 = result["tags"][2]
        # Tag 1: the public tag, with full admin-only metadata exposed.
        self.assertEqual(tag1["name"], self.public_tag_name)
        self.assertEqual(tag1["description"], self.public_tag_description)
        self.assertEqual(tag1["type_of"], "string")
        self.assertEqual(tag1["created_by"], self.site_admin_user.username)
        self.assertEqual(tag1["created_on"], str(self.public_tag.created_on))
        self.assertFalse(tag1["private"])
        self.assertEqual(tag1["updated_by"], self.site_admin_user.username)
        self.assertEqual(tag1["updated_on"], str(self.public_tag.updated_on))
        self.assertEqual(
            tag1["users"],
            [
                self.site_admin_user.username,
            ],
        )
        self.assertEqual(tag1["readers"], [])
        # Tag 2: the private reader tag, including its reader list.
        self.assertEqual(tag2["name"], self.reader_tag_name)
        self.assertEqual(tag2["description"], self.reader_tag_description)
        self.assertEqual(tag2["type_of"], "integer")
        self.assertEqual(tag2["created_by"], self.site_admin_user.username)
        self.assertEqual(tag2["created_on"], str(self.reader_tag.created_on))
        self.assertTrue(tag2["private"])
        self.assertEqual(tag2["updated_by"], self.site_admin_user.username)
        self.assertEqual(tag2["updated_on"], str(self.reader_tag.updated_on))
        self.assertEqual(
            tag2["users"],
            [
                self.site_admin_user.username,
            ],
        )
        self.assertEqual(
            tag2["readers"],
            [
                self.tag_reader.username,
            ],
        )
        # Tag 3: the private user tag, including its user list.
        self.assertEqual(tag3["name"], self.user_tag_name)
        self.assertEqual(tag3["description"], self.user_tag_description)
        self.assertEqual(tag3["type_of"], "boolean")
        self.assertEqual(tag3["created_by"], self.site_admin_user.username)
        self.assertEqual(tag3["created_on"], str(self.user_tag.created_on))
        self.assertTrue(tag3["private"])
        self.assertEqual(tag3["updated_by"], self.site_admin_user.username)
        self.assertEqual(tag3["updated_on"], str(self.user_tag.updated_on))
        self.assertEqual(
            tag3["users"],
            [
                self.site_admin_user.username,
                self.tag_user.username,
            ],
        )
        self.assertEqual(tag3["readers"], [])
def test_get_namespace_as_site_admin(self):
"""
Users who are a site admin see an enhanced view of the namespace and
child tags: system meta-data about roles, changes made and visibility.
"""
result = logic.get_namespace(self.site_admin_user, self.namespace_name)
self.assertEqual(result["name"], self.namespace_name)
self.assertEqual(result["description"], self.namespace_description)
self.assertEqual(result["created_by"], self.site_admin_user.username)
self.assertEqual(
result["created_on"], str(self.test_namespace.created_on)
)
self.assertEqual(result["updated_by"], self.site_admin_user.username)
self.assertEqual(
result["updated_on"], str(self.test_namespace.updated_on)
)
self.assertEqual(3, len(result["tags"]))
tag1 = result["tags"][0]
tag2 = result["tags"][1]
tag3 = result["tags"][2]
# Tag 1
self.assertEqual(tag1["name"], self.public_tag_name)
self.assertEqual(tag1["description"], self.public_tag_description)
self.assertEqual(tag1["type_of"], "string")
self.assertEqual(tag1["created_by"], self.site_admin_user.username)
self.assertEqual(tag1["created_on"], str(self.public_tag.created_on))
self.assertFalse(tag1["private"])
self.assertEqual(tag1["updated_by"], self.site_admin_user.username)
self.assertEqual(tag1["updated_on"], str(self.public_tag.updated_on))
self.assertEqual(
tag1["users"],
[
self.site_admin_user.username,
],
)
self.assertEqual(tag1["readers"], [])
# Tag 2
self.assertEqual(tag2["name"], self.reader_tag_name)
self.assertEqual(tag2["description"], self.reader_tag_description)
self.assertEqual(tag2["type_of"], "integer")
self.assertEqual(tag2["created_by"], self.site_admin_user.username)
self.assertEqual(tag2["created_on"], str(self.reader_tag.created_on))
self.assertTrue(tag2["private"])
self.assertEqual(tag2["updated_by"], self.site_admin_user.username)
self.assertEqual(tag2["updated_on"], str(self.reader_tag.updated_on))
self.assertEqual(
tag2["users"],
[
self.site_admin_user.username,
],
)
self.assertEqual(
tag2["readers"],
[
self.tag_reader.username,
],
)
# Tag 3
self.assertEqual(tag3["name"], self.user_tag_name)
self.assertEqual(tag3["description"], self.user_tag_description)
self.assertEqual(tag3["type_of"], "boolean")
self.assertEqual(tag3["created_by"], self.site_admin_user.username)
self.assertEqual(tag3["created_on"], str(self.user_tag.created_on))
self.assertTrue(tag3["private"])
self.assertEqual(tag3["updated_by"], self.site_admin_user.username)
self.assertEqual(tag3["updated_on"], str(self.user_tag.updated_on))
self.assertEqual(
tag3["users"],
[
self.site_admin_user.username,
self.tag_user.username,
],
)
self.assertEqual(tag3["readers"], [])
def test_update_namespace_description_as_admin(self):
"""
Those with administrator privileges on the namesapce are able to
update the namespace's description.
"""
new_description = "This is an updated namespace description."
mock_logger = mock.MagicMock()
with mock.patch("datastore.logic.logger", mock_logger):
result = logic.update_namespace_description(
self.admin_user, self.namespace_name, new_description
)
self.assertEqual(result.description, new_description)
mock_logger.msg.assert_called_once_with(
"Update namespace description.",
user=self.admin_user.username,
namespace=self.namespace_name,
description=new_description,
)
def test_update_namespace_description_as_site_admin(self):
"""
Those with site administrator privileges are able to update the
namespace's description.
"""
new_description = "This is an updated namespace description."
mock_logger = mock.MagicMock()
with mock.patch("datastore.logic.logger", mock_logger):
result = logic.update_namespace_description(
self.site_admin_user, self.namespace_name, new_description
)
self.assertEqual(result.description, new_description)
mock_logger.msg.assert_called_once_with(
"Update namespace description.",
user=self.site_admin_user.username,
namespace=self.namespace_name,
description=new_description,
)
def test_update_namespace_description_as_normal_user(self):
"""
Normal users may not update a namespace's description - a
PermissionError is raised as a result.
"""
new_description = "This is an updated namespace description."
with self.assertRaises(PermissionError):
logic.update_namespace_description(
self.normal_user, self.namespace_name, new_description
)
def test_add_namespace_admins_as_admin(self):
"""
Admin users are allowed to add other users to the admin role.
"""
new_admins = [self.normal_user, self.tag_reader]
mock_logger = mock.MagicMock()
with mock.patch("datastore.logic.logger", mock_logger):
result = logic.add_namespace_admins(
self.admin_user, self.namespace_name, new_admins
)
current_admins = result.admins.all()
for user in new_admins:
self.assertIn(user, current_admins)
mock_logger.msg.assert_called_once_with(
"Add namespace administrators.",
user=self.admin_user.username,
namespace=self.namespace_name,
admins=[u.username for u in new_admins],
)
def test_add_namespace_admins_as_site_admin(self):
"""
Site admin users are allowed to add other users to the admin role.
"""
new_admins = [self.normal_user, self.tag_reader]
mock_logger = mock.MagicMock()
with mock.patch("datastore.logic.logger", mock_logger):
result = logic.add_namespace_admins(
self.site_admin_user, self.namespace_name, new_admins
)
current_admins = result.admins.all()
for user in new_admins:
self.assertIn(user, current_admins)
mock_logger.msg.assert_called_once_with(
"Add namespace administrators.",
user=self.site_admin_user.username,
namespace=self.namespace_name,
admins=[u.username for u in new_admins],
)
def test_add_namespace_admins_as_normal_user(self):
"""
Normal users may not add other users to the admin role - a
PermissionError is raised as a result.
"""
new_admins = [self.normal_user, self.tag_reader]
with self.assertRaises(PermissionError):
logic.add_namespace_admins(
self.normal_user, self.namespace_name, new_admins
)
def test_remove_namespace_admins_as_admin(self):
"""
Admin users are allowed to remove other users (including themselves)
from the admin role.
"""
old_admins = [self.admin_user, self.tag_reader]
mock_logger = mock.MagicMock()
with mock.patch("datastore.logic.logger", mock_logger):
result = logic.remove_namespace_admins(
self.admin_user, self.namespace_name, old_admins
)
current_admins = result.admins.all()
for user in old_admins:
self.assertNotIn(user, current_admins)
mock_logger.msg.assert_called_once_with(
"Remove namespace administrators.",
user=self.admin_user.username,
namespace=self.namespace_name,
admins=[u.username for u in old_admins],
)
def test_remove_namespace_admins_as_site_admin(self):
"""
Site admin users are allowed to remove other users from the admin role.
"""
old_admins = [self.admin_user, self.tag_reader]
mock_logger = mock.MagicMock()
with mock.patch("datastore.logic.logger", mock_logger):
result = logic.remove_namespace_admins(
self.site_admin_user, self.namespace_name, old_admins
)
current_admins = result.admins.all()
for user in old_admins:
self.assertNotIn(user, current_admins)
mock_logger.msg.assert_called_once_with(
"Remove namespace administrators.",
user=self.site_admin_user.username,
namespace=self.namespace_name,
admins=[u.username for u in old_admins],
)
def test_remove_namespace_admins_as_normal_user(self):
"""
Normal users may not remove other users from the admin role - a
PermissionError is raised as a result.
"""
old_admins = [
self.admin_user,
]
with self.assertRaises(PermissionError):
logic.remove_namespace_admins(
self.normal_user, self.namespace_name, old_admins
)
class TagTestCase(TestCase):
"""
Exercises the tag related administrative functions.
"""
def setUp(self):
self.site_admin_user = models.User.objects.create_user(
username="site_admin_user",
email="<EMAIL>",
password="password",
is_superuser=True,
)
self.admin_user = models.User.objects.create_user(
username="admin_user",
email="<EMAIL>",
password="password",
)
self.tag_user = models.User.objects.create_user(
username="tag_user",
email="<EMAIL>",
password="password",
)
self.tag_reader = models.User.objects.create_user(
username="tag_reader",
email="<EMAIL>",
password="password",
)
self.normal_user = models.User.objects.create_user(
username="normal_user",
email="<EMAIL>",
password="password",
)
self.namespace_name = "test_namespace"
self.namespace_description = "This is a test namespace."
self.test_namespace = logic.create_namespace(
self.site_admin_user,
self.namespace_name,
self.namespace_description,
admins=[
self.admin_user,
],
)
self.public_tag_name = "public_tag"
self.public_tag_description = "This is a public tag."
self.public_tag_type_of = "s"
self.public_tag = logic.create_tag(
user=self.admin_user,
name=self.public_tag_name,
description=self.public_tag_description,
type_of=self.public_tag_type_of,
namespace=self.test_namespace,
private=False,
)
self.user_tag_name = "user_tag"
self.user_tag_description = "This is a user tag."
self.user_tag_type_of = "b"
self.user_tag = logic.create_tag(
user=self.admin_user,
name=self.user_tag_name,
description=self.user_tag_description,
type_of=self.user_tag_type_of,
namespace=self.test_namespace,
private=True,
users=[
self.tag_user,
],
)
self.reader_tag_name = "reader_tag"
self.reader_tag_description = "This is a reader tag."
self.reader_tag_type_of = "i"
self.reader_tag = logic.create_tag(
user=self.admin_user,
name=self.reader_tag_name,
description=self.reader_tag_description,
type_of=self.reader_tag_type_of,
namespace=self.test_namespace,
private=True,
readers=[
self.tag_reader,
],
)
def test_create_tag_as_site_admin(self):
"""
Ensure a site admin user who creates the tag is assigned the
expected user role and the tag's creation is logged.
"""
name = "my_tag"
description = "This is a test tag."
type_of = "s" # string
is_private = False
mock_logger = mock.MagicMock()
with mock.patch("datastore.logic.logger", mock_logger):
tag = logic.create_tag(
self.site_admin_user,
name,
description,
type_of,
self.test_namespace,
is_private,
)
self.assertEqual(tag.name, name)
self.assertEqual(tag.description, description)
self.assertEqual(tag.type_of, type_of)
self.assertEqual(tag.namespace, self.test_namespace)
self.assertFalse(tag.private)
self.assertIn(self.site_admin_user, tag.users.all())
self.assertEqual(0, len(tag.readers.all()))
mock_logger.msg.assert_called_once_with(
"Create tag.",
user=self.site_admin_user.username,
name=name,
description=description,
type_of=tag.get_type_of_display(),
namespace=self.test_namespace.name,
private=is_private,
users=[
self.site_admin_user.username,
],
readers=[],
)
def test_create_tag_as_admin(self):
"""
Ensure a namespace admin user who creates the tag is assigned the
expected user role and the tag's creation is logged.
"""
name = "my_tag"
description = "This is a test tag."
type_of = "s" # string
is_private = False
mock_logger = mock.MagicMock()
with mock.patch("datastore.logic.logger", mock_logger):
tag = logic.create_tag(
self.admin_user,
name,
description,
type_of,
self.test_namespace,
is_private,
)
self.assertEqual(tag.name, name)
self.assertEqual(tag.description, description)
self.assertEqual(tag.type_of, type_of)
self.assertEqual(tag.namespace, self.test_namespace)
self.assertFalse(tag.private)
self.assertIn(self.admin_user, tag.users.all())
self.assertEqual(0, len(tag.readers.all()))
mock_logger.msg.assert_called_once_with(
"Create tag.",
user=self.admin_user.username,
name=name,
description=description,
type_of=tag.get_type_of_display(),
namespace=self.test_namespace.name,
private=is_private,
users=[
self.admin_user.username,
],
readers=[],
)
def test_create_tag_with_users_and_readers_list(self):
"""
If there are users with users and readers roles passed into the
create_tag function, then they are found with the expected roles in
relation to the tag.
"""
name = "my_tag"
description = "This is a test tag."
type_of = "s" # string
is_private = False
users = [
self.tag_user,
]
readers = [
self.tag_reader,
]
mock_logger = mock.MagicMock()
with mock.patch("datastore.logic.logger", mock_logger):
tag = logic.create_tag(
self.site_admin_user,
name,
description,
type_of,
self.test_namespace,
is_private,
users,
readers,
)
self.assertEqual(tag.name, name)
self.assertEqual(tag.description, description)
self.assertEqual(tag.type_of, type_of)
self.assertEqual(tag.namespace, self.test_namespace)
self.assertFalse(tag.private)
self.assertEqual(2, len(tag.users.all()))
self.assertIn(self.site_admin_user, tag.users.all())
self.assertIn(self.tag_user, tag.users.all())
self.assertEqual(1, len(tag.readers.all()))
self.assertIn(self.tag_reader, tag.readers.all())
mock_logger.msg.assert_called_once_with(
"Create tag.",
user=self.site_admin_user.username,
name=name,
description=description,
type_of=tag.get_type_of_display(),
namespace=self.test_namespace.name,
private=is_private,
users=[
self.site_admin_user.username,
self.tag_user.username,
],
readers=[
self.tag_reader.username,
],
)
def test_create_tag_with_normal_user(self):
"""
A user who isn't a site admin or who has the role of admin for the
referenced namespace cannot create a new tag. A PermissionError is
raised instead.
"""
name = "my_tag"
description = "This is a test tag."
type_of = "s" # string
is_private = False
with self.assertRaises(PermissionError):
logic.create_tag(
self.normal_user,
name,
description,
type_of,
self.test_namespace,
is_private,
)
def test_get_tag_as_admin_user(self):
"""
Those with administrator privileges on the namespace are able to see
the full metadata associated with the referenced tag.
"""
n = models.Namespace.objects.get(name=self.namespace_name)
tag = models.Tag.objects.get(name=self.public_tag_name, namespace=n)
result = logic.get_tag(
self.admin_user, self.public_tag_name, self.namespace_name
)
self.assertEqual(result["name"], tag.name)
self.assertEqual(result["namespace"], n.name)
self.assertEqual(result["description"], tag.description)
self.assertEqual(result["path"], tag.path)
self.assertEqual(result["type_of"], tag.get_type_of_display())
self.assertEqual(result["private"], tag.private)
self.assertEqual(
result["users"], [user.username for user in tag.users.all()]
)
self.assertEqual(
result["readers"],
[reader.username for reader in tag.readers.all()],
)
self.assertEqual(result["created_by"], tag.created_by.username)
self.assertEqual(result["created_on"], str(tag.created_on))
self.assertEqual(result["updated_by"], tag.updated_by.username)
self.assertEqual(result["updated_on"], str(tag.updated_on))
def test_get_tag_as_tag_user(self):
"""
Those with user privileges on the tag are able to see limited metadata
associated with the referenced tag.
"""
n = models.Namespace.objects.get(name=self.namespace_name)
tag = models.Tag.objects.get(name=self.public_tag_name, namespace=n)
tag.users.add(self.tag_user)
tag.private = True
tag.save()
result = logic.get_tag(
self.tag_user, self.public_tag_name, self.namespace_name
)
self.assertEqual(result["name"], tag.name)
self.assertEqual(result["namespace"], n.name)
self.assertEqual(result["description"], tag.description)
self.assertEqual(result["path"], tag.path)
self.assertEqual(result["type_of"], tag.get_type_of_display())
self.assertEqual(result["private"], tag.private)
def test_get_tag_as_tag_reader(self):
"""
Those with reader privileges on the tag are able to see limited
metadata associated with the referenced tag.
"""
n = models.Namespace.objects.get(name=self.namespace_name)
tag = models.Tag.objects.get(name=self.public_tag_name, namespace=n)
tag.readers.add(self.tag_reader)
tag.private = True
tag.save()
result = logic.get_tag(
self.tag_reader, self.public_tag_name, self.namespace_name
)
self.assertEqual(result["name"], tag.name)
self.assertEqual(result["namespace"], n.name)
self.assertEqual(result["description"], tag.description)
self.assertEqual(result["path"], tag.path)
self.assertEqual(result["type_of"], tag.get_type_of_display())
self.assertEqual(result["private"], tag.private)
def test_get_tag_as_normal_user(self):
"""
Normal users can see limited metadata associated with the referenced
non-private tag.
"""
n = models.Namespace.objects.get(name=self.namespace_name)
tag = models.Tag.objects.get(name=self.public_tag_name, namespace=n)
result = logic.get_tag(
self.normal_user, self.public_tag_name, self.namespace_name
)
self.assertEqual(result["name"], tag.name)
self.assertEqual(result["namespace"], n.name)
self.assertEqual(result["description"], tag.description)
self.assertEqual(result["path"], tag.path)
self.assertEqual(result["type_of"], tag.get_type_of_display())
self.assertEqual(result["private"], tag.private)
    def test_get_tag_as_private_normal_user(self):
        """
        Normal users cannot see any metadata associated with a private tag.
        Results in a PermissionError being thrown.
        """
        n = models.Namespace.objects.get(name=self.namespace_name)
        tag = models.Tag.objects.get(name=self.public_tag_name, namespace=n)
        # Flip the public tag to private so the normal user loses access.
        tag.private = True
        tag.save()
        with self.assertRaises(PermissionError):
            logic.get_tag(
                self.normal_user, self.public_tag_name, self.namespace_name
            )
def test_update_tag_description_as_admin(self):
"""
Those with administrator privileges on the namesapce are able to
update the tag's description.
"""
new_description = "This is an updated tag description."
mock_logger = mock.MagicMock()
with mock.patch("datastore.logic.logger", mock_logger):
result = logic.update_tag_description(
self.admin_user,
self.public_tag_name,
self.namespace_name,
new_description,
)
self.assertEqual(result.description, new_description)
mock_logger.msg.assert_called_once_with(
"Update tag description.",
user=self.admin_user.username,
tag=self.public_tag_name,
namespace=self.namespace_name,
description=new_description,
)
def test_update_tag_description_as_site_admin(self):
"""
Those with site administrator privileges are able to update the
tag's description.
"""
new_description = "This is an updated tag description."
mock_logger = mock.MagicMock()
with mock.patch("datastore.logic.logger", mock_logger):
result = logic.update_tag_description(
self.site_admin_user,
self.public_tag_name,
self.namespace_name,
new_description,
)
self.assertEqual(result.description, new_description)
mock_logger.msg.assert_called_once_with(
"Update tag description.",
user=self.site_admin_user.username,
tag=self.public_tag_name,
namespace=self.namespace_name,
description=new_description,
)
    def test_update_tag_description_as_normal_user(self):
        """
        Normal users may not update a tag's description - a
        PermissionError is raised as a result.
        """
        new_description = "This is an updated namespace description."
        with self.assertRaises(PermissionError):
            logic.update_tag_description(
                self.normal_user,
                self.public_tag_name,
                self.namespace_name,
                new_description,
            )
def test_set_tag_private_as_admin(self):
"""
Those with administrator privileges on the namesapce are able to
update the tag's "private" flag.
"""
self.assertFalse(self.public_tag.private)
mock_logger = mock.MagicMock()
with mock.patch("datastore.logic.logger", mock_logger):
result = logic.set_tag_private(
self.admin_user,
self.public_tag_name,
self.namespace_name,
True,
)
self.assertTrue(result.private)
mock_logger.msg.assert_called_once_with(
"Update tag privacy.",
user=self.admin_user.username,
tag=self.public_tag_name,
namespace=self.namespace_name,
private=True,
)
def test_set_tag_private_as_site_admin(self):
"""
Those with site administrator privileges are able to update the
tag's "private" flag.
"""
self.assertFalse(self.public_tag.private)
mock_logger = mock.MagicMock()
with mock.patch("datastore.logic.logger", mock_logger):
result = logic.set_tag_private(
self.site_admin_user,
self.public_tag_name,
self.namespace_name,
True,
)
self.assertTrue(result.private)
mock_logger.msg.assert_called_once_with(
"Update tag privacy.",
user=self.site_admin_user.username,
tag=self.public_tag_name,
namespace=self.namespace_name,
private=True,
)
def test_set_tag_private_as_normal_user(self):
"""
Normal users may not update a tag's "private" flag - a
PermissionError is raised as a result.
"""
self.assertFalse(self.public_tag.private)
with self.assertRaises(PermissionError):
logic.set_tag_private(
self.normal_user,
self.public_tag_name,
self.namespace_name,
True,
)
def test_add_tag_users_as_admin(self):
"""
Admin users are allowed to add users to the users role.
"""
new_users = [
self.normal_user,
self.tag_user,
]
mock_logger = mock.MagicMock()
with mock.patch("datastore.logic.logger", mock_logger):
result = logic.add_tag_users(
self.admin_user,
self.public_tag_name,
self.namespace_name,
new_users,
)
current_users = result.users.all()
for user in new_users:
self.assertIn(user, current_users)
mock_logger.msg.assert_called_once_with(
"Add tag users.",
user=self.admin_user.username,
tag=self.public_tag_name,
namespace=self.namespace_name,
users=[u.username for u in new_users],
)
def test_add_tag_users_as_site_admin(self):
"""
Site admin users are allowed to add users to the users role for the
tag.
"""
new_users = [
self.normal_user,
self.tag_user,
]
mock_logger = mock.MagicMock()
with mock.patch("datastore.logic.logger", mock_logger):
result = logic.add_tag_users(
self.site_admin_user,
self.public_tag_name,
self.namespace_name,
new_users,
)
current_users = result.users.all()
for user in new_users:
self.assertIn(user, current_users)
mock_logger.msg.assert_called_once_with(
"Add tag users.",
user=self.site_admin_user.username,
tag=self.public_tag_name,
namespace=self.namespace_name,
users=[u.username for u in new_users],
)
def test_add_tag_users_as_normal_user(self):
"""
Normal users may not add other users to the users role - a
PermissionError is raised as a result.
"""
new_users = [
self.normal_user,
self.tag_user,
]
with self.assertRaises(PermissionError):
logic.add_tag_users(
self.normal_user,
self.public_tag_name,
self.namespace_name,
new_users,
)
def test_remove_tag_users_as_admin(self):
"""
Admin users are allowed to remove other users (including themselves)
from the users role associated with the tag.
"""
old_users = [self.tag_user]
logic.add_tag_users(
self.site_admin_user,
self.public_tag_name,
self.namespace_name,
old_users,
)
mock_logger = mock.MagicMock()
with mock.patch("datastore.logic.logger", mock_logger):
result = logic.remove_tag_users(
self.admin_user,
self.public_tag_name,
self.namespace_name,
old_users,
)
current_users = result.users.all()
for user in old_users:
self.assertNotIn(user, current_users)
mock_logger.msg.assert_called_once_with(
"Remove tag users.",
user=self.admin_user.username,
tag=self.public_tag_name,
namespace=self.namespace_name,
users=[u.username for u in old_users],
)
def test_remove_tag_users_as_site_admin(self):
"""
Site admin users are allowed to remove other users from the tag's
users role.
"""
old_users = [self.tag_user]
logic.add_tag_users(
self.site_admin_user,
self.public_tag_name,
self.namespace_name,
old_users,
)
mock_logger = mock.MagicMock()
with mock.patch("datastore.logic.logger", mock_logger):
result = logic.remove_tag_users(
self.site_admin_user,
self.public_tag_name,
self.namespace_name,
old_users,
)
current_users = result.users.all()
for user in old_users:
self.assertNotIn(user, current_users)
mock_logger.msg.assert_called_once_with(
"Remove tag users.",
user=self.site_admin_user.username,
tag=self.public_tag_name,
namespace=self.namespace_name,
users=[u.username for u in old_users],
)
def test_remove_tag_users_as_normal_user(self):
"""
Normal users may not remove other users from the tag's users role - a
PermissionError is raised as a result.
"""
old_users = [
self.tag_user,
]
with self.assertRaises(PermissionError):
logic.remove_tag_users(
self.normal_user,
self.public_tag_name,
self.namespace_name,
old_users,
)
def test_add_tag_readers_as_admin(self):
"""
Admin users are allowed to add users to the readers role.
"""
new_readers = [
self.normal_user,
self.tag_reader,
]
mock_logger = mock.MagicMock()
with mock.patch("datastore.logic.logger", mock_logger):
result = logic.add_tag_readers(
self.admin_user,
self.public_tag_name,
self.namespace_name,
new_readers,
)
current_readers = result.readers.all()
for user in new_readers:
self.assertIn(user, current_readers)
mock_logger.msg.assert_called_once_with(
"Add tag readers.",
user=self.admin_user.username,
tag=self.public_tag_name,
namespace=self.namespace_name,
readers=[u.username for u in new_readers],
)
def test_add_tag_readers_as_site_admin(self):
"""
Site admin users are allowed to add users to the readers role for the
tag.
"""
new_readers = [
self.normal_user,
self.tag_reader,
]
mock_logger = mock.MagicMock()
with mock.patch("datastore.logic.logger", mock_logger):
result = logic.add_tag_readers(
self.site_admin_user,
self.public_tag_name,
self.namespace_name,
new_readers,
)
current_readers = result.readers.all()
for user in new_readers:
self.assertIn(user, current_readers)
mock_logger.msg.assert_called_once_with(
"Add tag readers.",
user=self.site_admin_user.username,
tag=self.public_tag_name,
namespace=self.namespace_name,
readers=[u.username for u in new_readers],
)
def test_add_tag_readers_as_normal_user(self):
"""
Normal users may not add other users to the readers role - a
PermissionError is raised as a result.
"""
new_readers = [
self.normal_user,
self.tag_user,
]
with self.assertRaises(PermissionError):
logic.add_tag_readers(
self.normal_user,
self.public_tag_name,
self.namespace_name,
new_readers,
)
def test_remove_tag_readers_as_admin(self):
"""
Admin users are allowed to remove other users (including themselves)
from the readers role associated with the tag.
"""
old_readers = [self.tag_reader]
logic.add_tag_readers(
self.site_admin_user,
self.public_tag_name,
self.namespace_name,
old_readers,
)
mock_logger = mock.MagicMock()
with mock.patch("datastore.logic.logger", mock_logger):
result = logic.remove_tag_readers(
self.admin_user,
self.public_tag_name,
self.namespace_name,
old_readers,
)
current_readers = result.readers.all()
for user in old_readers:
self.assertNotIn(user, current_readers)
mock_logger.msg.assert_called_once_with(
"Remove tag readers.",
user=self.admin_user.username,
tag=self.public_tag_name,
namespace=self.namespace_name,
readers=[u.username for u in old_readers],
)
def test_remove_tag_readers_as_site_admin(self):
"""
Site admin users are allowed to remove other users from the tag's
readers role.
"""
old_readers = [self.tag_reader]
logic.add_tag_readers(
self.site_admin_user,
self.public_tag_name,
self.namespace_name,
old_readers,
)
mock_logger = mock.MagicMock()
with mock.patch("datastore.logic.logger", mock_logger):
result = logic.remove_tag_readers(
self.site_admin_user,
self.public_tag_name,
self.namespace_name,
old_readers,
)
current_readers = result.readers.all()
for user in old_readers:
self.assertNotIn(user, current_readers)
mock_logger.msg.assert_called_once_with(
"Remove tag readers.",
user=self.site_admin_user.username,
tag=self.public_tag_name,
namespace=self.namespace_name,
readers=[u.username for u in old_readers],
)
def test_remove_tag_readers_as_normal_user(self):
"""
Normal users may not remove other users from the tag's readers role - a
PermissionError is raised as a result.
"""
old_readers = [
self.tag_reader,
]
with self.assertRaises(PermissionError):
logic.remove_tag_readers(
self.normal_user,
self.public_tag_name,
self.namespace_name,
old_readers,
)
def test_check_users_tags_as_admin_user(self):
"""
Given a user and a collection of namespace/tag tuples, ensure the
expected True value is returned if the user has permission to use the
referenced tags to annotate values onto objects.
In this case, if a user is an admin of the parent namespace, the
response is always True.
"""
tag_set = set(
[
f"{self.namespace_name}/{self.public_tag_name}",
f"{self.namespace_name}/{self.user_tag_name}",
f"{self.namespace_name}/{self.reader_tag_name}",
]
)
self.assertTrue(logic.check_users_tags(self.admin_user, tag_set))
def test_check_users_tags_as_site_admin(self):
"""
Given a user and a collection of namespace/tag tuples, ensure the
expected True value is returned if the user has permission to use the
referenced tags to annotate values onto objects.
In this case, if a user is a site admin so the response is always True.
"""
tag_set = set(
[
f"{self.namespace_name}/{self.public_tag_name}",
f"{self.namespace_name}/{self.user_tag_name}",
f"{self.namespace_name}/{self.reader_tag_name}",
]
)
self.assertTrue(logic.check_users_tags(self.site_admin_user, tag_set))
def test_check_users_tags_as_normal_user(self):
"""
Given a user and a collection of namespace/tag tuples, ensure the
expected True value is returned if the user has permission to use the
referenced tags to annotate values onto objects.
In this case, if a user is a normal user and the tags are not in scope
with them, so the result will be False.
"""
tag_set = set(
[
f"{self.namespace_name}/{self.public_tag_name}",
f"{self.namespace_name}/{self.user_tag_name}",
f"{self.namespace_name}/{self.reader_tag_name}",
]
)
self.assertFalse(logic.check_users_tags(self.normal_user, tag_set))
def test_check_users_tags_as_tag_user(self):
"""
Given a user and a collection of namespace/tag tuples, ensure the
expected True value is returned if the user has permission to use the
referenced tags to annotate values onto objects.
In this case, if a user is a tag user the response is True. If the tag
collection contains a tag for which the user doesn't have the "user"
role, then the response if False.
"""
tag_set = set(
[
f"{self.namespace_name}/{self.user_tag_name}",
]
)
self.assertTrue(logic.check_users_tags(self.tag_user, tag_set))
tag_set = set(
[
f"{self.namespace_name}/{self.public_tag_name}",
f"{self.namespace_name}/{self.user_tag_name}",
]
)
self.assertFalse(logic.check_users_tags(self.tag_user, tag_set))
def test_check_users_tags_with_duplicate_tags(self):
"""
Given a user and a collection of namespace/tag tuples, ensure the
expected True value is returned if the user has permission to use the
referenced tags to annotate values onto objects.
If there are duplicates of the same tag, this doesn't effect the
outcome.
"""
tag_set = set(
[
f"{self.namespace_name}/{self.user_tag_name}",
f"{self.namespace_name}/{self.user_tag_name}",
f"{self.namespace_name}/{self.user_tag_name}",
f"{self.namespace_name}/{self.user_tag_name}",
f"{self.namespace_name}/{self.user_tag_name}",
]
)
self.assertTrue(logic.check_users_tags(self.tag_user, tag_set))
def test_check_users_tags_as_tag_reader(self):
"""
Given a user and a collection of namespace/tag tuples, ensure the
expected True value is returned if the user has permission to use the
referenced tags to annotate values onto objects.
In this case, if a user is a tag reader the response is False because
readers cannot use the tag to annotate (they can only read values
associated with it), unless, of course, they are also have the "users"
role.
"""
tag_set = set(
[
f"{self.namespace_name}/{self.reader_tag_name}",
]
)
self.assertFalse(logic.check_users_tags(self.tag_reader, tag_set))
def test_check_readers_tags_as_admin_user(self):
"""
Given a user and a collection of namespace/tag tuples, ensure the
expected True value is returned if the user has permission to use the
referenced tags to read values from objects.
In this case, if a user is an admin of the parent namespace, the
response is always True.
"""
tag_set = set(
[
f"{self.namespace_name}/{self.public_tag_name}",
f"{self.namespace_name}/{self.user_tag_name}",
f"{self.namespace_name}/{self.reader_tag_name}",
]
)
self.assertTrue(logic.check_readers_tags(self.admin_user, tag_set))
def test_check_readers_tags_as_site_admin(self):
"""
Given a user and a collection of namespace/tag tuples, ensure the
expected True value is returned if the user has permission to use the
referenced tags to read values from objects.
In this case, if a user is a site admin so the response is always True.
"""
tag_set = set(
[
f"{self.namespace_name}/{self.public_tag_name}",
f"{self.namespace_name}/{self.user_tag_name}",
f"{self.namespace_name}/{self.reader_tag_name}",
]
)
self.assertTrue(
logic.check_readers_tags(self.site_admin_user, tag_set)
)
def test_check_readers_tags_as_normal_user(self):
"""
Given a user and a collection of namespace/tag tuples, ensure the
expected True value is returned if the user has permission to use the
referenced tags to read values from objects.
In this case, if a user is a normal user and the tags are not in scope
with them, so the result will be False.
"""
tag_set = set(
[
f"{self.namespace_name}/{self.public_tag_name}",
f"{self.namespace_name}/{self.user_tag_name}",
f"{self.namespace_name}/{self.reader_tag_name}",
]
)
self.assertFalse(logic.check_readers_tags(self.normal_user, tag_set))
def test_check_readers_tags_as_normal_user_public_tag(self):
"""
Given a user and a collection of namespace/tag tuples, ensure the
expected True value is returned if the user has permission to use the
referenced tags to read values from objects.
In this case, if a user is a normal user and the tags are all public
the result will be True.
"""
tag_set = set(
[
f"{self.namespace_name}/{self.public_tag_name}",
]
)
self.assertTrue(logic.check_readers_tags(self.normal_user, tag_set))
def test_check_readers_tags_as_tag_reader(self):
"""
Given a user and a collection of namespace/tag tuples, ensure the
expected True value is returned if the user has permission to use the
referenced tags to read values from objects.
In this case, if a user is a tag reader the response is True. If the
tag collection contains a tag for which the user doesn't have the
"reader" role, then the response if False.
"""
tag_set = set(
[
f"{self.namespace_name}/{self.reader_tag_name}",
]
)
self.assertTrue(logic.check_readers_tags(self.tag_reader, tag_set))
tag_set = set(
[
f"{self.namespace_name}/{self.reader_tag_name}",
f"{self.namespace_name}/{self.user_tag_name}",
]
)
self.assertFalse(logic.check_readers_tags(self.tag_reader, tag_set))
def test_check_readers_tags_with_duplicate_tags(self):
"""
Given a user and a collection of namespace/tag tuples, ensure the
expected True value is returned if the user has permission to use the
referenced tags to read values from objects.
If there are duplicates of the same tag, this doesn't effect the
outcome.
"""
tag_set = set(
[
f"{self.namespace_name}/{self.reader_tag_name}",
f"{self.namespace_name}/{self.reader_tag_name}",
f"{self.namespace_name}/{self.reader_tag_name}",
f"{self.namespace_name}/{self.reader_tag_name}",
f"{self.namespace_name}/{self.reader_tag_name}",
f"{self.namespace_name}/{self.reader_tag_name}",
]
)
self.assertTrue(logic.check_readers_tags(self.tag_reader, tag_set))
def test_check_readers_tags_as_tag_user(self):
"""
Given a user and a collection of namespace/tag tuples, ensure the
expected True value is returned if the user has permission to use the
referenced tags to read values from objects.
In this case, if a user is a tag user the response is True because
users can always read tags for which they have permission to annotate
data.
"""
tag_set = set(
[
f"{self.namespace_name}/{self.reader_tag_name}",
]
)
self.assertFalse(logic.check_readers_tags(self.tag_user, tag_set))
|
[
"datastore.logic.create_namespace",
"datastore.logic.update_tag_description",
"datastore.logic.create_tag",
"datastore.models.Tag.objects.get",
"datastore.models.User.objects.create_user",
"unittest.mock.MagicMock",
"datastore.models.Namespace.objects.get",
"datastore.logic.get_namespace",
"datastore.logic.check_readers_tags",
"datastore.logic.add_namespace_admins",
"unittest.mock.patch",
"datastore.logic.get_tag",
"datastore.logic.remove_namespace_admins",
"datastore.logic.remove_tag_readers",
"datastore.logic.add_tag_users",
"datastore.logic.remove_tag_users",
"datastore.logic.check_users_tags",
"datastore.logic.set_tag_private",
"datastore.logic.add_tag_readers",
"datastore.logic.update_namespace_description"
] |
[((2282, 2402), 'datastore.models.User.objects.create_user', 'models.User.objects.create_user', ([], {'username': '"""site_admin_user"""', 'email': '"""<EMAIL>"""', 'password': '"""password"""', 'is_superuser': '(True)'}), "(username='site_admin_user', email='<EMAIL>',\n password='password', is_superuser=True)\n", (2313, 2402), False, 'from datastore import models\n'), ((2484, 2580), 'datastore.models.User.objects.create_user', 'models.User.objects.create_user', ([], {'username': '"""admin_user"""', 'email': '"""<EMAIL>"""', 'password': '"""password"""'}), "(username='admin_user', email='<EMAIL>',\n password='password')\n", (2515, 2580), False, 'from datastore import models\n'), ((2648, 2742), 'datastore.models.User.objects.create_user', 'models.User.objects.create_user', ([], {'username': '"""tag_user"""', 'email': '"""<EMAIL>"""', 'password': '"""password"""'}), "(username='tag_user', email='<EMAIL>',\n password='password')\n", (2679, 2742), False, 'from datastore import models\n'), ((2812, 2908), 'datastore.models.User.objects.create_user', 'models.User.objects.create_user', ([], {'username': '"""tag_reader"""', 'email': '"""<EMAIL>"""', 'password': '"""password"""'}), "(username='tag_reader', email='<EMAIL>',\n password='password')\n", (2843, 2908), False, 'from datastore import models\n'), ((2979, 3076), 'datastore.models.User.objects.create_user', 'models.User.objects.create_user', ([], {'username': '"""normal_user"""', 'email': '"""<EMAIL>"""', 'password': '"""password"""'}), "(username='normal_user', email='<EMAIL>',\n password='password')\n", (3010, 3076), False, 'from datastore import models\n'), ((3262, 3386), 'datastore.logic.create_namespace', 'logic.create_namespace', (['self.site_admin_user', 'self.namespace_name', 'self.namespace_description'], {'admins': '[self.admin_user]'}), '(self.site_admin_user, self.namespace_name, self.\n namespace_description, admins=[self.admin_user])\n', (3284, 3386), False, 'from datastore import logic\n'), ((3642, 
3841), 'datastore.logic.create_tag', 'logic.create_tag', ([], {'user': 'self.site_admin_user', 'name': 'self.public_tag_name', 'description': 'self.public_tag_description', 'type_of': 'self.public_tag_type_of', 'namespace': 'self.test_namespace', 'private': '(False)'}), '(user=self.site_admin_user, name=self.public_tag_name,\n description=self.public_tag_description, type_of=self.\n public_tag_type_of, namespace=self.test_namespace, private=False)\n', (3658, 3841), False, 'from datastore import logic\n'), ((4074, 4288), 'datastore.logic.create_tag', 'logic.create_tag', ([], {'user': 'self.site_admin_user', 'name': 'self.user_tag_name', 'description': 'self.user_tag_description', 'type_of': 'self.user_tag_type_of', 'namespace': 'self.test_namespace', 'private': '(True)', 'users': '[self.tag_user]'}), '(user=self.site_admin_user, name=self.user_tag_name,\n description=self.user_tag_description, type_of=self.user_tag_type_of,\n namespace=self.test_namespace, private=True, users=[self.tag_user])\n', (4090, 4288), False, 'from datastore import logic\n'), ((4577, 4806), 'datastore.logic.create_tag', 'logic.create_tag', ([], {'user': 'self.site_admin_user', 'name': 'self.reader_tag_name', 'description': 'self.reader_tag_description', 'type_of': 'self.reader_tag_type_of', 'namespace': 'self.test_namespace', 'private': '(True)', 'readers': '[self.tag_reader]'}), '(user=self.site_admin_user, name=self.reader_tag_name,\n description=self.reader_tag_description, type_of=self.\n reader_tag_type_of, namespace=self.test_namespace, private=True,\n readers=[self.tag_reader])\n', (4593, 4806), False, 'from datastore import logic\n'), ((5202, 5218), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (5216, 5218), False, 'from unittest import mock\n'), ((6305, 6321), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (6319, 6321), False, 'from unittest import mock\n'), ((7464, 7480), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (7478, 7480), 
False, 'from unittest import mock\n'), ((8896, 8954), 'datastore.logic.get_namespace', 'logic.get_namespace', (['self.normal_user', 'self.namespace_name'], {}), '(self.normal_user, self.namespace_name)\n', (8915, 8954), False, 'from datastore import logic\n'), ((9711, 9766), 'datastore.logic.get_namespace', 'logic.get_namespace', (['self.tag_user', 'self.namespace_name'], {}), '(self.tag_user, self.namespace_name)\n', (9730, 9766), False, 'from datastore import logic\n'), ((10745, 10802), 'datastore.logic.get_namespace', 'logic.get_namespace', (['self.tag_reader', 'self.namespace_name'], {}), '(self.tag_reader, self.namespace_name)\n', (10764, 10802), False, 'from datastore import logic\n'), ((11712, 11769), 'datastore.logic.get_namespace', 'logic.get_namespace', (['self.admin_user', 'self.namespace_name'], {}), '(self.admin_user, self.namespace_name)\n', (11731, 11769), False, 'from datastore import logic\n'), ((15009, 15071), 'datastore.logic.get_namespace', 'logic.get_namespace', (['self.site_admin_user', 'self.namespace_name'], {}), '(self.site_admin_user, self.namespace_name)\n', (15028, 15071), False, 'from datastore import logic\n'), ((18357, 18373), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (18371, 18373), False, 'from unittest import mock\n'), ((19157, 19173), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (19171, 19173), False, 'from unittest import mock\n'), ((20360, 20376), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (20374, 20376), False, 'from unittest import mock\n'), ((21171, 21187), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (21185, 21187), False, 'from unittest import mock\n'), ((22441, 22457), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (22455, 22457), False, 'from unittest import mock\n'), ((23268, 23284), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (23282, 23284), False, 'from unittest import mock\n'), ((24457, 24577), 
'datastore.models.User.objects.create_user', 'models.User.objects.create_user', ([], {'username': '"""site_admin_user"""', 'email': '"""<EMAIL>"""', 'password': '"""password"""', 'is_superuser': '(True)'}), "(username='site_admin_user', email='<EMAIL>',\n password='password', is_superuser=True)\n", (24488, 24577), False, 'from datastore import models\n'), ((24659, 24755), 'datastore.models.User.objects.create_user', 'models.User.objects.create_user', ([], {'username': '"""admin_user"""', 'email': '"""<EMAIL>"""', 'password': '"""password"""'}), "(username='admin_user', email='<EMAIL>',\n password='password')\n", (24690, 24755), False, 'from datastore import models\n'), ((24823, 24917), 'datastore.models.User.objects.create_user', 'models.User.objects.create_user', ([], {'username': '"""tag_user"""', 'email': '"""<EMAIL>"""', 'password': '"""password"""'}), "(username='tag_user', email='<EMAIL>',\n password='password')\n", (24854, 24917), False, 'from datastore import models\n'), ((24987, 25083), 'datastore.models.User.objects.create_user', 'models.User.objects.create_user', ([], {'username': '"""tag_reader"""', 'email': '"""<EMAIL>"""', 'password': '"""password"""'}), "(username='tag_reader', email='<EMAIL>',\n password='password')\n", (25018, 25083), False, 'from datastore import models\n'), ((25154, 25251), 'datastore.models.User.objects.create_user', 'models.User.objects.create_user', ([], {'username': '"""normal_user"""', 'email': '"""<EMAIL>"""', 'password': '"""password"""'}), "(username='normal_user', email='<EMAIL>',\n password='password')\n", (25185, 25251), False, 'from datastore import models\n'), ((25437, 25561), 'datastore.logic.create_namespace', 'logic.create_namespace', (['self.site_admin_user', 'self.namespace_name', 'self.namespace_description'], {'admins': '[self.admin_user]'}), '(self.site_admin_user, self.namespace_name, self.\n namespace_description, admins=[self.admin_user])\n', (25459, 25561), False, 'from datastore import logic\n'), 
((25817, 26011), 'datastore.logic.create_tag', 'logic.create_tag', ([], {'user': 'self.admin_user', 'name': 'self.public_tag_name', 'description': 'self.public_tag_description', 'type_of': 'self.public_tag_type_of', 'namespace': 'self.test_namespace', 'private': '(False)'}), '(user=self.admin_user, name=self.public_tag_name,\n description=self.public_tag_description, type_of=self.\n public_tag_type_of, namespace=self.test_namespace, private=False)\n', (25833, 26011), False, 'from datastore import logic\n'), ((26244, 26455), 'datastore.logic.create_tag', 'logic.create_tag', ([], {'user': 'self.admin_user', 'name': 'self.user_tag_name', 'description': 'self.user_tag_description', 'type_of': 'self.user_tag_type_of', 'namespace': 'self.test_namespace', 'private': '(True)', 'users': '[self.tag_user]'}), '(user=self.admin_user, name=self.user_tag_name, description\n =self.user_tag_description, type_of=self.user_tag_type_of, namespace=\n self.test_namespace, private=True, users=[self.tag_user])\n', (26260, 26455), False, 'from datastore import logic\n'), ((26742, 26966), 'datastore.logic.create_tag', 'logic.create_tag', ([], {'user': 'self.admin_user', 'name': 'self.reader_tag_name', 'description': 'self.reader_tag_description', 'type_of': 'self.reader_tag_type_of', 'namespace': 'self.test_namespace', 'private': '(True)', 'readers': '[self.tag_reader]'}), '(user=self.admin_user, name=self.reader_tag_name,\n description=self.reader_tag_description, type_of=self.\n reader_tag_type_of, namespace=self.test_namespace, private=True,\n readers=[self.tag_reader])\n', (26758, 26966), False, 'from datastore import logic\n'), ((27429, 27445), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (27443, 27445), False, 'from unittest import mock\n'), ((28950, 28966), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (28964, 28966), False, 'from unittest import mock\n'), ((30626, 30642), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (30640, 
30642), False, 'from unittest import mock\n'), ((32987, 33041), 'datastore.models.Namespace.objects.get', 'models.Namespace.objects.get', ([], {'name': 'self.namespace_name'}), '(name=self.namespace_name)\n', (33015, 33041), False, 'from datastore import models\n'), ((33056, 33118), 'datastore.models.Tag.objects.get', 'models.Tag.objects.get', ([], {'name': 'self.public_tag_name', 'namespace': 'n'}), '(name=self.public_tag_name, namespace=n)\n', (33078, 33118), False, 'from datastore import models\n'), ((33136, 33209), 'datastore.logic.get_tag', 'logic.get_tag', (['self.admin_user', 'self.public_tag_name', 'self.namespace_name'], {}), '(self.admin_user, self.public_tag_name, self.namespace_name)\n', (33149, 33209), False, 'from datastore import logic\n'), ((34300, 34354), 'datastore.models.Namespace.objects.get', 'models.Namespace.objects.get', ([], {'name': 'self.namespace_name'}), '(name=self.namespace_name)\n', (34328, 34354), False, 'from datastore import models\n'), ((34369, 34431), 'datastore.models.Tag.objects.get', 'models.Tag.objects.get', ([], {'name': 'self.public_tag_name', 'namespace': 'n'}), '(name=self.public_tag_name, namespace=n)\n', (34391, 34431), False, 'from datastore import models\n'), ((34532, 34603), 'datastore.logic.get_tag', 'logic.get_tag', (['self.tag_user', 'self.public_tag_name', 'self.namespace_name'], {}), '(self.tag_user, self.public_tag_name, self.namespace_name)\n', (34545, 34603), False, 'from datastore import logic\n'), ((35179, 35233), 'datastore.models.Namespace.objects.get', 'models.Namespace.objects.get', ([], {'name': 'self.namespace_name'}), '(name=self.namespace_name)\n', (35207, 35233), False, 'from datastore import models\n'), ((35248, 35310), 'datastore.models.Tag.objects.get', 'models.Tag.objects.get', ([], {'name': 'self.public_tag_name', 'namespace': 'n'}), '(name=self.public_tag_name, namespace=n)\n', (35270, 35310), False, 'from datastore import models\n'), ((35415, 35488), 'datastore.logic.get_tag', 
'logic.get_tag', (['self.tag_reader', 'self.public_tag_name', 'self.namespace_name'], {}), '(self.tag_reader, self.public_tag_name, self.namespace_name)\n', (35428, 35488), False, 'from datastore import logic\n'), ((36042, 36096), 'datastore.models.Namespace.objects.get', 'models.Namespace.objects.get', ([], {'name': 'self.namespace_name'}), '(name=self.namespace_name)\n', (36070, 36096), False, 'from datastore import models\n'), ((36111, 36173), 'datastore.models.Tag.objects.get', 'models.Tag.objects.get', ([], {'name': 'self.public_tag_name', 'namespace': 'n'}), '(name=self.public_tag_name, namespace=n)\n', (36133, 36173), False, 'from datastore import models\n'), ((36191, 36265), 'datastore.logic.get_tag', 'logic.get_tag', (['self.normal_user', 'self.public_tag_name', 'self.namespace_name'], {}), '(self.normal_user, self.public_tag_name, self.namespace_name)\n', (36204, 36265), False, 'from datastore import logic\n'), ((36856, 36910), 'datastore.models.Namespace.objects.get', 'models.Namespace.objects.get', ([], {'name': 'self.namespace_name'}), '(name=self.namespace_name)\n', (36884, 36910), False, 'from datastore import models\n'), ((36925, 36987), 'datastore.models.Tag.objects.get', 'models.Tag.objects.get', ([], {'name': 'self.public_tag_name', 'namespace': 'n'}), '(name=self.public_tag_name, namespace=n)\n', (36947, 36987), False, 'from datastore import models\n'), ((37474, 37490), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (37488, 37490), False, 'from unittest import mock\n'), ((38353, 38369), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (38367, 38369), False, 'from unittest import mock\n'), ((39744, 39760), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (39758, 39760), False, 'from unittest import mock\n'), ((40546, 40562), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (40560, 40562), False, 'from unittest import mock\n'), ((41817, 41833), 'unittest.mock.MagicMock', 'mock.MagicMock', 
([], {}), '()\n', (41831, 41833), False, 'from unittest import mock\n'), ((42747, 42763), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (42761, 42763), False, 'from unittest import mock\n'), ((44168, 44268), 'datastore.logic.add_tag_users', 'logic.add_tag_users', (['self.site_admin_user', 'self.public_tag_name', 'self.namespace_name', 'old_users'], {}), '(self.site_admin_user, self.public_tag_name, self.\n namespace_name, old_users)\n', (44187, 44268), False, 'from datastore import logic\n'), ((44345, 44361), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (44359, 44361), False, 'from unittest import mock\n'), ((45225, 45325), 'datastore.logic.add_tag_users', 'logic.add_tag_users', (['self.site_admin_user', 'self.public_tag_name', 'self.namespace_name', 'old_users'], {}), '(self.site_admin_user, self.public_tag_name, self.\n namespace_name, old_users)\n', (45244, 45325), False, 'from datastore import logic\n'), ((45402, 45418), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (45416, 45418), False, 'from unittest import mock\n'), ((46825, 46841), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (46839, 46841), False, 'from unittest import mock\n'), ((47781, 47797), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (47795, 47797), False, 'from unittest import mock\n'), ((49238, 49342), 'datastore.logic.add_tag_readers', 'logic.add_tag_readers', (['self.site_admin_user', 'self.public_tag_name', 'self.namespace_name', 'old_readers'], {}), '(self.site_admin_user, self.public_tag_name, self.\n namespace_name, old_readers)\n', (49259, 49342), False, 'from datastore import logic\n'), ((49419, 49435), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (49433, 49435), False, 'from unittest import mock\n'), ((50325, 50429), 'datastore.logic.add_tag_readers', 'logic.add_tag_readers', (['self.site_admin_user', 'self.public_tag_name', 'self.namespace_name', 'old_readers'], {}), 
'(self.site_admin_user, self.public_tag_name, self.\n namespace_name, old_readers)\n', (50346, 50429), False, 'from datastore import logic\n'), ((50506, 50522), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (50520, 50522), False, 'from unittest import mock\n'), ((5232, 5281), 'unittest.mock.patch', 'mock.patch', (['"""datastore.logic.logger"""', 'mock_logger'], {}), "('datastore.logic.logger', mock_logger)\n", (5242, 5281), False, 'from unittest import mock\n'), ((5300, 5363), 'datastore.logic.create_namespace', 'logic.create_namespace', (['self.site_admin_user', 'name', 'description'], {}), '(self.site_admin_user, name, description)\n', (5322, 5363), False, 'from datastore import logic\n'), ((6335, 6384), 'unittest.mock.patch', 'mock.patch', (['"""datastore.logic.logger"""', 'mock_logger'], {}), "('datastore.logic.logger', mock_logger)\n", (6345, 6384), False, 'from unittest import mock\n'), ((6403, 6474), 'datastore.logic.create_namespace', 'logic.create_namespace', (['self.site_admin_user', 'name', 'description', 'admins'], {}), '(self.site_admin_user, name, description, admins)\n', (6425, 6474), False, 'from datastore import logic\n'), ((7494, 7543), 'unittest.mock.patch', 'mock.patch', (['"""datastore.logic.logger"""', 'mock_logger'], {}), "('datastore.logic.logger', mock_logger)\n", (7504, 7543), False, 'from unittest import mock\n'), ((7562, 7620), 'datastore.logic.create_namespace', 'logic.create_namespace', (['self.admin_user', 'name', 'description'], {}), '(self.admin_user, name, description)\n', (7584, 7620), False, 'from datastore import logic\n'), ((8439, 8497), 'datastore.logic.create_namespace', 'logic.create_namespace', (['self.admin_user', 'name', 'description'], {}), '(self.admin_user, name, description)\n', (8461, 8497), False, 'from datastore import logic\n'), ((18387, 18436), 'unittest.mock.patch', 'mock.patch', (['"""datastore.logic.logger"""', 'mock_logger'], {}), "('datastore.logic.logger', mock_logger)\n", (18397, 18436), 
False, 'from unittest import mock\n'), ((18459, 18552), 'datastore.logic.update_namespace_description', 'logic.update_namespace_description', (['self.admin_user', 'self.namespace_name', 'new_description'], {}), '(self.admin_user, self.namespace_name,\n new_description)\n', (18493, 18552), False, 'from datastore import logic\n'), ((19187, 19236), 'unittest.mock.patch', 'mock.patch', (['"""datastore.logic.logger"""', 'mock_logger'], {}), "('datastore.logic.logger', mock_logger)\n", (19197, 19236), False, 'from unittest import mock\n'), ((19259, 19358), 'datastore.logic.update_namespace_description', 'logic.update_namespace_description', (['self.site_admin_user', 'self.namespace_name', 'new_description'], {}), '(self.site_admin_user, self.\n namespace_name, new_description)\n', (19293, 19358), False, 'from datastore import logic\n'), ((20015, 20109), 'datastore.logic.update_namespace_description', 'logic.update_namespace_description', (['self.normal_user', 'self.namespace_name', 'new_description'], {}), '(self.normal_user, self.namespace_name,\n new_description)\n', (20049, 20109), False, 'from datastore import logic\n'), ((20390, 20439), 'unittest.mock.patch', 'mock.patch', (['"""datastore.logic.logger"""', 'mock_logger'], {}), "('datastore.logic.logger', mock_logger)\n", (20400, 20439), False, 'from unittest import mock\n'), ((20462, 20538), 'datastore.logic.add_namespace_admins', 'logic.add_namespace_admins', (['self.admin_user', 'self.namespace_name', 'new_admins'], {}), '(self.admin_user, self.namespace_name, new_admins)\n', (20488, 20538), False, 'from datastore import logic\n'), ((21201, 21250), 'unittest.mock.patch', 'mock.patch', (['"""datastore.logic.logger"""', 'mock_logger'], {}), "('datastore.logic.logger', mock_logger)\n", (21211, 21250), False, 'from unittest import mock\n'), ((21273, 21358), 'datastore.logic.add_namespace_admins', 'logic.add_namespace_admins', (['self.site_admin_user', 'self.namespace_name', 'new_admins'], {}), '(self.site_admin_user, 
self.namespace_name,\n new_admins)\n', (21299, 21358), False, 'from datastore import logic\n'), ((22071, 22148), 'datastore.logic.add_namespace_admins', 'logic.add_namespace_admins', (['self.normal_user', 'self.namespace_name', 'new_admins'], {}), '(self.normal_user, self.namespace_name, new_admins)\n', (22097, 22148), False, 'from datastore import logic\n'), ((22471, 22520), 'unittest.mock.patch', 'mock.patch', (['"""datastore.logic.logger"""', 'mock_logger'], {}), "('datastore.logic.logger', mock_logger)\n", (22481, 22520), False, 'from unittest import mock\n'), ((22543, 22622), 'datastore.logic.remove_namespace_admins', 'logic.remove_namespace_admins', (['self.admin_user', 'self.namespace_name', 'old_admins'], {}), '(self.admin_user, self.namespace_name, old_admins)\n', (22572, 22622), False, 'from datastore import logic\n'), ((23298, 23347), 'unittest.mock.patch', 'mock.patch', (['"""datastore.logic.logger"""', 'mock_logger'], {}), "('datastore.logic.logger', mock_logger)\n", (23308, 23347), False, 'from unittest import mock\n'), ((23370, 23458), 'datastore.logic.remove_namespace_admins', 'logic.remove_namespace_admins', (['self.site_admin_user', 'self.namespace_name', 'old_admins'], {}), '(self.site_admin_user, self.namespace_name,\n old_admins)\n', (23399, 23458), False, 'from datastore import logic\n'), ((24190, 24275), 'datastore.logic.remove_namespace_admins', 'logic.remove_namespace_admins', (['self.normal_user', 'self.namespace_name', 'old_admins'], {}), '(self.normal_user, self.namespace_name, old_admins\n )\n', (24219, 24275), False, 'from datastore import logic\n'), ((27459, 27508), 'unittest.mock.patch', 'mock.patch', (['"""datastore.logic.logger"""', 'mock_logger'], {}), "('datastore.logic.logger', mock_logger)\n", (27469, 27508), False, 'from unittest import mock\n'), ((27528, 27632), 'datastore.logic.create_tag', 'logic.create_tag', (['self.site_admin_user', 'name', 'description', 'type_of', 'self.test_namespace', 'is_private'], {}), 
'(self.site_admin_user, name, description, type_of, self.\n test_namespace, is_private)\n', (27544, 27632), False, 'from datastore import logic\n'), ((28980, 29029), 'unittest.mock.patch', 'mock.patch', (['"""datastore.logic.logger"""', 'mock_logger'], {}), "('datastore.logic.logger', mock_logger)\n", (28990, 29029), False, 'from unittest import mock\n'), ((29049, 29148), 'datastore.logic.create_tag', 'logic.create_tag', (['self.admin_user', 'name', 'description', 'type_of', 'self.test_namespace', 'is_private'], {}), '(self.admin_user, name, description, type_of, self.\n test_namespace, is_private)\n', (29065, 29148), False, 'from datastore import logic\n'), ((30656, 30705), 'unittest.mock.patch', 'mock.patch', (['"""datastore.logic.logger"""', 'mock_logger'], {}), "('datastore.logic.logger', mock_logger)\n", (30666, 30705), False, 'from unittest import mock\n'), ((30725, 30845), 'datastore.logic.create_tag', 'logic.create_tag', (['self.site_admin_user', 'name', 'description', 'type_of', 'self.test_namespace', 'is_private', 'users', 'readers'], {}), '(self.site_admin_user, name, description, type_of, self.\n test_namespace, is_private, users, readers)\n', (30741, 30845), False, 'from datastore import logic\n'), ((32562, 32662), 'datastore.logic.create_tag', 'logic.create_tag', (['self.normal_user', 'name', 'description', 'type_of', 'self.test_namespace', 'is_private'], {}), '(self.normal_user, name, description, type_of, self.\n test_namespace, is_private)\n', (32578, 32662), False, 'from datastore import logic\n'), ((37095, 37169), 'datastore.logic.get_tag', 'logic.get_tag', (['self.normal_user', 'self.public_tag_name', 'self.namespace_name'], {}), '(self.normal_user, self.public_tag_name, self.namespace_name)\n', (37108, 37169), False, 'from datastore import logic\n'), ((37504, 37553), 'unittest.mock.patch', 'mock.patch', (['"""datastore.logic.logger"""', 'mock_logger'], {}), "('datastore.logic.logger', mock_logger)\n", (37514, 37553), False, 'from unittest 
import mock\n'), ((37576, 37686), 'datastore.logic.update_tag_description', 'logic.update_tag_description', (['self.admin_user', 'self.public_tag_name', 'self.namespace_name', 'new_description'], {}), '(self.admin_user, self.public_tag_name, self.\n namespace_name, new_description)\n', (37604, 37686), False, 'from datastore import logic\n'), ((38383, 38432), 'unittest.mock.patch', 'mock.patch', (['"""datastore.logic.logger"""', 'mock_logger'], {}), "('datastore.logic.logger', mock_logger)\n", (38393, 38432), False, 'from unittest import mock\n'), ((38455, 38569), 'datastore.logic.update_tag_description', 'logic.update_tag_description', (['self.site_admin_user', 'self.public_tag_name', 'self.namespace_name', 'new_description'], {}), '(self.site_admin_user, self.public_tag_name,\n self.namespace_name, new_description)\n', (38483, 38569), False, 'from datastore import logic\n'), ((39302, 39413), 'datastore.logic.update_tag_description', 'logic.update_tag_description', (['self.normal_user', 'self.public_tag_name', 'self.namespace_name', 'new_description'], {}), '(self.normal_user, self.public_tag_name, self.\n namespace_name, new_description)\n', (39330, 39413), False, 'from datastore import logic\n'), ((39774, 39823), 'unittest.mock.patch', 'mock.patch', (['"""datastore.logic.logger"""', 'mock_logger'], {}), "('datastore.logic.logger', mock_logger)\n", (39784, 39823), False, 'from unittest import mock\n'), ((39846, 39938), 'datastore.logic.set_tag_private', 'logic.set_tag_private', (['self.admin_user', 'self.public_tag_name', 'self.namespace_name', '(True)'], {}), '(self.admin_user, self.public_tag_name, self.\n namespace_name, True)\n', (39867, 39938), False, 'from datastore import logic\n'), ((40576, 40625), 'unittest.mock.patch', 'mock.patch', (['"""datastore.logic.logger"""', 'mock_logger'], {}), "('datastore.logic.logger', mock_logger)\n", (40586, 40625), False, 'from unittest import mock\n'), ((40648, 40745), 'datastore.logic.set_tag_private', 
'logic.set_tag_private', (['self.site_admin_user', 'self.public_tag_name', 'self.namespace_name', '(True)'], {}), '(self.site_admin_user, self.public_tag_name, self.\n namespace_name, True)\n', (40669, 40745), False, 'from datastore import logic\n'), ((41406, 41499), 'datastore.logic.set_tag_private', 'logic.set_tag_private', (['self.normal_user', 'self.public_tag_name', 'self.namespace_name', '(True)'], {}), '(self.normal_user, self.public_tag_name, self.\n namespace_name, True)\n', (41427, 41499), False, 'from datastore import logic\n'), ((41847, 41896), 'unittest.mock.patch', 'mock.patch', (['"""datastore.logic.logger"""', 'mock_logger'], {}), "('datastore.logic.logger', mock_logger)\n", (41857, 41896), False, 'from unittest import mock\n'), ((41919, 42014), 'datastore.logic.add_tag_users', 'logic.add_tag_users', (['self.admin_user', 'self.public_tag_name', 'self.namespace_name', 'new_users'], {}), '(self.admin_user, self.public_tag_name, self.\n namespace_name, new_users)\n', (41938, 42014), False, 'from datastore import logic\n'), ((42777, 42826), 'unittest.mock.patch', 'mock.patch', (['"""datastore.logic.logger"""', 'mock_logger'], {}), "('datastore.logic.logger', mock_logger)\n", (42787, 42826), False, 'from unittest import mock\n'), ((42849, 42949), 'datastore.logic.add_tag_users', 'logic.add_tag_users', (['self.site_admin_user', 'self.public_tag_name', 'self.namespace_name', 'new_users'], {}), '(self.site_admin_user, self.public_tag_name, self.\n namespace_name, new_users)\n', (42868, 42949), False, 'from datastore import logic\n'), ((43752, 43848), 'datastore.logic.add_tag_users', 'logic.add_tag_users', (['self.normal_user', 'self.public_tag_name', 'self.namespace_name', 'new_users'], {}), '(self.normal_user, self.public_tag_name, self.\n namespace_name, new_users)\n', (43771, 43848), False, 'from datastore import logic\n'), ((44375, 44424), 'unittest.mock.patch', 'mock.patch', (['"""datastore.logic.logger"""', 'mock_logger'], {}), 
"('datastore.logic.logger', mock_logger)\n", (44385, 44424), False, 'from unittest import mock\n'), ((44447, 44545), 'datastore.logic.remove_tag_users', 'logic.remove_tag_users', (['self.admin_user', 'self.public_tag_name', 'self.namespace_name', 'old_users'], {}), '(self.admin_user, self.public_tag_name, self.\n namespace_name, old_users)\n', (44469, 44545), False, 'from datastore import logic\n'), ((45432, 45481), 'unittest.mock.patch', 'mock.patch', (['"""datastore.logic.logger"""', 'mock_logger'], {}), "('datastore.logic.logger', mock_logger)\n", (45442, 45481), False, 'from unittest import mock\n'), ((45504, 45607), 'datastore.logic.remove_tag_users', 'logic.remove_tag_users', (['self.site_admin_user', 'self.public_tag_name', 'self.namespace_name', 'old_users'], {}), '(self.site_admin_user, self.public_tag_name, self.\n namespace_name, old_users)\n', (45526, 45607), False, 'from datastore import logic\n'), ((46400, 46499), 'datastore.logic.remove_tag_users', 'logic.remove_tag_users', (['self.normal_user', 'self.public_tag_name', 'self.namespace_name', 'old_users'], {}), '(self.normal_user, self.public_tag_name, self.\n namespace_name, old_users)\n', (46422, 46499), False, 'from datastore import logic\n'), ((46855, 46904), 'unittest.mock.patch', 'mock.patch', (['"""datastore.logic.logger"""', 'mock_logger'], {}), "('datastore.logic.logger', mock_logger)\n", (46865, 46904), False, 'from unittest import mock\n'), ((46927, 47026), 'datastore.logic.add_tag_readers', 'logic.add_tag_readers', (['self.admin_user', 'self.public_tag_name', 'self.namespace_name', 'new_readers'], {}), '(self.admin_user, self.public_tag_name, self.\n namespace_name, new_readers)\n', (46948, 47026), False, 'from datastore import logic\n'), ((47811, 47860), 'unittest.mock.patch', 'mock.patch', (['"""datastore.logic.logger"""', 'mock_logger'], {}), "('datastore.logic.logger', mock_logger)\n", (47821, 47860), False, 'from unittest import mock\n'), ((47883, 47987), 
'datastore.logic.add_tag_readers', 'logic.add_tag_readers', (['self.site_admin_user', 'self.public_tag_name', 'self.namespace_name', 'new_readers'], {}), '(self.site_admin_user, self.public_tag_name, self.\n namespace_name, new_readers)\n', (47904, 47987), False, 'from datastore import logic\n'), ((48810, 48910), 'datastore.logic.add_tag_readers', 'logic.add_tag_readers', (['self.normal_user', 'self.public_tag_name', 'self.namespace_name', 'new_readers'], {}), '(self.normal_user, self.public_tag_name, self.\n namespace_name, new_readers)\n', (48831, 48910), False, 'from datastore import logic\n'), ((49449, 49498), 'unittest.mock.patch', 'mock.patch', (['"""datastore.logic.logger"""', 'mock_logger'], {}), "('datastore.logic.logger', mock_logger)\n", (49459, 49498), False, 'from unittest import mock\n'), ((49521, 49623), 'datastore.logic.remove_tag_readers', 'logic.remove_tag_readers', (['self.admin_user', 'self.public_tag_name', 'self.namespace_name', 'old_readers'], {}), '(self.admin_user, self.public_tag_name, self.\n namespace_name, old_readers)\n', (49545, 49623), False, 'from datastore import logic\n'), ((50536, 50585), 'unittest.mock.patch', 'mock.patch', (['"""datastore.logic.logger"""', 'mock_logger'], {}), "('datastore.logic.logger', mock_logger)\n", (50546, 50585), False, 'from unittest import mock\n'), ((50608, 50715), 'datastore.logic.remove_tag_readers', 'logic.remove_tag_readers', (['self.site_admin_user', 'self.public_tag_name', 'self.namespace_name', 'old_readers'], {}), '(self.site_admin_user, self.public_tag_name, self.\n namespace_name, old_readers)\n', (50632, 50715), False, 'from datastore import logic\n'), ((51530, 51633), 'datastore.logic.remove_tag_readers', 'logic.remove_tag_readers', (['self.normal_user', 'self.public_tag_name', 'self.namespace_name', 'old_readers'], {}), '(self.normal_user, self.public_tag_name, self.\n namespace_name, old_readers)\n', (51554, 51633), False, 'from datastore import logic\n'), ((52378, 52426), 
'datastore.logic.check_users_tags', 'logic.check_users_tags', (['self.admin_user', 'tag_set'], {}), '(self.admin_user, tag_set)\n', (52400, 52426), False, 'from datastore import logic\n'), ((53072, 53125), 'datastore.logic.check_users_tags', 'logic.check_users_tags', (['self.site_admin_user', 'tag_set'], {}), '(self.site_admin_user, tag_set)\n', (53094, 53125), False, 'from datastore import logic\n'), ((53820, 53869), 'datastore.logic.check_users_tags', 'logic.check_users_tags', (['self.normal_user', 'tag_set'], {}), '(self.normal_user, tag_set)\n', (53842, 53869), False, 'from datastore import logic\n'), ((54501, 54547), 'datastore.logic.check_users_tags', 'logic.check_users_tags', (['self.tag_user', 'tag_set'], {}), '(self.tag_user, tag_set)\n', (54523, 54547), False, 'from datastore import logic\n'), ((54763, 54809), 'datastore.logic.check_users_tags', 'logic.check_users_tags', (['self.tag_user', 'tag_set'], {}), '(self.tag_user, tag_set)\n', (54785, 54809), False, 'from datastore import logic\n'), ((55593, 55639), 'datastore.logic.check_users_tags', 'logic.check_users_tags', (['self.tag_user', 'tag_set'], {}), '(self.tag_user, tag_set)\n', (55615, 55639), False, 'from datastore import logic\n'), ((56323, 56371), 'datastore.logic.check_users_tags', 'logic.check_users_tags', (['self.tag_reader', 'tag_set'], {}), '(self.tag_reader, tag_set)\n', (56345, 56371), False, 'from datastore import logic\n'), ((57041, 57091), 'datastore.logic.check_readers_tags', 'logic.check_readers_tags', (['self.admin_user', 'tag_set'], {}), '(self.admin_user, tag_set)\n', (57065, 57091), False, 'from datastore import logic\n'), ((57748, 57803), 'datastore.logic.check_readers_tags', 'logic.check_readers_tags', (['self.site_admin_user', 'tag_set'], {}), '(self.site_admin_user, tag_set)\n', (57772, 57803), False, 'from datastore import logic\n'), ((58505, 58556), 'datastore.logic.check_readers_tags', 'logic.check_readers_tags', (['self.normal_user', 'tag_set'], {}), '(self.normal_user, 
tag_set)\n', (58529, 58556), False, 'from datastore import logic\n'), ((59114, 59165), 'datastore.logic.check_readers_tags', 'logic.check_readers_tags', (['self.normal_user', 'tag_set'], {}), '(self.normal_user, tag_set)\n', (59138, 59165), False, 'from datastore import logic\n'), ((59803, 59853), 'datastore.logic.check_readers_tags', 'logic.check_readers_tags', (['self.tag_reader', 'tag_set'], {}), '(self.tag_reader, tag_set)\n', (59827, 59853), False, 'from datastore import logic\n'), ((60069, 60119), 'datastore.logic.check_readers_tags', 'logic.check_readers_tags', (['self.tag_reader', 'tag_set'], {}), '(self.tag_reader, tag_set)\n', (60093, 60119), False, 'from datastore import logic\n'), ((60976, 61026), 'datastore.logic.check_readers_tags', 'logic.check_readers_tags', (['self.tag_reader', 'tag_set'], {}), '(self.tag_reader, tag_set)\n', (61000, 61026), False, 'from datastore import logic\n'), ((61628, 61676), 'datastore.logic.check_readers_tags', 'logic.check_readers_tags', (['self.tag_user', 'tag_set'], {}), '(self.tag_user, tag_set)\n', (61652, 61676), False, 'from datastore import logic\n'), ((7343, 7382), 'datastore.models.Namespace.objects.get', 'models.Namespace.objects.get', ([], {'name': 'name'}), '(name=name)\n', (7371, 7382), False, 'from datastore import models\n')]
|
from typing import Dict, Tuple, TYPE_CHECKING
import numpy as np
from ..continuous_sensor import ContinuousSensor
if TYPE_CHECKING:
from task import StackingTask
# TODO: This should be a DiscreteSensor
class CurrentPartReleasedSensor(ContinuousSensor["StackingEnv"]):
def __init__(self, part_release_distance: float = 0.05, **kwargs):
super().__init__(normalize=False, clip=False, **kwargs)
self.__part_release_distance = part_release_distance
self.__observation_name = "current_part_released"
def _get_limits(self) -> Dict[str, Tuple[np.ndarray, np.ndarray]]:
return {self.__observation_name: (np.zeros(1), np.ones(1))}
def _reset_unnormalized(self) -> Dict[str, np.ndarray]:
return self._observe_unnormalized()
def _observe_unnormalized(self) -> Dict[str, np.ndarray]:
part = self.task.current_part
robot = self.task.robot
# TODO: Fix typing
part_released = max(robot.finger_distances_to_object(part.scene_object)) \
> self.__part_release_distance
return {self.__observation_name: np.array([float(part_released)])}
|
[
"numpy.zeros",
"numpy.ones"
] |
[((645, 656), 'numpy.zeros', 'np.zeros', (['(1)'], {}), '(1)\n', (653, 656), True, 'import numpy as np\n'), ((658, 668), 'numpy.ones', 'np.ones', (['(1)'], {}), '(1)\n', (665, 668), True, 'import numpy as np\n')]
|
#!/usr/bin/env python
from json import dumps
from circuits.web import Controller, Server
def json(f):
def wrapper(self, *args, **kwargs):
return dumps(f(self, *args, **kwargs))
return wrapper
class Root(Controller):
@json
def getrange(self, limit=4):
return list(range(int(limit)))
app = Server(("0.0.0.0", 8000))
Root().register(app)
app.run()
|
[
"circuits.web.Server"
] |
[((328, 353), 'circuits.web.Server', 'Server', (["('0.0.0.0', 8000)"], {}), "(('0.0.0.0', 8000))\n", (334, 353), False, 'from circuits.web import Controller, Server\n')]
|
# date: 2019年11月3日
# author: lw
# e-mail: <EMAIL>
# description: 实现后端的处理返回函数,基于flask包
# -*- coding: utf-8 -*-
from flask import Flask, jsonify, render_template,request
from flask_cors import *
from query_op import *
import json
app = Flask(__name__) # 实例化app对象
CORS(app,supports_credentials=True) # 解决跨域请求无响应问题
# 响应文本详情请求
@app.route('/detail',methods=['GET','POST'])
def author_detail():
author_name = dict(request.form)["author"][0]
f_s = open("./create/song/authors.json",'r',encoding='utf-8')
temp = json.load(f_s)
for k in range(len(temp)):
if temp[k]["name"] == author_name:
return temp[k]["desc"]
f_t = open("./create/tang/authors.json",'r',encoding='utf-8')
temp1 = json.load(f_t)
for k in range(len(temp1)):
if temp1[k]["name"] == author_name:
return temp1[k]["desc"]
return "没有找到相关信息!"
@app.route('/',methods=['GET','POST'])
def query_poets():
request_items = dict(request.form)
print(request_items)
print(request_items["sentence"][0],request_items["type_s"][0])
result = query(request_items["sentence"][0],request_items["type_s"][0])
number = len(result)
if(number>1000):
result = result[0:1000]
print(result)
return jsonify({"number":number,"result":convert_result_for_html(result,request_items["sentence"][0],request_items["type_s"][0])})
if __name__ == '__main__':
app.run(debug=True,use_reloader=False)
|
[
"flask.Flask",
"json.load"
] |
[((237, 252), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (242, 252), False, 'from flask import Flask, jsonify, render_template, request\n'), ((522, 536), 'json.load', 'json.load', (['f_s'], {}), '(f_s)\n', (531, 536), False, 'import json\n'), ((724, 738), 'json.load', 'json.load', (['f_t'], {}), '(f_t)\n', (733, 738), False, 'import json\n')]
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from heat.engine import environment
from heat.engine import resources
from heat.tests import generic_resource
from heat.tests import common
class EnvironmentTest(common.HeatTestCase):
def setUp(self):
super(EnvironmentTest, self).setUp()
self.g_env = resources.global_env()
def test_load_old_parameters(self):
old = {u'a': u'ff', u'b': u'ss'}
expected = {u'parameters': old,
u'resource_registry': {u'resources': {}}}
env = environment.Environment(old)
self.assertEqual(expected, env.user_env_as_dict())
def test_load_new_env(self):
new_env = {u'parameters': {u'a': u'ff', u'b': u'ss'},
u'resource_registry': {u'OS::Food': u'fruity.yaml',
u'resources': {}}}
env = environment.Environment(new_env)
self.assertEqual(new_env, env.user_env_as_dict())
def test_global_registry(self):
self.g_env.register_class('CloudX::Compute::Server',
generic_resource.GenericResource)
new_env = {u'parameters': {u'a': u'ff', u'b': u'ss'},
u'resource_registry': {u'OS::*': 'CloudX::*'}}
env = environment.Environment(new_env)
self.assertEqual('CloudX::Compute::Server',
env.get_resource_info('OS::Compute::Server',
'my_db_server').name)
def test_map_one_resource_type(self):
new_env = {u'parameters': {u'a': u'ff', u'b': u'ss'},
u'resource_registry': {u'resources':
{u'my_db_server':
{u'OS::DBInstance': 'db.yaml'}}}}
env = environment.Environment(new_env)
info = env.get_resource_info('OS::DBInstance', 'my_db_server')
self.assertEqual('db.yaml', info.value)
def test_map_all_resources_of_type(self):
self.g_env.register_class('OS::Nova::FloatingIP',
generic_resource.GenericResource)
new_env = {u'parameters': {u'a': u'ff', u'b': u'ss'},
u'resource_registry':
{u'OS::Networking::FloatingIP': 'OS::Nova::FloatingIP',
u'OS::Loadbalancer': 'lb.yaml'}}
env = environment.Environment(new_env)
self.assertEqual('OS::Nova::FloatingIP',
env.get_resource_info('OS::Networking::FloatingIP',
'my_fip').name)
def test_resource_sort_order_len(self):
new_env = {u'resource_registry': {u'resources': {u'my_fip': {
u'OS::Networking::FloatingIP': 'ip.yaml'}}},
u'OS::Networking::FloatingIP': 'OS::Nova::FloatingIP'}
env = environment.Environment(new_env)
self.assertEqual('ip.yaml',
env.get_resource_info('OS::Networking::FloatingIP',
'my_fip').value)
def test_env_load(self):
new_env = {u'resource_registry': {u'resources': {u'my_fip': {
u'OS::Networking::FloatingIP': 'ip.yaml'}}}}
env = environment.Environment()
self.assertEqual(None,
env.get_resource_info('OS::Networking::FloatingIP',
'my_fip'))
env.load(new_env)
self.assertEqual('ip.yaml',
env.get_resource_info('OS::Networking::FloatingIP',
'my_fip').value)
class GlobalEnvLoadingTest(common.HeatTestCase):
def test_happy_path(self):
list_dir = 'heat.engine.resources._list_environment_files'
with mock.patch(list_dir) as m_ldir:
m_ldir.return_value = ['a.yaml']
env_dir = '/etc_etc/heat/enviroment.d'
env_content = '{"resource_registry": {}}'
with mock.patch('heat.engine.resources.open',
mock.mock_open(read_data=env_content),
create=True) as m_open:
resources._load_global_environment(env_dir)
m_ldir.assert_called_once_with(env_dir)
m_open.assert_called_once_with('%s/a.yaml' % env_dir)
def test_empty_env_dir(self):
list_dir = 'heat.engine.resources._list_environment_files'
with mock.patch(list_dir) as m_ldir:
m_ldir.return_value = []
env_dir = '/etc_etc/heat/enviroment.d'
resources._load_global_environment(env_dir)
m_ldir.assert_called_once_with(env_dir)
def test_continue_on_ioerror(self):
"""assert we get all files processed even if there are
processing exceptions.
"""
list_dir = 'heat.engine.resources._list_environment_files'
with mock.patch(list_dir) as m_ldir:
m_ldir.return_value = ['a.yaml', 'b.yaml']
env_dir = '/etc_etc/heat/enviroment.d'
env_content = '{}'
with mock.patch('heat.engine.resources.open',
mock.mock_open(read_data=env_content),
create=True) as m_open:
m_open.side_effect = IOError
resources._load_global_environment(env_dir)
m_ldir.assert_called_once_with(env_dir)
expected = [mock.call('%s/a.yaml' % env_dir),
mock.call('%s/b.yaml' % env_dir)]
self.assertEqual(expected, m_open.call_args_list)
def test_continue_on_parse_error(self):
"""assert we get all files processed even if there are
processing exceptions.
"""
list_dir = 'heat.engine.resources._list_environment_files'
with mock.patch(list_dir) as m_ldir:
m_ldir.return_value = ['a.yaml', 'b.yaml']
env_dir = '/etc_etc/heat/enviroment.d'
env_content = '{@$%#$%'
with mock.patch('heat.engine.resources.open',
mock.mock_open(read_data=env_content),
create=True) as m_open:
resources._load_global_environment(env_dir)
m_ldir.assert_called_once_with(env_dir)
expected = [mock.call('%s/a.yaml' % env_dir),
mock.call('%s/b.yaml' % env_dir)]
self.assertEqual(expected, m_open.call_args_list)
|
[
"mock.call",
"mock.mock_open",
"heat.engine.environment.Environment",
"mock.patch",
"heat.engine.resources.global_env",
"heat.engine.resources._load_global_environment"
] |
[((907, 929), 'heat.engine.resources.global_env', 'resources.global_env', ([], {}), '()\n', (927, 929), False, 'from heat.engine import resources\n'), ((1128, 1156), 'heat.engine.environment.Environment', 'environment.Environment', (['old'], {}), '(old)\n', (1151, 1156), False, 'from heat.engine import environment\n'), ((1458, 1490), 'heat.engine.environment.Environment', 'environment.Environment', (['new_env'], {}), '(new_env)\n', (1481, 1490), False, 'from heat.engine import environment\n'), ((1857, 1889), 'heat.engine.environment.Environment', 'environment.Environment', (['new_env'], {}), '(new_env)\n', (1880, 1889), False, 'from heat.engine import environment\n'), ((2393, 2425), 'heat.engine.environment.Environment', 'environment.Environment', (['new_env'], {}), '(new_env)\n', (2416, 2425), False, 'from heat.engine import environment\n'), ((2966, 2998), 'heat.engine.environment.Environment', 'environment.Environment', (['new_env'], {}), '(new_env)\n', (2989, 2998), False, 'from heat.engine import environment\n'), ((3442, 3474), 'heat.engine.environment.Environment', 'environment.Environment', (['new_env'], {}), '(new_env)\n', (3465, 3474), False, 'from heat.engine import environment\n'), ((3824, 3849), 'heat.engine.environment.Environment', 'environment.Environment', ([], {}), '()\n', (3847, 3849), False, 'from heat.engine import environment\n'), ((4383, 4403), 'mock.patch', 'mock.patch', (['list_dir'], {}), '(list_dir)\n', (4393, 4403), False, 'import mock\n'), ((5029, 5049), 'mock.patch', 'mock.patch', (['list_dir'], {}), '(list_dir)\n', (5039, 5049), False, 'import mock\n'), ((5161, 5204), 'heat.engine.resources._load_global_environment', 'resources._load_global_environment', (['env_dir'], {}), '(env_dir)\n', (5195, 5204), False, 'from heat.engine import resources\n'), ((5481, 5501), 'mock.patch', 'mock.patch', (['list_dir'], {}), '(list_dir)\n', (5491, 5501), False, 'import mock\n'), ((6002, 6034), 'mock.call', 'mock.call', (["('%s/a.yaml' % env_dir)"], 
{}), "('%s/a.yaml' % env_dir)\n", (6011, 6034), False, 'import mock\n'), ((6056, 6088), 'mock.call', 'mock.call', (["('%s/b.yaml' % env_dir)"], {}), "('%s/b.yaml' % env_dir)\n", (6065, 6088), False, 'import mock\n'), ((6379, 6399), 'mock.patch', 'mock.patch', (['list_dir'], {}), '(list_dir)\n', (6389, 6399), False, 'import mock\n'), ((6860, 6892), 'mock.call', 'mock.call', (["('%s/a.yaml' % env_dir)"], {}), "('%s/a.yaml' % env_dir)\n", (6869, 6892), False, 'import mock\n'), ((6914, 6946), 'mock.call', 'mock.call', (["('%s/b.yaml' % env_dir)"], {}), "('%s/b.yaml' % env_dir)\n", (6923, 6946), False, 'import mock\n'), ((4759, 4802), 'heat.engine.resources._load_global_environment', 'resources._load_global_environment', (['env_dir'], {}), '(env_dir)\n', (4793, 4802), False, 'from heat.engine import resources\n'), ((5889, 5932), 'heat.engine.resources._load_global_environment', 'resources._load_global_environment', (['env_dir'], {}), '(env_dir)\n', (5923, 5932), False, 'from heat.engine import resources\n'), ((6747, 6790), 'heat.engine.resources._load_global_environment', 'resources._load_global_environment', (['env_dir'], {}), '(env_dir)\n', (6781, 6790), False, 'from heat.engine import resources\n'), ((4652, 4689), 'mock.mock_open', 'mock.mock_open', ([], {'read_data': 'env_content'}), '(read_data=env_content)\n', (4666, 4689), False, 'import mock\n'), ((5737, 5774), 'mock.mock_open', 'mock.mock_open', ([], {'read_data': 'env_content'}), '(read_data=env_content)\n', (5751, 5774), False, 'import mock\n'), ((6640, 6677), 'mock.mock_open', 'mock.mock_open', ([], {'read_data': 'env_content'}), '(read_data=env_content)\n', (6654, 6677), False, 'import mock\n')]
|
import json
import random
from collections import defaultdict
from dataclasses import dataclass
from typing import Optional, Dict, Union
import pkg_resources
from nextcord import TextChannel, Thread
from nextcord.ext import commands
from shlimpbot.cogs.config import is_config_channel
@dataclass
class ChannelState:
word: Optional[str] = None
guesses: int = 0
class Wordle(commands.Cog):
"""Wordle"""
def __init__(self, bot):
self.bot: commands.Bot = bot
self.state: Dict[int, ChannelState] = defaultdict(ChannelState)
@commands.group()
async def wordle(self, ctx: commands.Context):
pass
@wordle.command(name='cheat')
@is_config_channel('wordle.channel')
async def get_word(self, ctx: commands.Context):
current_state = self.state[ctx.channel.id]
await ctx.send(current_state.word or "No game in progress")
@wordle.command(name='start')
@is_config_channel('wordle.channel')
async def start(self, ctx: commands.Context, *, length: int = 5):
current_state = self.state[ctx.channel.id]
if current_state.word:
await ctx.send('Game already running!')
return
with pkg_resources.resource_stream(__name__, 'data/answers.json') as answer_file:
answers = json.load(answer_file)
if length > 7 or length < 4:
await ctx.send('Sorry I only know words between 4 and 7 letters long')
return
current_state.word = random.choice(answers[str(length)])
await ctx.send('⬛' * len(current_state.word))
@wordle.command('guess')
@is_config_channel('wordle.channel')
async def guess(self, ctx: commands.Context, *, guess: str):
current_state = self.state[ctx.channel.id]
if not current_state.word:
await ctx.send('Game not running. Use command `wordle start` to begin')
return
with pkg_resources.resource_stream(__name__, 'data/words.json') as guess_file:
valid_guesses = json.load(guess_file)
if guess not in valid_guesses[str(len(current_state.word))]:
await ctx.reply('Invalid guess')
return
current_state.guesses += 1
response = ['⬛'] * len(current_state.word)
letter_counts = {letter: current_state.word.count(letter) for letter in current_state.word}
for idx, letter in enumerate(guess):
if current_state.word[idx] == letter:
response[idx] = '🟩'
letter_counts[letter] -= 1
for idx, letter in enumerate(guess):
if current_state.word[idx] != letter and letter_counts.get(letter, 0) > 0:
response[idx] = '🟨'
letter_counts[letter] -= 1
response = ''.join(response)
if current_state.guesses == 6 and not guess == current_state.word:
response += f"\nGame over, the word was {current_state.word}"
if guess == current_state.word:
response += '\nCongratulations!'
await ctx.reply(response)
if current_state.guesses == 6 or guess == current_state.word:
self.state.pop(ctx.channel.id)
@wordle.command('channel')
@commands.guild_only()
@commands.has_guild_permissions(manage_messages=True)
async def set_channel(self, ctx: commands.Context, *, channel: Union[TextChannel, Thread]):
config = self.bot.get_cog('Config')
await config.set_guild(ctx, 'wordle.channel', data=channel.id)
await ctx.send(f'Wordle channel set to {channel.mention}')
def setup(bot):
bot.add_cog(Wordle(bot))
|
[
"nextcord.ext.commands.has_guild_permissions",
"nextcord.ext.commands.group",
"json.load",
"pkg_resources.resource_stream",
"collections.defaultdict",
"nextcord.ext.commands.guild_only",
"shlimpbot.cogs.config.is_config_channel"
] |
[((564, 580), 'nextcord.ext.commands.group', 'commands.group', ([], {}), '()\n', (578, 580), False, 'from nextcord.ext import commands\n'), ((685, 720), 'shlimpbot.cogs.config.is_config_channel', 'is_config_channel', (['"""wordle.channel"""'], {}), "('wordle.channel')\n", (702, 720), False, 'from shlimpbot.cogs.config import is_config_channel\n'), ((933, 968), 'shlimpbot.cogs.config.is_config_channel', 'is_config_channel', (['"""wordle.channel"""'], {}), "('wordle.channel')\n", (950, 968), False, 'from shlimpbot.cogs.config import is_config_channel\n'), ((1622, 1657), 'shlimpbot.cogs.config.is_config_channel', 'is_config_channel', (['"""wordle.channel"""'], {}), "('wordle.channel')\n", (1639, 1657), False, 'from shlimpbot.cogs.config import is_config_channel\n'), ((3219, 3240), 'nextcord.ext.commands.guild_only', 'commands.guild_only', ([], {}), '()\n', (3238, 3240), False, 'from nextcord.ext import commands\n'), ((3246, 3298), 'nextcord.ext.commands.has_guild_permissions', 'commands.has_guild_permissions', ([], {'manage_messages': '(True)'}), '(manage_messages=True)\n', (3276, 3298), False, 'from nextcord.ext import commands\n'), ((532, 557), 'collections.defaultdict', 'defaultdict', (['ChannelState'], {}), '(ChannelState)\n', (543, 557), False, 'from collections import defaultdict\n'), ((1206, 1266), 'pkg_resources.resource_stream', 'pkg_resources.resource_stream', (['__name__', '"""data/answers.json"""'], {}), "(__name__, 'data/answers.json')\n", (1235, 1266), False, 'import pkg_resources\n'), ((1305, 1327), 'json.load', 'json.load', (['answer_file'], {}), '(answer_file)\n', (1314, 1327), False, 'import json\n'), ((1926, 1984), 'pkg_resources.resource_stream', 'pkg_resources.resource_stream', (['__name__', '"""data/words.json"""'], {}), "(__name__, 'data/words.json')\n", (1955, 1984), False, 'import pkg_resources\n'), ((2028, 2049), 'json.load', 'json.load', (['guess_file'], {}), '(guess_file)\n', (2037, 2049), False, 'import json\n')]
|
# Generated by Django 2.1.2 on 2019-01-30 08:39
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('extrequests', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='profileupdaterequest',
name='domains',
),
migrations.RemoveField(
model_name='profileupdaterequest',
name='languages',
),
migrations.RemoveField(
model_name='profileupdaterequest',
name='lessons',
),
migrations.DeleteModel(
name='ProfileUpdateRequest',
),
]
|
[
"django.db.migrations.RemoveField",
"django.db.migrations.DeleteModel"
] |
[((220, 293), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""profileupdaterequest"""', 'name': '"""domains"""'}), "(model_name='profileupdaterequest', name='domains')\n", (242, 293), False, 'from django.db import migrations\n'), ((338, 413), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""profileupdaterequest"""', 'name': '"""languages"""'}), "(model_name='profileupdaterequest', name='languages')\n", (360, 413), False, 'from django.db import migrations\n'), ((458, 531), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""profileupdaterequest"""', 'name': '"""lessons"""'}), "(model_name='profileupdaterequest', name='lessons')\n", (480, 531), False, 'from django.db import migrations\n'), ((576, 627), 'django.db.migrations.DeleteModel', 'migrations.DeleteModel', ([], {'name': '"""ProfileUpdateRequest"""'}), "(name='ProfileUpdateRequest')\n", (598, 627), False, 'from django.db import migrations\n')]
|
import paddle
import paddle.fluid as fluid
from .operations import OPS
def AuxiliaryHeadCIFAR(inputs, C, class_num):
print('AuxiliaryHeadCIFAR : inputs-shape : {:}'.format(inputs.shape))
temp = fluid.layers.relu(inputs)
temp = fluid.layers.pool2d(
temp, pool_size=5, pool_stride=3, pool_padding=0, pool_type='avg')
temp = fluid.layers.conv2d(
temp,
filter_size=1,
num_filters=128,
stride=1,
padding=0,
act=None,
bias_attr=False)
temp = fluid.layers.batch_norm(input=temp, act='relu', bias_attr=None)
temp = fluid.layers.conv2d(
temp,
filter_size=1,
num_filters=768,
stride=2,
padding=0,
act=None,
bias_attr=False)
temp = fluid.layers.batch_norm(input=temp, act='relu', bias_attr=None)
print('AuxiliaryHeadCIFAR : last---shape : {:}'.format(temp.shape))
predict = fluid.layers.fc(input=temp, size=class_num, act='softmax')
return predict
def InferCell(name, inputs_prev_prev, inputs_prev, genotype, C_prev_prev,
C_prev, C, reduction, reduction_prev):
print(
'[{:}] C_prev_prev={:} C_prev={:}, C={:}, reduction_prev={:}, reduction={:}'.
format(name, C_prev_prev, C_prev, C, reduction_prev, reduction))
print('inputs_prev_prev : {:}'.format(inputs_prev_prev.shape))
print('inputs_prev : {:}'.format(inputs_prev.shape))
inputs_prev_prev = OPS['skip_connect'](inputs_prev_prev, C_prev_prev, C, 2
if reduction_prev else 1)
inputs_prev = OPS['skip_connect'](inputs_prev, C_prev, C, 1)
print('inputs_prev_prev : {:}'.format(inputs_prev_prev.shape))
print('inputs_prev : {:}'.format(inputs_prev.shape))
if reduction: step_ops, concat = genotype.reduce, genotype.reduce_concat
else: step_ops, concat = genotype.normal, genotype.normal_concat
states = [inputs_prev_prev, inputs_prev]
for istep, operations in enumerate(step_ops):
op_a, op_b = operations
# the first operation
#print ('-->>[{:}/{:}] [{:}] + [{:}]'.format(istep, len(step_ops), op_a, op_b))
stride = 2 if reduction and op_a[1] < 2 else 1
tensor1 = OPS[op_a[0]](states[op_a[1]], C, C, stride)
stride = 2 if reduction and op_b[1] < 2 else 1
tensor2 = OPS[op_b[0]](states[op_b[1]], C, C, stride)
state = fluid.layers.elementwise_add(x=tensor1, y=tensor2, act=None)
assert tensor1.shape == tensor2.shape, 'invalid shape {:} vs. {:}'.format(
tensor1.shape, tensor2.shape)
print('-->>[{:}/{:}] tensor={:} from {:} + {:}'.format(
istep, len(step_ops), state.shape, tensor1.shape, tensor2.shape))
states.append(state)
states_to_cat = [states[x] for x in concat]
outputs = fluid.layers.concat(states_to_cat, axis=1)
print('-->> output-shape : {:} from concat={:}'.format(outputs.shape,
concat))
return outputs
# NASCifarNet(inputs, 36, 6, 3, 10, 'xxx', True)
def NASCifarNet(ipt, C, N, stem_multiplier, class_num, genotype, auxiliary):
# cifar head module
C_curr = stem_multiplier * C
stem = fluid.layers.conv2d(
ipt,
filter_size=3,
num_filters=C_curr,
stride=1,
padding=1,
act=None,
bias_attr=False)
stem = fluid.layers.batch_norm(input=stem, act=None, bias_attr=None)
print('stem-shape : {:}'.format(stem.shape))
# N + 1 + N + 1 + N cells
layer_channels = [C] * N + [C * 2] + [C * 2] * N + [C * 4] + [C * 4] * N
layer_reductions = [False] * N + [True] + [False] * N + [True] + [False] * N
C_prev_prev, C_prev, C_curr = C_curr, C_curr, C
reduction_prev = False
auxiliary_pred = None
cell_results = [stem, stem]
for index, (C_curr,
reduction) in enumerate(zip(layer_channels, layer_reductions)):
xstr = '{:02d}/{:02d}'.format(index, len(layer_channels))
cell_result = InferCell(xstr, cell_results[-2], cell_results[-1],
genotype, C_prev_prev, C_prev, C_curr,
reduction, reduction_prev)
reduction_prev = reduction
C_prev_prev, C_prev = C_prev, cell_result.shape[1]
cell_results.append(cell_result)
if auxiliary and reduction and C_curr == C * 4:
auxiliary_pred = AuxiliaryHeadCIFAR(cell_result, C_prev, class_num)
global_P = fluid.layers.pool2d(
input=cell_results[-1], pool_size=8, pool_type='avg', pool_stride=1)
predicts = fluid.layers.fc(input=global_P, size=class_num, act='softmax')
print('predict-shape : {:}'.format(predicts.shape))
if auxiliary_pred is None:
return predicts
else:
return [predicts, auxiliary_pred]
|
[
"paddle.fluid.layers.concat",
"paddle.fluid.layers.relu",
"paddle.fluid.layers.conv2d",
"paddle.fluid.layers.batch_norm",
"paddle.fluid.layers.fc",
"paddle.fluid.layers.elementwise_add",
"paddle.fluid.layers.pool2d"
] |
[((204, 229), 'paddle.fluid.layers.relu', 'fluid.layers.relu', (['inputs'], {}), '(inputs)\n', (221, 229), True, 'import paddle.fluid as fluid\n'), ((241, 331), 'paddle.fluid.layers.pool2d', 'fluid.layers.pool2d', (['temp'], {'pool_size': '(5)', 'pool_stride': '(3)', 'pool_padding': '(0)', 'pool_type': '"""avg"""'}), "(temp, pool_size=5, pool_stride=3, pool_padding=0,\n pool_type='avg')\n", (260, 331), True, 'import paddle.fluid as fluid\n'), ((348, 458), 'paddle.fluid.layers.conv2d', 'fluid.layers.conv2d', (['temp'], {'filter_size': '(1)', 'num_filters': '(128)', 'stride': '(1)', 'padding': '(0)', 'act': 'None', 'bias_attr': '(False)'}), '(temp, filter_size=1, num_filters=128, stride=1, padding\n =0, act=None, bias_attr=False)\n', (367, 458), True, 'import paddle.fluid as fluid\n'), ((522, 585), 'paddle.fluid.layers.batch_norm', 'fluid.layers.batch_norm', ([], {'input': 'temp', 'act': '"""relu"""', 'bias_attr': 'None'}), "(input=temp, act='relu', bias_attr=None)\n", (545, 585), True, 'import paddle.fluid as fluid\n'), ((597, 707), 'paddle.fluid.layers.conv2d', 'fluid.layers.conv2d', (['temp'], {'filter_size': '(1)', 'num_filters': '(768)', 'stride': '(2)', 'padding': '(0)', 'act': 'None', 'bias_attr': '(False)'}), '(temp, filter_size=1, num_filters=768, stride=2, padding\n =0, act=None, bias_attr=False)\n', (616, 707), True, 'import paddle.fluid as fluid\n'), ((771, 834), 'paddle.fluid.layers.batch_norm', 'fluid.layers.batch_norm', ([], {'input': 'temp', 'act': '"""relu"""', 'bias_attr': 'None'}), "(input=temp, act='relu', bias_attr=None)\n", (794, 834), True, 'import paddle.fluid as fluid\n'), ((921, 979), 'paddle.fluid.layers.fc', 'fluid.layers.fc', ([], {'input': 'temp', 'size': 'class_num', 'act': '"""softmax"""'}), "(input=temp, size=class_num, act='softmax')\n", (936, 979), True, 'import paddle.fluid as fluid\n'), ((2829, 2871), 'paddle.fluid.layers.concat', 'fluid.layers.concat', (['states_to_cat'], {'axis': '(1)'}), '(states_to_cat, axis=1)\n', (2848, 
2871), True, 'import paddle.fluid as fluid\n'), ((3229, 3340), 'paddle.fluid.layers.conv2d', 'fluid.layers.conv2d', (['ipt'], {'filter_size': '(3)', 'num_filters': 'C_curr', 'stride': '(1)', 'padding': '(1)', 'act': 'None', 'bias_attr': '(False)'}), '(ipt, filter_size=3, num_filters=C_curr, stride=1,\n padding=1, act=None, bias_attr=False)\n', (3248, 3340), True, 'import paddle.fluid as fluid\n'), ((3405, 3466), 'paddle.fluid.layers.batch_norm', 'fluid.layers.batch_norm', ([], {'input': 'stem', 'act': 'None', 'bias_attr': 'None'}), '(input=stem, act=None, bias_attr=None)\n', (3428, 3466), True, 'import paddle.fluid as fluid\n'), ((4503, 4595), 'paddle.fluid.layers.pool2d', 'fluid.layers.pool2d', ([], {'input': 'cell_results[-1]', 'pool_size': '(8)', 'pool_type': '"""avg"""', 'pool_stride': '(1)'}), "(input=cell_results[-1], pool_size=8, pool_type='avg',\n pool_stride=1)\n", (4522, 4595), True, 'import paddle.fluid as fluid\n'), ((4616, 4678), 'paddle.fluid.layers.fc', 'fluid.layers.fc', ([], {'input': 'global_P', 'size': 'class_num', 'act': '"""softmax"""'}), "(input=global_P, size=class_num, act='softmax')\n", (4631, 4678), True, 'import paddle.fluid as fluid\n'), ((2410, 2470), 'paddle.fluid.layers.elementwise_add', 'fluid.layers.elementwise_add', ([], {'x': 'tensor1', 'y': 'tensor2', 'act': 'None'}), '(x=tensor1, y=tensor2, act=None)\n', (2438, 2470), True, 'import paddle.fluid as fluid\n')]
|
from math import sqrt
import copy
import sys
from utils import tools
from utils import config
log_file = open("trace.log","w")
old_stdout = sys.stdout
COUNTER = config.RECURSION_LIMIT
def find_solution(grid, n, i, j, pos, pre, back_depth):
global COUNTER
COUNTER -= 1
if (COUNTER == 0):
COUNTER = config.RECURSION_LIMIT #RESET COUNTER
return grid, False, False
if grid[i][j] == 0: #blank square
if [i,j] not in pre:
pre.append([i,j])
pos.append(0) #first, try the very first possibility among `possibilities`
log("\nactual-square:("+str(i)+","+str(j)+")")
digit = get_valid_digit(grid, n, i, j, pos[pre.index([i,j])])
if digit == 0:
#backtrack (if possible):
pre_i = pre[pre.index([i,j])-1][0]
pre_j = pre[pre.index([i,j])-1][1]
log("back-to-square:("+str(pre_i)+","+str(pre_j)+")")
log("back-depth:"+str(back_depth))
grid[pre_i][pre_j] = 0
pos[pre.index([pre_i,pre_j])] += 1
# The follwing if statement means that if we repeat searching for solutions for a square
# repeatedly, this means that the search engine is stuck at this position, an that
# no other options are possible. Then, this only means UNSATISFIABLE PROBLEM:
if pos[pre.index([pre_i,pre_j])] > len( get_all_valid_digits_so_far(grid, n, pre_i, pre_j) ):
log_file.close()
return grid, True, False #unsatisfiable
# backtrack:
return find_solution(grid, n, pre_i, pre_j, pos, pre, back_depth+1)
else:
# RESET all `pos` indexes to zero after resuming forward search:
for e in range(pre.index([i,j])+1, len(pos)):
pos[e] = 0
grid[i][j] = digit
if i == n-1 and j == n-1:
# Sudoku solved
log_file.close()
return grid, True, True
else:
square = get_next_blank_square(i, j)
if square != -1:
# `square` is of the format [i,j]
return find_solution(grid, n, square[0], square[1], pos, pre, 0)
else:
# Sudoku solved
log_file.close()
return grid, True, True
def store_blank_squares(grid, n):
global blank_squares
blank_squares = []
# add first grid square even if is initially filled. This is to avoid handling the existing or not of this square digit
if grid[0][0] != 0:
blank_squares.append([0,0])
for i in range(n):
for j in range(n):
if grid[i][j] == 0:
blank_squares.append([i,j])
def get_next_blank_square(i, j):
index_i_j = blank_squares.index([i,j])
if len(blank_squares) > index_i_j + 1:
return blank_squares[index_i_j+1]
else:
# reached the end of blank squares list
return -1
def get_valid_digit(sol, n, i, j, pos):
possibilities = get_all_valid_digits_so_far(sol, n, i, j)
log("possibilities:"+str(possibilities))
if len(possibilities) > pos:
log("picked-digit:"+str(possibilities[pos]))
return possibilities[pos]
else: return 0
def get_all_valid_digits_so_far(sol, n, i, j):
possibilities = []
for digit in range(1, n+1):
if is_valid_digit(sol, n, i, j, digit):
possibilities.append(digit)
return possibilities
# Checking the integrity of the grid in the presence of `digit` in square (i,j)
def is_valid_digit(sol, n, i, j, digit):
#checking the line
for pos_j in range(n):
if pos_j != j:
if sol[i][pos_j] == digit:
return False
#checking the column
for pos_i in range(n):
if pos_i != i:
if sol[pos_i][j] == digit:
return False
#checking the box (for n=9, the box itself is recongnized by int(i/3),int(j/3))
base_box_i = int(sqrt(n))*int(i/int(sqrt(n)))
base_box_j = int(sqrt(n))*int(j/int(sqrt(n)))
for ii in range(int(sqrt(n))):
index_ii = base_box_i + ii%int(sqrt(n))
for jj in range(int(sqrt(n))):
index_jj = base_box_j + jj%int(sqrt(n))
if index_ii != i and index_jj != j:
if sol[index_ii][index_jj] == digit:
return False
return True
# Checks the integrity of the grid: lines, cols, boxes
def check_integrity(grid, n):
for i in range(n):
for j in range(n):
if grid[i][j] != 0:
if not is_valid_digit(grid, n, i, j, grid[i][j]):
return False
return True
def log(message):
sys.stdout = log_file
print(message)
sys.stdout = old_stdout
|
[
"math.sqrt"
] |
[((3949, 3956), 'math.sqrt', 'sqrt', (['n'], {}), '(n)\n', (3953, 3956), False, 'from math import sqrt\n'), ((3999, 4006), 'math.sqrt', 'sqrt', (['n'], {}), '(n)\n', (4003, 4006), False, 'from math import sqrt\n'), ((4052, 4059), 'math.sqrt', 'sqrt', (['n'], {}), '(n)\n', (4056, 4059), False, 'from math import sqrt\n'), ((4139, 4146), 'math.sqrt', 'sqrt', (['n'], {}), '(n)\n', (4143, 4146), False, 'from math import sqrt\n'), ((3968, 3975), 'math.sqrt', 'sqrt', (['n'], {}), '(n)\n', (3972, 3975), False, 'from math import sqrt\n'), ((4018, 4025), 'math.sqrt', 'sqrt', (['n'], {}), '(n)\n', (4022, 4025), False, 'from math import sqrt\n'), ((4102, 4109), 'math.sqrt', 'sqrt', (['n'], {}), '(n)\n', (4106, 4109), False, 'from math import sqrt\n'), ((4193, 4200), 'math.sqrt', 'sqrt', (['n'], {}), '(n)\n', (4197, 4200), False, 'from math import sqrt\n')]
|
from flask import Blueprint, request
blueprint = Blueprint('users', __name__, url_prefix='/users')
@blueprint.route('', methods=['POST', 'GET'])
def create_list():
if request.method == 'POST':
return 'Create user'
return 'List users'
@blueprint.route('/<user_id>', methods=['GET', 'DELETE', 'PUT'])
def read_delete_update(user_id):
if request.method == 'GET':
return 'Read user %s' % user_id
elif request.method == 'DELETE':
return 'Delete user %s' % user_id
return 'Update user %s' % user_id
|
[
"flask.Blueprint"
] |
[((50, 99), 'flask.Blueprint', 'Blueprint', (['"""users"""', '__name__'], {'url_prefix': '"""/users"""'}), "('users', __name__, url_prefix='/users')\n", (59, 99), False, 'from flask import Blueprint, request\n')]
|
import os
import redis
from rq import Worker, Queue, Connection
listen = ["default"]
redis_url = 'redis://localhost:6379'
conn = redis.from_url(redis_url)
if __name__ == '__main__':
with Connection(conn):
worker = Worker(list(map(Queue, listen)))
worker.work()
|
[
"redis.from_url",
"rq.Connection"
] |
[((130, 155), 'redis.from_url', 'redis.from_url', (['redis_url'], {}), '(redis_url)\n', (144, 155), False, 'import redis\n'), ((193, 209), 'rq.Connection', 'Connection', (['conn'], {}), '(conn)\n', (203, 209), False, 'from rq import Worker, Queue, Connection\n')]
|
#定义枚举类
from enum import Enum
Month = Enum('Month',('Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec'))
#使用枚举
for name, member in Month.__members__.items():
print(name, '=>', member, ',', member.value)
from enum import Enum, unique
@unique #@unique装饰器可以帮助我们检查保证没有重复值。
class Weekday(Enum):
Sun = 0 #Sun的value被设定为0
Mon = 1
Tue = 2
Wed = 3
Thu = 4
Fri = 5
Sat = 6
#访问枚举值
day1 = Weekday.Sun
print(day1)
print(Weekday(4))
|
[
"enum.Enum"
] |
[((39, 142), 'enum.Enum', 'Enum', (['"""Month"""', "('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct',\n 'Nov', 'Dec')"], {}), "('Month', ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug',\n 'Sep', 'Oct', 'Nov', 'Dec'))\n", (43, 142), False, 'from enum import Enum, unique\n')]
|
from gym.envs.registration import register
from .environment import MarsLanderEnv
register(
id="MarsLander-v1",
entry_point=MarsLanderEnv,
)
|
[
"gym.envs.registration.register"
] |
[((84, 139), 'gym.envs.registration.register', 'register', ([], {'id': '"""MarsLander-v1"""', 'entry_point': 'MarsLanderEnv'}), "(id='MarsLander-v1', entry_point=MarsLanderEnv)\n", (92, 139), False, 'from gym.envs.registration import register\n')]
|
# Copyright (C) 2013 - <NAME> <<EMAIL>>
# This program is Free Software see LICENSE file for details
"""Package Control progress bar like
"""
import threading
import sublime
class ProgressBar(threading.Thread):
"""A progress bar animation that runs in other thread
"""
class Status(object):
NONE = None
SUCCESS = 'end'
FAILURE = 'fail'
TIMEOUT = 'timeout'
def __init__(self, messages):
threading.Thread.__init__(self)
self.messages = messages
self.addition = 1
self.die = False
def run(self):
"""Just run the thread
"""
sublime.set_timeout(lambda: self.update(0), 100)
def update(self, i):
"""Update the progress bar
"""
if self.die:
return
size = 8
pos = i % size
status = '{}={}'.format(' ' * pos, ' ' * ((size - 1) - pos))
sublime.status_message('{} [{}]'.format(
self.messages['start'], status)
)
if not (size - 1) - pos:
self.addition = -1
if not pos:
self.addition = 1
i += self.addition
sublime.set_timeout_async(lambda: self.update(i), 100)
def terminate(self, status=None):
"""Terminate this thread
"""
status = status or self.Status.SUCCESS
message = self.messages.get(status) or self.messages[self.Status.SUCCESS] # noqa
sublime.status_message(message)
self.die = True
|
[
"threading.Thread.__init__",
"sublime.status_message"
] |
[((450, 481), 'threading.Thread.__init__', 'threading.Thread.__init__', (['self'], {}), '(self)\n', (475, 481), False, 'import threading\n'), ((1452, 1483), 'sublime.status_message', 'sublime.status_message', (['message'], {}), '(message)\n', (1474, 1483), False, 'import sublime\n')]
|
from django.conf.urls import url
from django.http import Http404, HttpResponse
def view(request, *args, **kwargs):
if request.path == '/raise404/':
raise Http404
return HttpResponse('Hello!')
urlpatterns = [
url("^$", view, name="index"),
url("^(?P<slug>[^/]+)/$", view, name="detail"),
]
|
[
"django.http.HttpResponse",
"django.conf.urls.url"
] |
[((187, 209), 'django.http.HttpResponse', 'HttpResponse', (['"""Hello!"""'], {}), "('Hello!')\n", (199, 209), False, 'from django.http import Http404, HttpResponse\n'), ((232, 261), 'django.conf.urls.url', 'url', (['"""^$"""', 'view'], {'name': '"""index"""'}), "('^$', view, name='index')\n", (235, 261), False, 'from django.conf.urls import url\n'), ((267, 313), 'django.conf.urls.url', 'url', (['"""^(?P<slug>[^/]+)/$"""', 'view'], {'name': '"""detail"""'}), "('^(?P<slug>[^/]+)/$', view, name='detail')\n", (270, 313), False, 'from django.conf.urls import url\n')]
|
import asyncio
import re
import traceback
from typing import Union
import discord
from discord.ext.commands import AutoShardedBot, Cog
from discord_slash import ButtonStyle, ComponentContext, SlashContext, cog_ext
from discord_slash.utils import manage_components
from utils import punishments, utils
class ServerUtils(Cog):
"""
This class is supposed to contain a lot of server based stuff like "channel create/delete" or "user punish/unpunish"
"""
def __init__(self, bot):
self.bot: AutoShardedBot = bot
utils.LOGGER.debug(f"Successfully loaded cog {self.__class__.__name__}")
async def _get_role_permissions(self, ctx: ComponentContext, roleperm):
sel = manage_components.create_select(
placeholder="Choose the permissions you want for your role (1/2)",
min_values=1,
max_values=18,
options=[
manage_components.create_select_option(
label="add reactions to messages",
value="add_reactions",
),
manage_components.create_select_option(
label="attach files to messages",
value="attach_files",
),
manage_components.create_select_option(
label="ban members",
value="ban_members",
),
manage_components.create_select_option(
label="change own nickname",
value="change_nickname",
),
manage_components.create_select_option(
label="connect to voice channels",
value="connect",
),
manage_components.create_select_option(
label="create instant invite to a channel",
value="create_instant_invite",
),
manage_components.create_select_option(
label="deafen other members in voice channels",
value="deafen_members",
),
manage_components.create_select_option(
label="embed links in channels",
value="embed_links",
),
manage_components.create_select_option(
label="send external emojis",
value="external_emojis",
),
manage_components.create_select_option(
label="kick members",
value="kick_members",
),
manage_components.create_select_option(
label="manage channels on the server",
value="manage_channels",
),
manage_components.create_select_option(
label="manage emojis of the server",
value="manage_emojis",
),
manage_components.create_select_option(
label="manage guild",
value="manage_guild",
),
manage_components.create_select_option(
label="manage messages",
value="manage_messages",
),
manage_components.create_select_option(
label="manage all nicknames",
value="manage_nicknames",
),
manage_components.create_select_option(
label="manage the permission of roles",
value="manage_permissions",
),
manage_components.create_select_option(
label="manage roles and their permissions",
value="manage_roles",
),
manage_components.create_select_option(
label="manage webhooks", value="manage_webhooks"
),
],
)
selrow = manage_components.create_actionrow(sel)
await ctx.edit_origin(
content="Please choose the permissions you want to assign to the role (1/2)",
components=[selrow],
)
try:
firstperms = await manage_components.wait_for_component(
self.bot,
components=[selrow],
timeout=600,
check=lambda msg: msg.author.id == ctx.author.id,
)
await firstperms.defer(edit_origin=True)
except asyncio.TimeoutError:
selrow["components"][0]["disabled"] = True
await ctx.origin_message.edit("Timed out.", components=[selrow])
return
roleperm.add_reactions = True if "add_reactions" in firstperms.selected_options else False
roleperm.attach_files = True if "attach_files" in firstperms.selected_options else False
roleperm.ban_members = True if "ban_members" in firstperms.selected_options else False
roleperm.change_nickname = (
True if "change_nickname" in firstperms.selected_options else False
)
roleperm.connect = True if "connect" in firstperms.selected_options else False
roleperm.create_instant_invite = (
True if "create_instant_invite" in firstperms.selected_options else False
)
roleperm.deafen_members = True if "deafen_members" in firstperms.selected_options else False
roleperm.embed_links = True if "embed_links" in firstperms.selected_options else False
roleperm.external_emojis = (
True if "external_emojis" in firstperms.selected_options else False
)
roleperm.kick_members = True if "kick_members" in firstperms.selected_options else False
roleperm.manage_channels = (
True if "manage_channels" in firstperms.selected_options else False
)
roleperm.manage_emojis = True if "manage_emojis" in firstperms.selected_options else False
roleperm.manage_guild = True if "manage_guild" in firstperms.selected_options else False
roleperm.manage_messages = (
True if "manage_messages" in firstperms.selected_options else False
)
roleperm.manage_nicknames = (
True if "manage_nicknames" in firstperms.selected_options else False
)
roleperm.manage_permissions = (
True if "manage_permissions" in firstperms.selected_options else False
)
roleperm.manage_roles = True if "manage_roles" in firstperms.selected_options else False
roleperm.manage_webhooks = (
True if "manage_webhooks" in firstperms.selected_options else False
)
sel2 = manage_components.create_select(
placeholder="Choose the permissions you want for your role (2/2)",
max_values=17,
min_values=1,
options=[
manage_components.create_select_option(
label="mention everyone in a message",
value="mention_everyone",
),
manage_components.create_select_option(
label="move members across voice channels",
value="move_members",
),
manage_components.create_select_option(
label="mute members in voice channels",
value="mute_members",
),
manage_components.create_select_option(
label="priority speaker",
value="priority_speaker",
),
manage_components.create_select_option(
label="read message history in channels",
value="read_message_history",
),
manage_components.create_select_option(
label="read all messages in channels",
value="read_messages",
),
manage_components.create_select_option(
label="request to speak in stage channels",
value="request_to_speak",
),
manage_components.create_select_option(
label="send messages in channels",
value="send_messages",
),
manage_components.create_select_option(
label="send TTS messages in channels",
value="send_tts_messages",
),
manage_components.create_select_option(
label="speak in voice channels",
value="speak",
),
manage_components.create_select_option(
label="stream in voice channels / enable camera",
value="stream",
),
manage_components.create_select_option(
label="use external emojis",
value="use_external_emojis",
),
manage_components.create_select_option(
label="use slash commands in channels",
value="use_slash_commands",
),
manage_components.create_select_option(
label="use voice activation in voice channels (else only push-to-talk)",
value="use_voice_activation",
),
manage_components.create_select_option(
label="view the audit-log",
value="view_audit_log",
),
manage_components.create_select_option(
label="view channels",
value="view_channel",
),
manage_components.create_select_option(
label="view guild insights",
value="view_guild_insights",
),
],
)
sel2row = manage_components.create_actionrow(sel2)
await firstperms.edit_origin(
content="Please choose the permissions you want to assign to the role (2/2)",
components=[sel2row],
)
try:
secondperms = await manage_components.wait_for_component(
self.bot,
components=[sel2row],
timeout=600,
check=lambda msg: msg.author.id == ctx.author.id,
)
await secondperms.defer(edit_origin=True)
except asyncio.TimeoutError:
sel2row["components"][0]["disabled"] = True
await firstperms.origin_message.edit("Timed out.", components=[sel2row])
return
roleperm.mention_everyone = (
True if "mention_everyone" in secondperms.selected_options else False
)
roleperm.move_members = True if "move_members" in secondperms.selected_options else False
roleperm.mute_members = True if "mute_members" in secondperms.selected_options else False
roleperm.priority_speaker = (
True if "priority_speaker" in secondperms.selected_options else False
)
roleperm.read_message_history = (
True if "read_message_history" in secondperms.selected_options else False
)
roleperm.read_messages = True if "read_messages" in secondperms.selected_options else False
roleperm.request_to_speak = (
True if "request_to_speak" in secondperms.selected_options else False
)
roleperm.send_messages = True if "send_messages" in secondperms.selected_options else False
roleperm.send_tts_messages = (
True if "send_tts_messages" in secondperms.selected_options else False
)
roleperm.speak = True if "speak" in secondperms.selected_options else False
roleperm.stream = True if "stream" in secondperms.selected_options else False
roleperm.use_external_emojis = (
True if "use_external_emojis" in secondperms.selected_options else False
)
roleperm.use_slash_commands = (
True if "use_slash_commands" in secondperms.selected_options else False
)
roleperm.use_voice_activation = (
True if "use_voice_activation" in secondperms.selected_options else False
)
roleperm.view_audit_log = (
True if "view_audit_log" in secondperms.selected_options else False
)
roleperm.view_channel = True if "view_channel" in secondperms.selected_options else False
roleperm.view_guild_insights = (
True if "view_guild_insights" in secondperms.selected_options else False
)
return roleperm, sel2row, secondperms
pun_opt = [{"name": "user", "description": "The user to punish", "required": True, "type": 6}]
@cog_ext.cog_subcommand(
base="server",
subcommand_group="user",
name="punish",
description="punishes a user",
options=pun_opt,
)
async def _user_punish(self, ctx: SlashContext, user: discord.Member):
# add moderator permission restriction
await ctx.defer(hidden=False)
# This command is **not** hidden, so the user can see that he is being punished
user_setbtn1 = [
manage_components.create_button(label="BAN", style=ButtonStyle.red, custom_id="ban"),
manage_components.create_button(label="KICK", style=ButtonStyle.red, custom_id="kick"),
]
user_setbtn2 = [
manage_components.create_button(label="WARN", style=ButtonStyle.blue, custom_id="warn"),
manage_components.create_button(label="MUTE", style=ButtonStyle.blue, custom_id="mute"),
]
user_setbtn3 = [
manage_components.create_button(
label="Do nothing", style=ButtonStyle.gray, custom_id="nothing"
)
]
user_buttons_actionrow1 = manage_components.create_actionrow(*user_setbtn1)
user_buttons_actionrow2 = manage_components.create_actionrow(*user_setbtn2)
user_buttons_actionrow3 = manage_components.create_actionrow(*user_setbtn3)
if (
not ctx.author.guild_permissions.ban_members
and not ctx.author.guild_permissions.kick_members
and not ctx.author.guild_permissions.manage_messages
):
raise discord.ext.commands.MissingPermissions(
missing_perms=["manage_messages", "ban_members", "kick_members"]
) # raise some error you like
if (
not ctx.author.guild_permissions.ban_members
or not ctx.author.guild_permissions.kick_members
):
for i in range(2):
user_buttons_actionrow1["components"][i]["disabled"] = True
message = await ctx.send(
f"What do you want to do with {user.mention}? (timeout: 60 seconds)",
hidden=False,
components=[
user_buttons_actionrow1,
user_buttons_actionrow2,
user_buttons_actionrow3,
],
)
try:
buttons: ComponentContext = await manage_components.wait_for_component(
self.bot,
components=[
user_buttons_actionrow1,
user_buttons_actionrow2,
user_buttons_actionrow3,
],
timeout=60,
check=lambda msg: ctx.author.id == msg.author.id,
)
await buttons.defer(edit_origin=True)
except asyncio.TimeoutError:
for i in range(2):
user_buttons_actionrow1["components"][i]["disabled"] = True
user_buttons_actionrow2["components"][i]["disabled"] = True
user_buttons_actionrow3["components"][0]["disabled"] = True
await message.edit(
content="Timed out.",
components=[
user_buttons_actionrow1,
user_buttons_actionrow2,
user_buttons_actionrow3,
],
) # Disable the Buttons
return
if buttons.component_id == "mute":
mute_btn = [
manage_components.create_button(
label="Minutes", style=ButtonStyle.red, custom_id="m"
),
manage_components.create_button(
label="Hours", style=ButtonStyle.red, custom_id="h"
),
manage_components.create_button(label="Days", style=ButtonStyle.red, custom_id="d"),
manage_components.create_button(
label="Cancel", style=ButtonStyle.gray, custom_id="c"
),
]
mute_actionrow = manage_components.create_actionrow(*mute_btn)
await buttons.edit_origin(
content="Do you want to mute the user for a minutes, hours or days?",
hidden=True,
components=[mute_actionrow],
)
mute_btn_ctx: ComponentContext = (
await manage_components.manage_components.wait_for_component(
self.bot,
components=mute_actionrow,
check=lambda msg: msg.author.id == buttons.author.id,
)
)
await mute_btn_ctx.defer(edit_origin=True)
if mute_btn_ctx.component_id == "c":
for i in range(4):
mute_actionrow["components"][i]["disabled"] = True
await mute_btn_ctx.edit_origin(
content="ok, cancelled", hidden=True, components=[mute_actionrow]
)
if mute_btn_ctx.component_id == "m":
times1 = [
manage_components.create_button(
label="5", style=ButtonStyle.red, custom_id="5"
),
manage_components.create_button(
label="10", style=ButtonStyle.red, custom_id="10"
),
manage_components.create_button(
label="15", style=ButtonStyle.red, custom_id="15"
),
manage_components.create_button(
label="20", style=ButtonStyle.red, custom_id="20"
),
]
times2 = [
manage_components.create_button(
label="25", style=ButtonStyle.red, custom_id="25"
),
manage_components.create_button(
label="30", style=ButtonStyle.red, custom_id="30"
),
manage_components.create_button(
label="35", style=ButtonStyle.red, custom_id="35"
),
manage_components.create_button(
label="40", style=ButtonStyle.red, custom_id="40"
),
]
times3 = [
manage_components.create_button(
label="45", style=ButtonStyle.red, custom_id="45"
),
manage_components.create_button(
label="50", style=ButtonStyle.red, custom_id="50"
),
manage_components.create_button(
label="55", style=ButtonStyle.red, custom_id="55"
),
manage_components.create_button(
label="60", style=ButtonStyle.red, custom_id="60"
),
]
times1_row = manage_components.create_actionrow(*times1)
times2_row = manage_components.create_actionrow(*times2)
times3_row = manage_components.create_actionrow(*times3)
await mute_btn_ctx.edit_origin(
content="Select the duration of the mute! (timeout: 180s)",
hidden=True,
components=[times1_row, times2_row, times3_row],
)
try:
times_ctx: ComponentContext = (
await manage_components.manage_components.wait_for_component(
self.bot,
components=[times1_row, times2_row, times3_row],
check=lambda msg: msg.author.id == mute_btn_ctx.author.id,
timeout=180,
)
)
await times_ctx.defer(edit_origin=True)
except asyncio.TimeoutError:
for i in range(4):
times1_row["components"][i]["disabled"] = True
times2_row["components"][i]["disabled"] = True
times3_row["components"][i]["disabled"] = True
await mute_btn_ctx.origin_message.edit(
content="Timed out.",
hidden=True,
components=[times1_row, times2_row, times3_row],
)
return
for i in range(4):
times1_row["components"][i]["disabled"] = True
times2_row["components"][i]["disabled"] = True
times3_row["components"][i]["disabled"] = True
dur = int(times_ctx.component_id)
await times_ctx.edit_origin(
content=f"{user.mention} is going to be muted for {dur} minutes",
hidden=False,
components=[times1_row, times2_row, times3_row],
)
await punishments.mute(ctx, user, dur, "m")
if mute_btn_ctx.component_id == "h":
times1 = [
manage_components.create_button(
style=ButtonStyle.red, label="1", custom_id="1"
),
manage_components.create_button(
style=ButtonStyle.red, label="2", custom_id="2"
),
manage_components.create_button(
style=ButtonStyle.red, label="3", custom_id="3"
),
manage_components.create_button(
style=ButtonStyle.red, label="4", custom_id="4"
),
manage_components.create_button(
style=ButtonStyle.red, label="5", custom_id="5"
),
]
times2 = [
manage_components.create_button(
style=ButtonStyle.red, label="6", custom_id="6"
),
manage_components.create_button(
style=ButtonStyle.red, label="7", custom_id="7"
),
manage_components.create_button(
style=ButtonStyle.red, label="8", custom_id="8"
),
manage_components.create_button(
style=ButtonStyle.red, label="9", custom_id="9"
),
manage_components.create_button(
style=ButtonStyle.red, label="10", custom_id="10"
),
]
times3 = [
manage_components.create_button(
style=ButtonStyle.red, label="11", custom_id="11"
),
manage_components.create_button(
style=ButtonStyle.red, label="12", custom_id="12"
),
manage_components.create_button(
style=ButtonStyle.red, label="13", custom_id="13"
),
manage_components.create_button(
style=ButtonStyle.red, label="14", custom_id="14"
),
manage_components.create_button(
style=ButtonStyle.red, label="15", custom_id="15"
),
]
times4 = [
manage_components.create_button(
style=ButtonStyle.red, label="16", custom_id="16"
),
manage_components.create_button(
style=ButtonStyle.red, label="17", custom_id="17"
),
manage_components.create_button(
style=ButtonStyle.red, label="18", custom_id="18"
),
manage_components.create_button(
style=ButtonStyle.red, label="19", custom_id="19"
),
manage_components.create_button(
style=ButtonStyle.red, label="20", custom_id="20"
),
]
times5 = [
manage_components.create_button(
style=ButtonStyle.red, label="21", custom_id="21"
),
manage_components.create_button(
style=ButtonStyle.red, label="22", custom_id="22"
),
manage_components.create_button(
style=ButtonStyle.red, label="23", custom_id="23"
),
manage_components.create_button(
style=ButtonStyle.red, label="24", custom_id="24"
),
]
times1_row = manage_components.create_actionrow(*times1)
times2_row = manage_components.create_actionrow(*times2)
times3_row = manage_components.create_actionrow(*times3)
times4_row = manage_components.create_actionrow(*times4)
times5_row = manage_components.create_actionrow(*times5)
await mute_btn_ctx.edit_origin(
content="Select the duration of the mute! (timeout: 180s)",
hidden=True,
components=[times1_row, times2_row, times3_row, times4_row, times5_row],
)
try:
times_ctx: ComponentContext = (
await manage_components.manage_components.wait_for_component(
self.bot,
components=[times1_row, times2_row, times3_row, times4_row, times5_row],
check=lambda msg: mute_btn_ctx.author.id == msg.author.id,
timeout=180,
)
)
await times_ctx.defer(edit_origin=True)
except asyncio.TimeoutError:
for i in range(5):
times1_row["components"][i]["disabled"] = True
times2_row["components"][i]["disabled"] = True
times3_row["components"][i]["disabled"] = True
times4_row["components"][i]["disabled"] = True
for i in range(4):
times5_row["components"][i]["disabled"] = True
await mute_btn_ctx.origin_message.edit(
content="Timed out.",
hidden=True,
components=[times1_row, times2_row, times3_row, times4_row, times5_row],
)
return
for i in range(5):
times1_row["components"][i]["disabled"] = True
times2_row["components"][i]["disabled"] = True
times3_row["components"][i]["disabled"] = True
times4_row["components"][i]["disabled"] = True
for i in range(4):
times5_row["components"][i]["disabled"] = True
dur = int(times_ctx.component_id)
await times_ctx.edit_origin(
content=f"{user.mention} is going to be muted for {dur} hours",
hidden=False,
components=[times1_row, times2_row, times3_row, times4_row, times5_row],
)
await punishments.mute(ctx, user, dur, "h")
if mute_btn_ctx.component_id == "d":
times1 = [
manage_components.create_button(
style=ButtonStyle.red, label="1", custom_id="1"
),
manage_components.create_button(
style=ButtonStyle.red, label="2", custom_id="2"
),
manage_components.create_button(
style=ButtonStyle.red, label="3", custom_id="3"
),
manage_components.create_button(
style=ButtonStyle.red, label="4", custom_id="4"
),
manage_components.create_button(
style=ButtonStyle.red, label="5", custom_id="5"
),
]
times2 = [
manage_components.create_button(
style=ButtonStyle.red, label="6", custom_id="6"
),
manage_components.create_button(
style=ButtonStyle.red, label="7", custom_id="7"
),
manage_components.create_button(
style=ButtonStyle.red, label="8", custom_id="8"
),
manage_components.create_button(
style=ButtonStyle.red, label="9", custom_id="9"
),
manage_components.create_button(
style=ButtonStyle.red, label="10", custom_id="10"
),
]
times3 = [
manage_components.create_button(
style=ButtonStyle.red, label="11", custom_id="11"
),
manage_components.create_button(
style=ButtonStyle.red, label="12", custom_id="12"
),
manage_components.create_button(
style=ButtonStyle.red, label="13", custom_id="13"
),
manage_components.create_button(
style=ButtonStyle.red, label="14", custom_id="14"
),
manage_components.create_button(
style=ButtonStyle.red, label="15", custom_id="15"
),
]
times4 = [
manage_components.create_button(
style=ButtonStyle.red, label="16", custom_id="16"
),
manage_components.create_button(
style=ButtonStyle.red, label="17", custom_id="17"
),
manage_components.create_button(
style=ButtonStyle.red, label="18", custom_id="18"
),
manage_components.create_button(
style=ButtonStyle.red, label="19", custom_id="19"
),
manage_components.create_button(
style=ButtonStyle.red, label="20", custom_id="20"
),
]
times5 = [
manage_components.create_button(
style=ButtonStyle.red, label="21", custom_id="21"
),
manage_components.create_button(
style=ButtonStyle.red, label="22", custom_id="22"
),
manage_components.create_button(
style=ButtonStyle.red, label="23", custom_id="23"
),
manage_components.create_button(
style=ButtonStyle.red, label="24", custom_id="24"
),
manage_components.create_button(
style=ButtonStyle.red, label="25", custom_id="25"
),
]
times1_row = manage_components.create_actionrow(*times1)
times2_row = manage_components.create_actionrow(*times2)
times3_row = manage_components.create_actionrow(*times3)
times4_row = manage_components.create_actionrow(*times4)
times5_row = manage_components.create_actionrow(*times5)
await mute_btn_ctx.edit_origin(
content="Select the duration of the mute! (timeout: 180s)",
hidden=True,
components=[times1_row, times2_row, times3_row, times4_row, times5_row],
)
try:
times_ctx: ComponentContext = (
await manage_components.manage_components.wait_for_component(
self.bot,
components=[times1_row, times2_row, times3_row, times4_row, times5_row],
check=lambda msg: mute_btn_ctx.author.id == msg.author.id,
timeout=180,
)
)
await times_ctx.defer(edit_origin=True)
except asyncio.TimeoutError:
for i in range(5):
times1_row["components"][i]["disabled"] = True
times2_row["components"][i]["disabled"] = True
times3_row["components"][i]["disabled"] = True
times4_row["components"][i]["disabled"] = True
times5_row["components"][i]["disabled"] = True
await mute_btn_ctx.origin_message.edit(
content="Timed out.",
hidden=True,
components=[times1_row, times2_row, times3_row, times4_row, times5_row],
)
return
for i in range(5):
times1_row["components"][i]["disabled"] = True
times2_row["components"][i]["disabled"] = True
times3_row["components"][i]["disabled"] = True
times4_row["components"][i]["disabled"] = True
times5_row["components"][i]["disabled"] = True
dur = int(times_ctx.component_id)
await times_ctx.edit_origin(
content=f"{user.mention} is going to be muted for {dur} days",
hidden=False,
components=[times1_row, times2_row, times3_row, times4_row, times5_row],
)
await punishments.mute(ctx, user, dur, "d")
if buttons.component_id == "warn":
await buttons.edit_origin(
content="please send a message with the reason of the warning! (timeout: 600s)",
components=[],
)
try:
a = await self.bot.wait_for(
"message", check=lambda msg: msg.author.id == buttons.author.id, timeout=600
)
except asyncio.TimeoutError:
await buttons.origin_message.edit("Timed out, process canceled.")
return
reason = str(a.content)
await a.delete()
await buttons.origin_message.delete()
await punishments.warn(ctx, user, reason)
if buttons.component_id == "kick":
await buttons.edit_origin(
content="please send a message with the reason of the kick! (timeout: 600s)",
components=[],
)
try:
a = await self.bot.wait_for(
"message", check=lambda msg: msg.author.id == buttons.author.id, timeout=600
)
except asyncio.TimeoutError:
await buttons.origin_message.edit("Timed out, process canceled.")
return
reason = str(a.content)
await a.delete()
await buttons.origin_message.delete()
await punishments.kick(ctx, user, reason)
if buttons.component_id == "ban":
await buttons.edit_origin(
content="please send a message with the reason of the ban! (timeout: 600s)",
components=[],
)
try:
a = await self.bot.wait_for(
"message", check=lambda msg: msg.author.id == buttons.author.id, timeout=600
)
except asyncio.TimeoutError:
await buttons.origin_message.edit("Timed out, process canceled.")
return
reason = str(a.content)
await a.delete()
await buttons.origin_message.delete()
await punishments.ban(ctx, user, reason)
if buttons.component_id == "nothing":
for i in range(2):
user_buttons_actionrow1["components"][i]["disabled"] = True
user_buttons_actionrow2["components"][i]["disabled"] = True
user_buttons_actionrow3["components"][0]["disabled"] = True
await buttons.edit_origin(
content="ok nothing will happen",
components=[
user_buttons_actionrow1,
user_buttons_actionrow2,
user_buttons_actionrow3,
],
)
@cog_ext.cog_subcommand(
base="server", subcommand_group="user", name="unban", description="unbans a user"
)
async def _unban(
self, ctx: SlashContext, user_id: str, reason: str
): # in theory discord-slash should automatically create options for that
user_id = int(user_id)
user: discord.User = await self.bot.fetch_user(user_id)
reason1 = f"User {ctx.author.name} used the unban command on {user.name}! \n Unban-Reason: {reason}"
await ctx.guild.unban(user=user, reason=reason1)
await ctx.send(f"unbanned {user.mention}!")
    # @cog_ext.cog_subcommand(base="server", subcommand_group="user", name="un-punish", description="un-punishes a user")  # TODO: implement once the mute and warn systems are done
radd_opt = [
{
"name": "user",
"description": "the user to add the role to",
"required": True,
"type": 6,
},
{
"name": "role",
"description": "the role to add",
"type": 8,
"required": True,
},
]
@cog_ext.cog_subcommand(
base="server",
subcommand_group="user",
name="add-role",
description="adds a role to a user",
options=radd_opt,
)
async def _role_add(self, ctx: SlashContext, user: discord.Member, role: discord.Role):
if not ctx.author.guild_permissions.manage_roles:
raise discord.ext.commands.MissingPermissions(missing_perms=["manage_roles"])
await user.add_roles(
role, reason=f"User {ctx.author.name} used the add-role command on {user.name}!"
)
await ctx.send("Done!")
rrem_opt = [
{
"name": "user",
"description": "the user to remove the role from",
"required": True,
"type": 6,
},
{
"name": "role",
"description": "the role to remove",
"type": 8,
"required": True,
},
]
@cog_ext.cog_subcommand(
base="server",
subcommand_group="user",
name="remove-role",
description="removes a role from a user",
options=rrem_opt,
)
async def _role_remove(self, ctx: SlashContext, user: discord.Member, role: discord.Role):
if not ctx.author.guild_permissions.manage_roles:
raise discord.ext.commands.MissingPermissions(missing_perms=["manage_roles"])
await user.remove_roles(
role, reason=f"User {ctx.author.name} used the remove-role command on {user.name}!"
)
await ctx.send("Done!")
rcre_opt = [
{
"name": "name",
"description": "the name of the role",
"required": True,
"type": 3,
},
{
"name": "color",
"description": "the colour of the role (hex code)",
"type": 3,
"required": True,
},
{
"name": "hoist",
"description": "whether the role should be shown separately in the member list, default False",
"required": False,
"type": 5,
},
{
"name": "mentionable",
"description": "whether everyone should be able to mention the role, default False",
"required": False,
"type": 5,
},
]
    @cog_ext.cog_subcommand(
        base="server",
        subcommand_group="role",
        name="create",
        description="creates a role",
        options=rcre_opt,
    )
    async def _create_role(
        self,
        ctx: SlashContext,
        name: str,
        color: str,
        hoist: bool = False,
        mentionable: bool = False,
    ):
        """Interactively create a role named *name* with colour *color*.

        Validates *color* as a hex code, then walks the invoker through a
        button dialogue: no permissions at all, administrator only, or a
        custom permission set chosen via ``self._get_role_permissions``.

        Args:
            ctx: slash-command invocation context.
            name: name of the new role.
            color: role colour as a hex string such as ``#ff0000``.
            hoist: show the role separately in the member list.
            mentionable: allow everyone to mention the role.

        Raises:
            discord.ext.commands.errors.MissingPermissions: invoker lacks manage_roles.
            discord.ext.commands.BadArgument: *color* is not a hex colour code.
        """
        if not ctx.author.guild_permissions.manage_roles:
            raise discord.ext.commands.errors.MissingPermissions(missing_perms=["manage_roles"])
        match = re.search(r"^#(?:[0-9a-fA-F]{3}){1,2}$", color)  # check if color is hex
        if not match:
            raise discord.ext.commands.BadArgument("color is not a hex-color code")
        hexval = color.lstrip("#")
        # bytes.fromhex turns e.g. "ff0000" into b"\xff\x00\x00" -> (255, 0, 0).
        # NOTE(review): a 3-digit code like "#f00" passes the regex but
        # bytes.fromhex("f00") raises ValueError (odd length) — confirm/fix.
        rgbval = tuple(bytes.fromhex(hexval))
        await ctx.defer(hidden=False)
        roleperm = discord.Permissions().none()  # start with every permission denied
        color = discord.Colour.from_rgb(r=rgbval[0], g=rgbval[1], b=rgbval[2])
        # Two identical Yes/No rows: one for "any permissions?", one for "admin?".
        anypermbutton = [
            manage_components.create_button(label="Yes", style=ButtonStyle.green, custom_id="yes"),
            manage_components.create_button(label="No", style=ButtonStyle.red, custom_id="no"),
        ]
        adminbutton = [
            manage_components.create_button(label="Yes", style=ButtonStyle.green, custom_id="yes"),
            manage_components.create_button(label="No", style=ButtonStyle.red, custom_id="no"),
        ]
        any_ar = manage_components.create_actionrow(*anypermbutton)
        adm_ar = manage_components.create_actionrow(*adminbutton)
        ask = await ctx.send(
            "Do you want to have any Permission enabled on your role?", components=[any_ar]
        )
        try:
            answer: ComponentContext = await manage_components.wait_for_component(
                self.bot,
                components=[any_ar],
                timeout=600,
                # only the original invoker may answer the prompt
                check=lambda msg: ctx.author.id == msg.author.id,
            )
            await answer.defer(edit_origin=True)
        except asyncio.TimeoutError:
            # Disable both buttons so the stale prompt can no longer be clicked.
            for i in range(2):
                any_ar["components"][i]["disabled"] = True
            await ask.edit(content="Timed out.", components=[any_ar])
            return
        if answer.component_id == "no":
            # No permissions requested: create the role with the empty set.
            for i in range(2):
                any_ar["components"][i]["disabled"] = True
            await answer.edit_origin(
                content=f"creating role '{name}' with the color #{hexval} and no permissions....",
                components=[any_ar],
            )
            await ctx.guild.create_role(
                name=name,
                color=color,
                permissions=roleperm,
                hoist=hoist,
                mentionable=mentionable,
                reason=f"User {ctx.author.name} used the create-role command!",
            )
            await ctx.channel.send("Done")
            return
        else:
            pass
        await answer.edit_origin(
            content="Do you want the role to have administrator-permissions?", components=[adm_ar]
        )
        try:
            admin: ComponentContext = await manage_components.wait_for_component(
                self.bot,
                components=[adm_ar],
                timeout=600,
                check=lambda msg: msg.author.id == ctx.author.id,
            )
            await admin.defer(edit_origin=True)
        except asyncio.TimeoutError:
            for i in range(2):
                adm_ar["components"][i]["disabled"] = True
            await answer.origin_message.edit(content="Timed out.", components=[adm_ar])
            return
        if admin.component_id == "yes":
            # Administrator implies every other permission.
            roleperm.administrator = True
            for i in range(2):
                adm_ar["components"][i]["disabled"] = True
            await admin.edit_origin(
                content=f"creating role '{name}' with the color #{hexval} and administrator permissions....",
                components=[adm_ar],
            )
            await ctx.guild.create_role(
                name=name,
                color=color,
                permissions=roleperm,
                hoist=hoist,
                mentionable=mentionable,
                reason=f"User {ctx.author.name} used the create-role command!",
            )
            await ctx.channel.send("Done")
            return
        else:
            # Custom permissions: delegate the select-menu dialogue to the helper.
            try:
                roleperm, sel2row, secondperms = await self._get_role_permissions(
                    ctx=admin, roleperm=roleperm
                )
            except ValueError:  # on timeout no values returned, just do nothing then
                return
            sel2row["components"][0]["disabled"] = True
            await secondperms.edit_origin(
                content=f"Role '{name}' with color #{hexval} and your custom permissions, which have the value {roleperm.value}, is being created",
                components=[sel2row],
            ),  # NOTE(review): stray trailing comma wraps the awaited result in a tuple; harmless but should be removed
            await ctx.guild.create_role(
                name=name,
                color=color,
                permissions=roleperm,
                hoist=hoist,
                mentionable=mentionable,
                reason=f"User {ctx.author.name} used the create-role command!",
            )
            await ctx.channel.send("Done!")
redt_opt = [
{
"name": "role",
"description": "the role you want to edit",
"required": True,
"type": 8,
},
{
"name": "name",
"description": "the new name of the role",
"required": False,
"type": 3,
},
{
"name": "color",
"description": "the new colour of the role (hex code)",
"type": 3,
"required": False,
},
{
"name": "hoist",
"description": "whether the role should be shown separately in the member list",
"required": False,
"type": 5,
},
{
"name": "mentionable",
"description": "whether everyone should be able to mention the role",
"required": False,
"type": 5,
},
]
@cog_ext.cog_subcommand(
base="server",
subcommand_group="role",
name="edit",
description="edits a role",
options=redt_opt,
)
async def _role_edit(
self,
ctx: SlashContext,
role: discord.Role,
name: str = None,
color: str = None,
hoist: bool = None,
mentionable: bool = None,
):
if not ctx.author.guild_permissions.manage_roles:
raise discord.ext.commands.errors.MissingPermissions(missing_perms=["manage_roles"])
if not name:
name = role.name
if not hoist:
hoist = role.hoist
if not mentionable:
mentionable = role.mentionable
if color:
match = re.search(r"^#(?:[0-9a-fA-F]{3}){1,2}$", color) # check if color is hex
if not match:
raise discord.ext.commands.BadArgument("color is not a hex-color code")
await ctx.defer(hidden=False)
hexval = color.lstrip("#")
rgbval = tuple(bytes.fromhex(hexval))
color = discord.Colour.from_rgb(r=rgbval[0], g=rgbval[1], b=rgbval[2])
elif not color:
await ctx.defer(hidden=False)
color = role.colour
perm_edt = [
manage_components.create_button(
label="Yes",
style=ButtonStyle.green,
custom_id="yes",
),
manage_components.create_button(
label="No",
style=ButtonStyle.red,
custom_id="no",
),
]
perm_ar = manage_components.create_actionrow(*perm_edt)
adminbutton = [
manage_components.create_button(label="Yes", style=ButtonStyle.green, custom_id="yes"),
manage_components.create_button(label="No", style=ButtonStyle.red, custom_id="no"),
]
adm_ar = manage_components.create_actionrow(*adminbutton)
msg = await ctx.send(
"Do you want to edit the permissions of the role?", components=[perm_ar]
)
try:
perm: ComponentContext = await manage_components.wait_for_component(
self.bot,
components=[perm_ar],
timeout=600,
check=lambda m: m.author.id == ctx.author.id,
)
await perm.defer(edit_origin=True)
except asyncio.TimeoutError:
for i in range(2):
perm_ar["components"][i]["disabled"] = True
await msg.edit(content="Timed out.", components=[perm_ar])
return
if perm.custom_id == "no":
roleperm = role.permissions
for i in range(2):
perm_ar["components"][i]["disabled"] = True
await perm.edit_origin(
content="editing the role without changing permissions?", components=[perm_ar]
)
await role.edit(
permissions=roleperm,
name=name,
hoist=hoist,
mentionable=mentionable,
colour=color,
reason=f"User {ctx.author.name} used the role-edit command!",
)
await perm.origin_message.channel.send(content="Done")
return
else:
await perm.edit_origin(
content="Do you want the role to have administrator-permissions?",
components=[adm_ar],
)
try:
admin: ComponentContext = await manage_components.wait_for_component(
self.bot,
components=[adm_ar],
timeout=600,
check=lambda msg: msg.author.id == ctx.author.id,
)
await admin.defer(edit_origin=True)
except asyncio.TimeoutError:
for i in range(2):
adm_ar["components"][i]["disabled"] = True
await perm.origin_message.edit(content="Timed out.", components=[adm_ar])
return
if admin.component_id == "yes":
roleperm = discord.Permissions().none()
roleperm.administrator = True
for i in range(2):
adm_ar["components"][i]["disabled"] = True
await admin.edit_origin(
content="editing role to admin-permissions", components=[adm_ar]
)
await role.edit(
name=name,
color=color,
permissions=roleperm,
hoist=hoist,
mentionable=mentionable,
reason=f"User {ctx.author.name} used the role-edit command!",
)
await admin.origin_message.channel.send("Done")
return
else:
try:
roleperm = discord.Permissions().none()
roleperm, sel2row, secondperms = await self._get_role_permissions(
ctx=admin, roleperm=roleperm
)
sel2row["components"][0]["disabled"] = True
await secondperms.edit_origin(
components=[sel2row], content="Edting role to your input....."
)
await role.edit(
name=name,
hoist=hoist,
colour=color,
mentionable=mentionable,
permissions=roleperm,
reason=f"User {ctx.author.name} used the role-edit command!",
)
await secondperms.origin_message.channel.send(content="Done")
except ValueError: # on timeout no values returned, just do nothing then
return
rdel_opt = [
{
"name": "role",
"description": "the role to delete",
"required": True,
"type": 8,
}
]
@cog_ext.cog_subcommand(
base="server",
subcommand_group="role",
name="delete",
description="deletes a role",
options=rdel_opt,
)
async def _delete_role(self, ctx: SlashContext, role: discord.Role):
if not ctx.author.guild_permissions.manage_roles:
raise discord.ext.commands.MissingPermissions(missing_perms=["manage_roles"])
await role.delete(reason=f"User {ctx.author.name} used the role-delete command!")
await ctx.send("role has been deleted", hidden=True)
ch_cre_opt = [
{
"name": "channel_type",
"description": "Text or Voice Channel",
"required": True,
"type": 3,
"choices": [
{
"name": "Text-channel",
"value": "TextChannel",
},
{
"name": "Voice-channel",
"value": "VoiceChannel",
},
],
},
{
"name": "name",
"description": "the name of the channel",
"type": 3,
"required": True,
},
{
"name": "category",
"description": "the category to add the channel to",
"type": 7,
"required": True,
},
{
"name": "nsfw",
"description": "whether the channel is nsfw. Default False (Text-Channel only)",
"type": 5,
"required": False,
},
]
@cog_ext.cog_subcommand(
base="server",
subcommand_group="channel",
name="create",
description="creates a channel",
options=ch_cre_opt,
)
async def _channel_create(
self,
ctx: SlashContext,
channel_type: str,
name: str,
category: discord.CategoryChannel,
nsfw: bool = False,
):
if not ctx.author.guild_permissions.manage_channels:
raise discord.ext.commands.MissingPermissions(missing_perms=["manage_channels"])
await ctx.defer(hidden=True)
if channel_type == "TextChannel":
await ctx.guild.create_text_channel(
name=name,
category=category,
nsfw=nsfw,
reason=f"User {ctx.author.name} used the channel-create command!",
)
elif channel_type == "VoiceChannel":
await ctx.guild.create_voice_channel(
name=name,
category=category,
reason=f"User {ctx.author.name} used the channel-create command!",
)
await ctx.send("done", hidden=True)
ch_edt_opt = [
{
"name": "channel",
"description": "The channel to edit",
"type": 7,
"required": True,
},
{
"name": "name",
"description": "The new name of the channel",
"type": 3,
"required": False,
},
{
"name": "slowmode_delay",
"description": "set the slowmode for the channel (only for text channels)",
"type": 4,
"required": False,
},
{
"name": "max_user_count",
"description": "set the max user count for the channel (only for voice channels",
"type": 4,
"required": False,
},
{
"name": "nsfw",
"description": "whether the channel is nsfw. (Text-Channel only)",
"type": 5,
"required": False,
},
{
"name": "position",
"description": "the position of the channel",
"type": 4,
"required": False,
},
]
@cog_ext.cog_subcommand(
base="server",
subcommand_group="channel",
name="edit",
description="edits a channel",
options=ch_edt_opt,
)
async def _channel_edit(
self,
ctx: SlashContext,
channel: Union[discord.TextChannel, discord.VoiceChannel],
name: str = None,
slowmode_delay: int = None,
max_user_count: int = None,
nsfw: bool = None,
position: int = None,
):
if not ctx.author.guild_permissions.manage_channels:
raise discord.ext.commands.MissingPermissions(missing_perms=["manage_channels"])
await ctx.defer(hidden=True)
if isinstance(channel, discord.TextChannel):
await channel.edit(
name=name if name is not None else channel.name,
slowmode_delay=slowmode_delay
if slowmode_delay is not None
else channel.slowmode_delay,
nsfw=nsfw if nsfw is not None else channel.nsfw,
position=position if nsfw is not None else channel.position,
reason=f"User {ctx.author.name} used the channel-edit command!",
)
elif isinstance(channel, discord.VoiceChannel):
await channel.edit(
name=name if name is not None else channel.name,
user_limit=max_user_count if max_user_count is not None else channel.user_limit,
position=position if nsfw is not None else channel.position,
reason=f"User {ctx.author.name} used the channel-edit command!",
)
await ctx.send("done", hidden=True)
ch_perm_edt_opt = [
{
"name": "channel",
"description": "the channel update permissions on",
"type": 7,
"required": True,
},
{
"name": "role_or_user",
"description": "the role or user to give special permissions over a channel",
"type": 9,
"required": True,
},
]
    @cog_ext.cog_subcommand(
        base="server",
        subcommand_group="channel",
        name="permission_edit",
        options=ch_perm_edt_opt,
        description="edits the permissions a user or a role has in a specific channel",
    )
    async def _channel_permission_edit(
        self,
        ctx: SlashContext,
        channel: Union[discord.TextChannel, discord.VoiceChannel],
        role_or_user: Union[discord.Role, discord.Member],
    ):
        """Interactively set a channel-specific permission overwrite.

        Shows a multi-select of permissions appropriate to the channel type
        (text vs. voice); every selected permission becomes an explicit allow
        and every unselected one an explicit deny for *role_or_user*.

        NOTE(review): the indentation of this method looked corrupted in the
        source (the ``elif`` did not line up with its ``if``); it has been
        normalized to the apparent intent without changing any tokens —
        confirm against the original file.

        Raises:
            discord.ext.commands.MissingPermissions: invoker holds neither
                manage_channels nor manage_permissions.
        """
        if (
            not ctx.author.guild_permissions.manage_channels
            and not ctx.author.guild_permissions.manage_permissions
        ):
            raise discord.ext.commands.MissingPermissions(
                missing_perms=["manage_channels", "manage_permissions"]
            )
        else:
            await ctx.defer(hidden=False)
        if isinstance(channel, discord.TextChannel):
            utils.LOGGER.debug(f"Test: {isinstance(role_or_user, str)} \n value: {role_or_user}")
            # Text channels expose 14 selectable permissions.
            perm_sel = manage_components.create_select(
                min_values=1,
                max_values=14,
                placeholder=f"choose the permissions to give the {'user' if isinstance(role_or_user, discord.Member) else 'role'}",
                options=[
                    manage_components.create_select_option(
                        label="view the channel",
                        value="view_channel",
                    ),
                    manage_components.create_select_option(
                        label="manage the channel",
                        value="manage_channels",
                    ),
                    manage_components.create_select_option(
                        label="manage channel permissions",
                        value="manage_permissions",
                    ),
                    manage_components.create_select_option(
                        label="manage webhooks",
                        value="manage_webhooks",
                    ),
                    manage_components.create_select_option(
                        label="create instant invite",
                        value="create_instant_invite",
                    ),
                    manage_components.create_select_option(
                        label="send messages in the channel",
                        value="send_messages",
                    ),
                    manage_components.create_select_option(
                        label="embed links in messages",
                        value="embed_links",
                    ),
                    manage_components.create_select_option(
                        label="attach files to messages",
                        value="attach_files",
                    ),
                    manage_components.create_select_option(
                        label="add reactions to messages",
                        value="add_reactions",
                    ),
                    manage_components.create_select_option(
                        label="use external emojis in messages",
                        value="use_external_emojis",
                    ),
                    manage_components.create_select_option(
                        label="mention @everyone",
                        value="mention_everyone",
                    ),
                    manage_components.create_select_option(
                        label="manage messages",
                        value="manage_messages",
                    ),
                    manage_components.create_select_option(
                        label="read message history",
                        value="read_message_history",
                    ),
                    manage_components.create_select_option(
                        label="send tts messages",
                        value="send_tts_messages",
                    ),
                ],
            )
            sel_ar = manage_components.create_actionrow(perm_sel)
            msg = await ctx.send(
                content=f"What Permissions do you want to give the {'user' if isinstance(role_or_user, discord.Member) else 'role'}\n"
                f"all not selected permissions will be **denied**",
                components=[sel_ar],
            )
            try:
                perms: ComponentContext = await manage_components.wait_for_component(
                    self.bot,
                    components=[sel_ar],
                    timeout=600,
                    check=lambda p: p.author.id == ctx.author.id,
                )
                await perms.defer(edit_origin=True)
            except asyncio.TimeoutError:
                sel_ar["components"][0]["disabled"] = True
                await msg.edit(content="Timed out.", components=[sel_ar])
                return
            # Every picked option becomes an allow; everything else is denied.
            perm = discord.PermissionOverwrite(
                view_channel=True if "view_channel" in perms.selected_options else False,
                manage_channels=True if "manage_channels" in perms.selected_options else False,
                manage_permissions=True
                if "manage_permissions" in perms.selected_options
                else False,
                manage_webhooks=True if "manage_webhooks" in perms.selected_options else False,
                create_instant_invite=True
                if "create_instant_invite" in perms.selected_options
                else False,
                send_messages=True if "send_messages" in perms.selected_options else False,
                embed_links=True if "embed_links" in perms.selected_options else False,
                attach_files=True if "attach_files" in perms.selected_options else False,
                add_reactions=True if "add_reactions" in perms.selected_options else False,
                use_external_emojis=True
                if "use_external_emojis" in perms.selected_options
                else False,
                mention_everyone=True if "mention_everyone" in perms.selected_options else False,
                manage_messages=True if "manage_messages" in perms.selected_options else False,
                read_message_history=True
                if "read_message_history" in perms.selected_options
                else False,
                send_tts_messages=True if "send_tts_messages" in perms.selected_options else False,
            )
            sel_ar["components"][0]["disabled"] = True
            try:
                await perms.edit_origin(
                    content="Editing channel permissions....", components=[sel_ar]
                )
                utils.LOGGER.debug(f"type: {type(role_or_user)}")
                perm_overwrite = {role_or_user: perm}
                await channel.edit(
                    overwrites=perm_overwrite,
                    reason=f"User {ctx.author.name} used the channel-permission-edit command!",
                )
                await perms.channel.send(
                    f"Done, channel '{channel.name} has been edited!", delete_after=180
                )
            except AttributeError:
                # NOTE(review): AttributeError is swallowed and only logged —
                # confirm this best-effort behaviour is intentional.
                error = traceback.format_exc()
                utils.LOGGER.error(error)
        elif isinstance(channel, discord.VoiceChannel):
            # Voice channels expose 12 selectable permissions.
            perm_sel = manage_components.create_select(
                min_values=1,
                max_values=12,
                placeholder=f"choose the permissions to give the {'user' if isinstance(role_or_user, discord.Member) else 'role'}",
                options=[
                    manage_components.create_select_option(
                        label="view the channel",
                        value="view_channel",
                    ),
                    manage_components.create_select_option(
                        label="manage the channel",
                        value="manage_channels",
                    ),
                    manage_components.create_select_option(
                        label="manage channel permissions",
                        value="manage_permissions",
                    ),
                    manage_components.create_select_option(
                        label="create instant invite",
                        value="create_instant_invite",
                    ),
                    manage_components.create_select_option(
                        label="connect to the channel",
                        value="connect",
                    ),
                    manage_components.create_select_option(
                        label="speak in the channel",
                        value="speak",
                    ),
                    manage_components.create_select_option(
                        label="stream/camera",
                        value="stream",
                    ),
                    manage_components.create_select_option(
                        label="use voice activation",
                        value="use_voice_activation",
                    ),
                    manage_components.create_select_option(
                        label="very important speaker",
                        value="priority_speaker",
                    ),
                    manage_components.create_select_option(
                        label="mute members",
                        value="mute_members",
                    ),
                    manage_components.create_select_option(
                        label="deafen members",
                        value="deafen_members",
                    ),
                    manage_components.create_select_option(
                        label="move members in another channel",
                        value="move_members",
                    ),
                ],
            )
            sel_ar = manage_components.create_actionrow(perm_sel)
            msg = await ctx.send(
                content=f"What Permissions do you want to give the {'user' if isinstance(role_or_user, discord.Member) else 'role'}"
                f"all not selected permissions will be **denied**",
                components=[sel_ar],
            )
            try:
                perms: ComponentContext = await manage_components.wait_for_component(
                    self.bot,
                    components=[sel_ar],
                    timeout=600,
                    check=lambda comp: comp.author.id == ctx.author.id,
                )
                await perms.defer(edit_origin=True)
            except asyncio.TimeoutError:
                sel_ar["components"][0]["disabled"] = True
                await msg.edit(content="Timed out.", components=[sel_ar])
                return
            perm = discord.PermissionOverwrite(
                view_channel=True if "view_channel" in perms.selected_options else False,
                manage_channels=True if "manage_channels" in perms.selected_options else False,
                manage_permissions=True
                if "manage_permissions" in perms.selected_options
                else False,
                create_instant_invite=True
                if "create_instant_invite" in perms.selected_options
                else False,
                connect=True if "connect" in perms.selected_options else False,
                speak=True if "speak" in perms.selected_options else False,
                stream=True if "stream" in perms.selected_options else False,
                use_voice_activation=True
                if "use_voice_activation" in perms.selected_options
                else False,
                priority_speaker=True if "priority_speaker" in perms.selected_options else False,
                mute_members=True if "mute_members" in perms.selected_options else False,
                deafen_members=True if "deafen_members" in perms.selected_options else False,
                move_members=True if "move_members" in perms.selected_options else False,
            )
            sel_ar["components"][0]["disabled"] = True
            await perms.edit_origin(content="Editing channel permissions....", components=[sel_ar])
            perm_overwrite = {
                role_or_user: perm,
            }
            await channel.edit(
                overwrites=perm_overwrite,
                reason=f"User {ctx.author.name} used the channel-permission-edit command!",
            )
ch_del_opt = [
{
"name": "channel",
"description": "The channel to edit",
"type": 7,
"required": True,
},
]
@cog_ext.cog_subcommand(
base="server",
subcommand_group="channel",
name="delete",
description="deletes a channel",
options=ch_del_opt,
)
async def _channel_delete(
self, ctx: SlashContext, channel: Union[discord.TextChannel, discord.VoiceChannel]
):
if not ctx.author.guild_permissions.manage_channels:
raise discord.ext.commands.MissingPermissions(missing_perms=["manage_channels"])
await channel.delete(reason=f"User {ctx.author.name} used the channel-delete command!")
await ctx.send("done", hidden=True)
cat_cre_opt = [
{
"name": "name",
"description": "the name of the category",
"type": 3,
"required": True,
},
{
"name": "position",
"description": "the position of the category",
"type": 4,
"required": False,
},
]
@cog_ext.cog_subcommand(
base="server",
subcommand_group="category",
name="create",
description="creates a category",
options=cat_cre_opt,
)
async def _category_create(self, ctx: SlashContext, name: str, position: int = None):
if not ctx.author.guild_permissions.manage_channels:
raise discord.ext.commands.MissingPermissions(missing_perms=["manage_channels"])
else:
await ctx.guild.create_category(
name=name,
position=position + 1 if position is not None else None,
reason=f"User {ctx.author.name} used the category-create command!",
)
await ctx.send("done!", hidden=True)
cat_edt_opt = [
{
"name": "category",
"description": "The category to delete",
"type": 7,
"required": True,
},
{
"name": "name",
"description": "the new name of the category",
"type": 3,
"required": False,
},
{
"name": "position",
"description": "the new position of the category",
"type": 4,
"required": False,
},
{
"name": "role_or_user",
"description": "if you specify this, you will be able to edit the permissions for a role or a user in that category",
"type": 9,
"required": False,
},
]
    @cog_ext.cog_subcommand(
        base="server",
        subcommand_group="category",
        name="edit",
        description="edits a category",
        options=cat_edt_opt,
    )
    async def _category_edit(
        self,
        ctx: SlashContext,
        category: discord.CategoryChannel,
        name: str = None,
        position: int = None,
        role_or_user: Union[discord.Member, discord.Role] = None,
    ):
        """Slash command: rename/reposition *category*, and optionally set a
        permission overwrite for a role or member.

        Without ``role_or_user`` this only edits name/position. With it, the
        invoker is walked through two select menus (text-channel perms, then
        voice-channel perms); every permission left unselected is denied.

        Raises MissingPermissions when the invoker lacks ``manage_channels``.
        """
        # Permission gate: only members who can manage channels may edit.
        if not ctx.author.guild_permissions.manage_channels:
            raise discord.ext.commands.MissingPermissions(missing_perms=["manage_channels"])
        else:
            await ctx.defer(hidden=True)
            if not role_or_user:
                # Simple path: no overwrite target, just rename/reposition.
                # NOTE(review): position is offset by +1, same as in
                # _category_create — presumably to match Discord's ordering.
                await category.edit(
                    reason=f"User {ctx.author.name} used the category-edit command!",
                    position=position + 1 if position is not None else category.position,
                    name=name if name is not None else category.name,
                )
                await ctx.send("done", hidden=True)
            else:
                # Stage 1: select which text-channel permissions to GRANT.
                # max_values equals the number of options, so any subset
                # (up to all 14) may be picked.
                text_perm_sel = manage_components.create_select(
                    min_values=1,
                    max_values=14,
                    placeholder=f"set the category text channel perms for the {'user' if isinstance(role_or_user, discord.Member) else 'role'}",
                    options=[
                        manage_components.create_select_option(
                            label="view the channel",
                            value="view_channel",
                        ),
                        manage_components.create_select_option(
                            label="manage the channel",
                            value="manage_channels",
                        ),
                        manage_components.create_select_option(
                            label="manage channel permissions",
                            value="manage_permissions",
                        ),
                        manage_components.create_select_option(
                            label="manage webhooks",
                            value="manage_webhooks",
                        ),
                        manage_components.create_select_option(
                            label="create instant invite",
                            value="create_instant_invite",
                        ),
                        manage_components.create_select_option(
                            label="send messages in the channel",
                            value="send_messages",
                        ),
                        manage_components.create_select_option(
                            label="embed links in messages",
                            value="embed_links",
                        ),
                        manage_components.create_select_option(
                            label="attach files to messages",
                            value="attach_files",
                        ),
                        manage_components.create_select_option(
                            label="add reactions to messages",
                            value="add_reactions",
                        ),
                        manage_components.create_select_option(
                            label="use external emojis in messages",
                            value="use_external_emojis",
                        ),
                        manage_components.create_select_option(
                            label="mention @everyone",
                            value="mention_everyone",
                        ),
                        manage_components.create_select_option(
                            label="manage messages",
                            value="manage_messages",
                        ),
                        manage_components.create_select_option(
                            label="read message history",
                            value="read_message_history",
                        ),
                        manage_components.create_select_option(
                            label="send tts messages",
                            value="send_tts_messages",
                        ),
                    ],
                )
                text_sel_ar = manage_components.create_actionrow(text_perm_sel)
                msg = await ctx.send(
                    content=f"What Permissions do you want to give the {'user' if isinstance(role_or_user, discord.Member) else 'role'} in the category?"
                    f"all not selected permissions will be **denied**",
                    components=[text_sel_ar],
                )
                # Only the command invoker may answer; 10-minute timeout.
                try:
                    text_perms: ComponentContext = await manage_components.wait_for_component(
                        self.bot,
                        components=[text_sel_ar],
                        timeout=600,
                        check=lambda comp: comp.author.id == ctx.author.id,
                    )
                    await text_perms.defer(edit_origin=True)
                except asyncio.TimeoutError:
                    # Disable the stale select so it can no longer be used.
                    text_sel_ar["components"][0]["disabled"] = True
                    await msg.edit(content="Timed out.", components=[text_sel_ar])
                    return
                # Stage 2: select which voice-channel permissions to GRANT.
                voice_perm_sel = manage_components.create_select(
                    min_values=1,
                    max_values=8,
                    placeholder=f"set the category voice channel perms for the {'user' if isinstance(role_or_user, discord.Member) else 'role'}",
                    options=[
                        manage_components.create_select_option(
                            label="connect to the channel",
                            value="connect",
                        ),
                        manage_components.create_select_option(
                            label="speak in the channel",
                            value="speak",
                        ),
                        manage_components.create_select_option(
                            label="stream/camera",
                            value="stream",
                        ),
                        manage_components.create_select_option(
                            label="use voice activation",
                            value="use_voice_activation",
                        ),
                        manage_components.create_select_option(
                            label="very important speaker",
                            value="priority_speaker",
                        ),
                        manage_components.create_select_option(
                            label="mute members",
                            value="mute_members",
                        ),
                        manage_components.create_select_option(
                            label="deafen members",
                            value="deafen_members",
                        ),
                        manage_components.create_select_option(
                            label="move members in another channel",
                            value="move_members",
                        ),
                    ],
                )
                voice_sel_ar = manage_components.create_actionrow(voice_perm_sel)
                # Reuse the same message: swap the text select for the voice one.
                await text_perms.edit_origin(
                    content=f"What Permissions do you want to give the {'user' if isinstance(role_or_user, discord.Member) else 'role'} in the category?"
                    f"all not selected permissions will be **denied**",
                    components=[voice_sel_ar],
                )
                try:
                    voice_perms: ComponentContext = await manage_components.wait_for_component(
                        self.bot,
                        components=[voice_sel_ar],
                        timeout=600,
                        check=lambda comp: comp.author.id == ctx.author.id,
                    )
                    await voice_perms.defer(edit_origin=True)
                except asyncio.TimeoutError:
                    voice_sel_ar["components"][0]["disabled"] = True
                    await text_perms.origin_message.edit(
                        content="Timed out.", components=[voice_sel_ar]
                    )
                    return
                # Fold both selections into one overwrite: each selected value
                # becomes an explicit allow (True), everything else an explicit
                # deny (False) — nothing is left as neutral/inherit.
                allperms = discord.PermissionOverwrite(
                    view_channel=True if "view_channel" in text_perms.selected_options else False,
                    manage_channels=True
                    if "manage_channels" in text_perms.selected_options
                    else False,
                    manage_permissions=True
                    if "manage_permissions" in text_perms.selected_options
                    else False,
                    manage_webhooks=True
                    if "manage_webhooks" in text_perms.selected_options
                    else False,
                    create_instant_invite=True
                    if "create_instant_invite" in text_perms.selected_options
                    else False,
                    send_messages=True if "send_messages" in text_perms.selected_options else False,
                    embed_links=True if "embed_links" in text_perms.selected_options else False,
                    attach_files=True if "attach_files" in text_perms.selected_options else False,
                    add_reactions=True if "add_reactions" in text_perms.selected_options else False,
                    use_external_emojis=True
                    if "use_external_emojis" in text_perms.selected_options
                    else False,
                    mention_everyone=True
                    if "mention_everyone" in text_perms.selected_options
                    else False,
                    manage_messages=True
                    if "manage_messages" in text_perms.selected_options
                    else False,
                    read_message_history=True
                    if "read_message_history" in text_perms.selected_options
                    else False,
                    send_tts_messages=True
                    if "send_tts_messages" in text_perms.selected_options
                    else False,
                    connect=True if "connect" in voice_perms.selected_options else False,
                    speak=True if "speak" in voice_perms.selected_options else False,
                    stream=True if "stream" in voice_perms.selected_options else False,
                    use_voice_activation=True
                    if "use_voice_activation" in voice_perms.selected_options
                    else False,
                    priority_speaker=True
                    if "priority_speaker" in voice_perms.selected_options
                    else False,
                    mute_members=True if "mute_members" in voice_perms.selected_options else False,
                    deafen_members=True
                    if "deafen_members" in voice_perms.selected_options
                    else False,
                    move_members=True if "move_members" in voice_perms.selected_options else False,
                )
                voice_sel_ar["components"][0]["disabled"] = True
                all_ovwerite = {role_or_user: allperms}
                # Apply name/position changes and the new overwrite together.
                await category.edit(
                    reason=f"User {ctx.author.name} used the category-edit command!",
                    position=position + 1 if position is not None else category.position,
                    name=name if name is not None else category.name,
                    overwrites=all_ovwerite,
                )
                await voice_perms.edit_origin(content="Done!", components=[voice_sel_ar])
cat_del_opt = [
{
"name": "category",
"description": "The category to delete",
"type": 7,
"required": True,
},
]
@cog_ext.cog_subcommand(
base="server",
subcommand_group="category",
name="delete",
description="deletes a category",
options=cat_del_opt,
)
async def _cat_delete(self, ctx: SlashContext, category: discord.CategoryChannel):
if not ctx.author.guild_permissions.manage_channels:
raise discord.ext.commands.MissingPermissions(missing_perms=["manage_channels"])
else:
await category.delete(
reason=f"User {ctx.author.name} used the category-delete command!"
)
await ctx.send("done!", hidden=True)
def setup(bot: AutoShardedBot):
    """Extension entry point: register the ServerUtils cog on *bot*."""
    cog = ServerUtils(bot)
    bot.add_cog(cog)
|
[
"discord.ext.commands.MissingPermissions",
"utils.utils.LOGGER.debug",
"discord_slash.utils.manage_components.create_select_option",
"discord_slash.utils.manage_components.manage_components.wait_for_component",
"discord_slash.utils.manage_components.create_actionrow",
"discord.Colour.from_rgb",
"traceback.format_exc",
"discord.PermissionOverwrite",
"discord_slash.utils.manage_components.create_button",
"re.search",
"discord.ext.commands.errors.MissingPermissions",
"utils.punishments.kick",
"utils.punishments.ban",
"utils.punishments.warn",
"discord_slash.utils.manage_components.wait_for_component",
"discord.ext.commands.BadArgument",
"discord.Permissions",
"utils.punishments.mute",
"utils.utils.LOGGER.error",
"discord_slash.cog_ext.cog_subcommand"
] |
[((12847, 12977), 'discord_slash.cog_ext.cog_subcommand', 'cog_ext.cog_subcommand', ([], {'base': '"""server"""', 'subcommand_group': '"""user"""', 'name': '"""punish"""', 'description': '"""punishes a user"""', 'options': 'pun_opt'}), "(base='server', subcommand_group='user', name=\n 'punish', description='punishes a user', options=pun_opt)\n", (12869, 12977), False, 'from discord_slash import ButtonStyle, ComponentContext, SlashContext, cog_ext\n'), ((37852, 37961), 'discord_slash.cog_ext.cog_subcommand', 'cog_ext.cog_subcommand', ([], {'base': '"""server"""', 'subcommand_group': '"""user"""', 'name': '"""unban"""', 'description': '"""unbans a user"""'}), "(base='server', subcommand_group='user', name='unban',\n description='unbans a user')\n", (37874, 37961), False, 'from discord_slash import ButtonStyle, ComponentContext, SlashContext, cog_ext\n'), ((38960, 39099), 'discord_slash.cog_ext.cog_subcommand', 'cog_ext.cog_subcommand', ([], {'base': '"""server"""', 'subcommand_group': '"""user"""', 'name': '"""add-role"""', 'description': '"""adds a role to a user"""', 'options': 'radd_opt'}), "(base='server', subcommand_group='user', name=\n 'add-role', description='adds a role to a user', options=radd_opt)\n", (38982, 39099), False, 'from discord_slash import ButtonStyle, ComponentContext, SlashContext, cog_ext\n'), ((39893, 40040), 'discord_slash.cog_ext.cog_subcommand', 'cog_ext.cog_subcommand', ([], {'base': '"""server"""', 'subcommand_group': '"""user"""', 'name': '"""remove-role"""', 'description': '"""removes a role from a user"""', 'options': 'rrem_opt'}), "(base='server', subcommand_group='user', name=\n 'remove-role', description='removes a role from a user', options=rrem_opt)\n", (39915, 40040), False, 'from discord_slash import ButtonStyle, ComponentContext, SlashContext, cog_ext\n'), ((41266, 41396), 'discord_slash.cog_ext.cog_subcommand', 'cog_ext.cog_subcommand', ([], {'base': '"""server"""', 'subcommand_group': '"""role"""', 'name': '"""create"""', 
'description': '"""creates a role"""', 'options': 'rcre_opt'}), "(base='server', subcommand_group='role', name=\n 'create', description='creates a role', options=rcre_opt)\n", (41288, 41396), False, 'from discord_slash import ButtonStyle, ComponentContext, SlashContext, cog_ext\n'), ((47404, 47529), 'discord_slash.cog_ext.cog_subcommand', 'cog_ext.cog_subcommand', ([], {'base': '"""server"""', 'subcommand_group': '"""role"""', 'name': '"""edit"""', 'description': '"""edits a role"""', 'options': 'redt_opt'}), "(base='server', subcommand_group='role', name='edit',\n description='edits a role', options=redt_opt)\n", (47426, 47529), False, 'from discord_slash import ButtonStyle, ComponentContext, SlashContext, cog_ext\n'), ((53512, 53642), 'discord_slash.cog_ext.cog_subcommand', 'cog_ext.cog_subcommand', ([], {'base': '"""server"""', 'subcommand_group': '"""role"""', 'name': '"""delete"""', 'description': '"""deletes a role"""', 'options': 'rdel_opt'}), "(base='server', subcommand_group='role', name=\n 'delete', description='deletes a role', options=rdel_opt)\n", (53534, 53642), False, 'from discord_slash import ButtonStyle, ComponentContext, SlashContext, cog_ext\n'), ((55066, 55204), 'discord_slash.cog_ext.cog_subcommand', 'cog_ext.cog_subcommand', ([], {'base': '"""server"""', 'subcommand_group': '"""channel"""', 'name': '"""create"""', 'description': '"""creates a channel"""', 'options': 'ch_cre_opt'}), "(base='server', subcommand_group='channel', name=\n 'create', description='creates a channel', options=ch_cre_opt)\n", (55088, 55204), False, 'from discord_slash import ButtonStyle, ComponentContext, SlashContext, cog_ext\n'), ((57313, 57447), 'discord_slash.cog_ext.cog_subcommand', 'cog_ext.cog_subcommand', ([], {'base': '"""server"""', 'subcommand_group': '"""channel"""', 'name': '"""edit"""', 'description': '"""edits a channel"""', 'options': 'ch_edt_opt'}), "(base='server', subcommand_group='channel', name=\n 'edit', description='edits a channel', 
options=ch_edt_opt)\n", (57335, 57447), False, 'from discord_slash import ButtonStyle, ComponentContext, SlashContext, cog_ext\n'), ((59376, 59580), 'discord_slash.cog_ext.cog_subcommand', 'cog_ext.cog_subcommand', ([], {'base': '"""server"""', 'subcommand_group': '"""channel"""', 'name': '"""permission_edit"""', 'options': 'ch_perm_edt_opt', 'description': '"""edits the permissions a user or a role has in a specific channel"""'}), "(base='server', subcommand_group='channel', name=\n 'permission_edit', options=ch_perm_edt_opt, description=\n 'edits the permissions a user or a role has in a specific channel')\n", (59398, 59580), False, 'from discord_slash import ButtonStyle, ComponentContext, SlashContext, cog_ext\n'), ((71871, 72009), 'discord_slash.cog_ext.cog_subcommand', 'cog_ext.cog_subcommand', ([], {'base': '"""server"""', 'subcommand_group': '"""channel"""', 'name': '"""delete"""', 'description': '"""deletes a channel"""', 'options': 'ch_del_opt'}), "(base='server', subcommand_group='channel', name=\n 'delete', description='deletes a channel', options=ch_del_opt)\n", (71893, 72009), False, 'from discord_slash import ButtonStyle, ComponentContext, SlashContext, cog_ext\n'), ((72831, 72972), 'discord_slash.cog_ext.cog_subcommand', 'cog_ext.cog_subcommand', ([], {'base': '"""server"""', 'subcommand_group': '"""category"""', 'name': '"""create"""', 'description': '"""creates a category"""', 'options': 'cat_cre_opt'}), "(base='server', subcommand_group='category', name=\n 'create', description='creates a category', options=cat_cre_opt)\n", (72853, 72972), False, 'from discord_slash import ButtonStyle, ComponentContext, SlashContext, cog_ext\n'), ((74331, 74468), 'discord_slash.cog_ext.cog_subcommand', 'cog_ext.cog_subcommand', ([], {'base': '"""server"""', 'subcommand_group': '"""category"""', 'name': '"""edit"""', 'description': '"""edits a category"""', 'options': 'cat_edt_opt'}), "(base='server', subcommand_group='category', name=\n 'edit', description='edits 
a category', options=cat_edt_opt)\n", (74353, 74468), False, 'from discord_slash import ButtonStyle, ComponentContext, SlashContext, cog_ext\n'), ((86226, 86367), 'discord_slash.cog_ext.cog_subcommand', 'cog_ext.cog_subcommand', ([], {'base': '"""server"""', 'subcommand_group': '"""category"""', 'name': '"""delete"""', 'description': '"""deletes a category"""', 'options': 'cat_del_opt'}), "(base='server', subcommand_group='category', name=\n 'delete', description='deletes a category', options=cat_del_opt)\n", (86248, 86367), False, 'from discord_slash import ButtonStyle, ComponentContext, SlashContext, cog_ext\n'), ((543, 615), 'utils.utils.LOGGER.debug', 'utils.LOGGER.debug', (['f"""Successfully loaded cog {self.__class__.__name__}"""'], {}), "(f'Successfully loaded cog {self.__class__.__name__}')\n", (561, 615), False, 'from utils import punishments, utils\n'), ((4005, 4044), 'discord_slash.utils.manage_components.create_actionrow', 'manage_components.create_actionrow', (['sel'], {}), '(sel)\n', (4039, 4044), False, 'from discord_slash.utils import manage_components\n'), ((9978, 10018), 'discord_slash.utils.manage_components.create_actionrow', 'manage_components.create_actionrow', (['sel2'], {}), '(sel2)\n', (10012, 10018), False, 'from discord_slash.utils import manage_components\n'), ((13946, 13995), 'discord_slash.utils.manage_components.create_actionrow', 'manage_components.create_actionrow', (['*user_setbtn1'], {}), '(*user_setbtn1)\n', (13980, 13995), False, 'from discord_slash.utils import manage_components\n'), ((14030, 14079), 'discord_slash.utils.manage_components.create_actionrow', 'manage_components.create_actionrow', (['*user_setbtn2'], {}), '(*user_setbtn2)\n', (14064, 14079), False, 'from discord_slash.utils import manage_components\n'), ((14114, 14163), 'discord_slash.utils.manage_components.create_actionrow', 'manage_components.create_actionrow', (['*user_setbtn3'], {}), '(*user_setbtn3)\n', (14148, 14163), False, 'from discord_slash.utils import 
manage_components\n'), ((41789, 41835), 're.search', 're.search', (['"""^#(?:[0-9a-fA-F]{3}){1,2}$"""', 'color'], {}), "('^#(?:[0-9a-fA-F]{3}){1,2}$', color)\n", (41798, 41835), False, 'import re\n'), ((42152, 42214), 'discord.Colour.from_rgb', 'discord.Colour.from_rgb', ([], {'r': 'rgbval[0]', 'g': 'rgbval[1]', 'b': 'rgbval[2]'}), '(r=rgbval[0], g=rgbval[1], b=rgbval[2])\n', (42175, 42214), False, 'import discord\n'), ((42696, 42746), 'discord_slash.utils.manage_components.create_actionrow', 'manage_components.create_actionrow', (['*anypermbutton'], {}), '(*anypermbutton)\n', (42730, 42746), False, 'from discord_slash.utils import manage_components\n'), ((42764, 42812), 'discord_slash.utils.manage_components.create_actionrow', 'manage_components.create_actionrow', (['*adminbutton'], {}), '(*adminbutton)\n', (42798, 42812), False, 'from discord_slash.utils import manage_components\n'), ((49035, 49080), 'discord_slash.utils.manage_components.create_actionrow', 'manage_components.create_actionrow', (['*perm_edt'], {}), '(*perm_edt)\n', (49069, 49080), False, 'from discord_slash.utils import manage_components\n'), ((49328, 49376), 'discord_slash.utils.manage_components.create_actionrow', 'manage_components.create_actionrow', (['*adminbutton'], {}), '(*adminbutton)\n', (49362, 49376), False, 'from discord_slash.utils import manage_components\n'), ((13305, 13393), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'label': '"""BAN"""', 'style': 'ButtonStyle.red', 'custom_id': '"""ban"""'}), "(label='BAN', style=ButtonStyle.red,\n custom_id='ban')\n", (13336, 13393), False, 'from discord_slash.utils import manage_components\n'), ((13403, 13493), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'label': '"""KICK"""', 'style': 'ButtonStyle.red', 'custom_id': '"""kick"""'}), "(label='KICK', style=ButtonStyle.red,\n custom_id='kick')\n", (13434, 13493), False, 'from 
discord_slash.utils import manage_components\n'), ((13538, 13629), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'label': '"""WARN"""', 'style': 'ButtonStyle.blue', 'custom_id': '"""warn"""'}), "(label='WARN', style=ButtonStyle.blue,\n custom_id='warn')\n", (13569, 13629), False, 'from discord_slash.utils import manage_components\n'), ((13639, 13730), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'label': '"""MUTE"""', 'style': 'ButtonStyle.blue', 'custom_id': '"""mute"""'}), "(label='MUTE', style=ButtonStyle.blue,\n custom_id='mute')\n", (13670, 13730), False, 'from discord_slash.utils import manage_components\n'), ((13775, 13875), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'label': '"""Do nothing"""', 'style': 'ButtonStyle.gray', 'custom_id': '"""nothing"""'}), "(label='Do nothing', style=ButtonStyle.gray,\n custom_id='nothing')\n", (13806, 13875), False, 'from discord_slash.utils import manage_components\n'), ((14391, 14500), 'discord.ext.commands.MissingPermissions', 'discord.ext.commands.MissingPermissions', ([], {'missing_perms': "['manage_messages', 'ban_members', 'kick_members']"}), "(missing_perms=['manage_messages',\n 'ban_members', 'kick_members'])\n", (14430, 14500), False, 'import discord\n'), ((16824, 16869), 'discord_slash.utils.manage_components.create_actionrow', 'manage_components.create_actionrow', (['*mute_btn'], {}), '(*mute_btn)\n', (16858, 16869), False, 'from discord_slash.utils import manage_components\n'), ((39310, 39381), 'discord.ext.commands.MissingPermissions', 'discord.ext.commands.MissingPermissions', ([], {'missing_perms': "['manage_roles']"}), "(missing_perms=['manage_roles'])\n", (39349, 39381), False, 'import discord\n'), ((40254, 40325), 'discord.ext.commands.MissingPermissions', 'discord.ext.commands.MissingPermissions', ([], {'missing_perms': "['manage_roles']"}), 
"(missing_perms=['manage_roles'])\n", (40293, 40325), False, 'import discord\n'), ((41694, 41772), 'discord.ext.commands.errors.MissingPermissions', 'discord.ext.commands.errors.MissingPermissions', ([], {'missing_perms': "['manage_roles']"}), "(missing_perms=['manage_roles'])\n", (41740, 41772), False, 'import discord\n'), ((41902, 41967), 'discord.ext.commands.BadArgument', 'discord.ext.commands.BadArgument', (['"""color is not a hex-color code"""'], {}), "('color is not a hex-color code')\n", (41934, 41967), False, 'import discord\n'), ((42254, 42344), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'label': '"""Yes"""', 'style': 'ButtonStyle.green', 'custom_id': '"""yes"""'}), "(label='Yes', style=ButtonStyle.green,\n custom_id='yes')\n", (42285, 42344), False, 'from discord_slash.utils import manage_components\n'), ((42354, 42440), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'label': '"""No"""', 'style': 'ButtonStyle.red', 'custom_id': '"""no"""'}), "(label='No', style=ButtonStyle.red,\n custom_id='no')\n", (42385, 42440), False, 'from discord_slash.utils import manage_components\n'), ((42485, 42575), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'label': '"""Yes"""', 'style': 'ButtonStyle.green', 'custom_id': '"""yes"""'}), "(label='Yes', style=ButtonStyle.green,\n custom_id='yes')\n", (42516, 42575), False, 'from discord_slash.utils import manage_components\n'), ((42585, 42671), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'label': '"""No"""', 'style': 'ButtonStyle.red', 'custom_id': '"""no"""'}), "(label='No', style=ButtonStyle.red,\n custom_id='no')\n", (42616, 42671), False, 'from discord_slash.utils import manage_components\n'), ((47867, 47945), 'discord.ext.commands.errors.MissingPermissions', 'discord.ext.commands.errors.MissingPermissions', ([], 
{'missing_perms': "['manage_roles']"}), "(missing_perms=['manage_roles'])\n", (47913, 47945), False, 'import discord\n'), ((48162, 48208), 're.search', 're.search', (['"""^#(?:[0-9a-fA-F]{3}){1,2}$"""', 'color'], {}), "('^#(?:[0-9a-fA-F]{3}){1,2}$', color)\n", (48171, 48208), False, 'import re\n'), ((48500, 48562), 'discord.Colour.from_rgb', 'discord.Colour.from_rgb', ([], {'r': 'rgbval[0]', 'g': 'rgbval[1]', 'b': 'rgbval[2]'}), '(r=rgbval[0], g=rgbval[1], b=rgbval[2])\n', (48523, 48562), False, 'import discord\n'), ((48696, 48786), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'label': '"""Yes"""', 'style': 'ButtonStyle.green', 'custom_id': '"""yes"""'}), "(label='Yes', style=ButtonStyle.green,\n custom_id='yes')\n", (48727, 48786), False, 'from discord_slash.utils import manage_components\n'), ((48859, 48945), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'label': '"""No"""', 'style': 'ButtonStyle.red', 'custom_id': '"""no"""'}), "(label='No', style=ButtonStyle.red,\n custom_id='no')\n", (48890, 48945), False, 'from discord_slash.utils import manage_components\n'), ((49117, 49207), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'label': '"""Yes"""', 'style': 'ButtonStyle.green', 'custom_id': '"""yes"""'}), "(label='Yes', style=ButtonStyle.green,\n custom_id='yes')\n", (49148, 49207), False, 'from discord_slash.utils import manage_components\n'), ((49217, 49303), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'label': '"""No"""', 'style': 'ButtonStyle.red', 'custom_id': '"""no"""'}), "(label='No', style=ButtonStyle.red,\n custom_id='no')\n", (49248, 49303), False, 'from discord_slash.utils import manage_components\n'), ((53834, 53905), 'discord.ext.commands.MissingPermissions', 'discord.ext.commands.MissingPermissions', ([], {'missing_perms': "['manage_roles']"}), 
"(missing_perms=['manage_roles'])\n", (53873, 53905), False, 'import discord\n'), ((55522, 55596), 'discord.ext.commands.MissingPermissions', 'discord.ext.commands.MissingPermissions', ([], {'missing_perms': "['manage_channels']"}), "(missing_perms=['manage_channels'])\n", (55561, 55596), False, 'import discord\n'), ((57868, 57942), 'discord.ext.commands.MissingPermissions', 'discord.ext.commands.MissingPermissions', ([], {'missing_perms': "['manage_channels']"}), "(missing_perms=['manage_channels'])\n", (57907, 57942), False, 'import discord\n'), ((60003, 60103), 'discord.ext.commands.MissingPermissions', 'discord.ext.commands.MissingPermissions', ([], {'missing_perms': "['manage_channels', 'manage_permissions']"}), "(missing_perms=['manage_channels',\n 'manage_permissions'])\n", (60042, 60103), False, 'import discord\n'), ((63295, 63339), 'discord_slash.utils.manage_components.create_actionrow', 'manage_components.create_actionrow', (['perm_sel'], {}), '(perm_sel)\n', (63329, 63339), False, 'from discord_slash.utils import manage_components\n'), ((64189, 65426), 'discord.PermissionOverwrite', 'discord.PermissionOverwrite', ([], {'view_channel': "(True if 'view_channel' in perms.selected_options else False)", 'manage_channels': "(True if 'manage_channels' in perms.selected_options else False)", 'manage_permissions': "(True if 'manage_permissions' in perms.selected_options else False)", 'manage_webhooks': "(True if 'manage_webhooks' in perms.selected_options else False)", 'create_instant_invite': "(True if 'create_instant_invite' in perms.selected_options else False)", 'send_messages': "(True if 'send_messages' in perms.selected_options else False)", 'embed_links': "(True if 'embed_links' in perms.selected_options else False)", 'attach_files': "(True if 'attach_files' in perms.selected_options else False)", 'add_reactions': "(True if 'add_reactions' in perms.selected_options else False)", 'use_external_emojis': "(True if 'use_external_emojis' in 
perms.selected_options else False)", 'mention_everyone': "(True if 'mention_everyone' in perms.selected_options else False)", 'manage_messages': "(True if 'manage_messages' in perms.selected_options else False)", 'read_message_history': "(True if 'read_message_history' in perms.selected_options else False)", 'send_tts_messages': "(True if 'send_tts_messages' in perms.selected_options else False)"}), "(view_channel=True if 'view_channel' in perms.\n selected_options else False, manage_channels=True if 'manage_channels' in\n perms.selected_options else False, manage_permissions=True if \n 'manage_permissions' in perms.selected_options else False,\n manage_webhooks=True if 'manage_webhooks' in perms.selected_options else\n False, create_instant_invite=True if 'create_instant_invite' in perms.\n selected_options else False, send_messages=True if 'send_messages' in\n perms.selected_options else False, embed_links=True if 'embed_links' in\n perms.selected_options else False, attach_files=True if 'attach_files' in\n perms.selected_options else False, add_reactions=True if \n 'add_reactions' in perms.selected_options else False,\n use_external_emojis=True if 'use_external_emojis' in perms.\n selected_options else False, mention_everyone=True if \n 'mention_everyone' in perms.selected_options else False,\n manage_messages=True if 'manage_messages' in perms.selected_options else\n False, read_message_history=True if 'read_message_history' in perms.\n selected_options else False, send_tts_messages=True if \n 'send_tts_messages' in perms.selected_options else False)\n", (64216, 65426), False, 'import discord\n'), ((72260, 72334), 'discord.ext.commands.MissingPermissions', 'discord.ext.commands.MissingPermissions', ([], {'missing_perms': "['manage_channels']"}), "(missing_perms=['manage_channels'])\n", (72299, 72334), False, 'import discord\n'), ((73184, 73258), 'discord.ext.commands.MissingPermissions', 'discord.ext.commands.MissingPermissions', ([], {'missing_perms': 
"['manage_channels']"}), "(missing_perms=['manage_channels'])\n", (73223, 73258), False, 'import discord\n'), ((74833, 74907), 'discord.ext.commands.MissingPermissions', 'discord.ext.commands.MissingPermissions', ([], {'missing_perms': "['manage_channels']"}), "(missing_perms=['manage_channels'])\n", (74872, 74907), False, 'import discord\n'), ((86576, 86650), 'discord.ext.commands.MissingPermissions', 'discord.ext.commands.MissingPermissions', ([], {'missing_perms': "['manage_channels']"}), "(missing_perms=['manage_channels'])\n", (86615, 86650), False, 'import discord\n'), ((4254, 4389), 'discord_slash.utils.manage_components.wait_for_component', 'manage_components.wait_for_component', (['self.bot'], {'components': '[selrow]', 'timeout': '(600)', 'check': '(lambda msg: msg.author.id == ctx.author.id)'}), '(self.bot, components=[selrow], timeout\n =600, check=lambda msg: msg.author.id == ctx.author.id)\n', (4290, 4389), False, 'from discord_slash.utils import manage_components\n'), ((10237, 10372), 'discord_slash.utils.manage_components.wait_for_component', 'manage_components.wait_for_component', (['self.bot'], {'components': '[sel2row]', 'timeout': '(600)', 'check': '(lambda msg: msg.author.id == ctx.author.id)'}), '(self.bot, components=[sel2row],\n timeout=600, check=lambda msg: msg.author.id == ctx.author.id)\n', (10273, 10372), False, 'from discord_slash.utils import manage_components\n'), ((15181, 15390), 'discord_slash.utils.manage_components.wait_for_component', 'manage_components.wait_for_component', (['self.bot'], {'components': '[user_buttons_actionrow1, user_buttons_actionrow2, user_buttons_actionrow3]', 'timeout': '(60)', 'check': '(lambda msg: ctx.author.id == msg.author.id)'}), '(self.bot, components=[\n user_buttons_actionrow1, user_buttons_actionrow2,\n user_buttons_actionrow3], timeout=60, check=lambda msg: ctx.author.id ==\n msg.author.id)\n', (15217, 15390), False, 'from discord_slash.utils import manage_components\n'), ((16272, 16362), 
'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'label': '"""Minutes"""', 'style': 'ButtonStyle.red', 'custom_id': '"""m"""'}), "(label='Minutes', style=ButtonStyle.red,\n custom_id='m')\n", (16303, 16362), False, 'from discord_slash.utils import manage_components\n'), ((16414, 16502), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'label': '"""Hours"""', 'style': 'ButtonStyle.red', 'custom_id': '"""h"""'}), "(label='Hours', style=ButtonStyle.red,\n custom_id='h')\n", (16445, 16502), False, 'from discord_slash.utils import manage_components\n'), ((16554, 16641), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'label': '"""Days"""', 'style': 'ButtonStyle.red', 'custom_id': '"""d"""'}), "(label='Days', style=ButtonStyle.red,\n custom_id='d')\n", (16585, 16641), False, 'from discord_slash.utils import manage_components\n'), ((16655, 16745), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'label': '"""Cancel"""', 'style': 'ButtonStyle.gray', 'custom_id': '"""c"""'}), "(label='Cancel', style=ButtonStyle.gray,\n custom_id='c')\n", (16686, 16745), False, 'from discord_slash.utils import manage_components\n'), ((17152, 17302), 'discord_slash.utils.manage_components.manage_components.wait_for_component', 'manage_components.manage_components.wait_for_component', (['self.bot'], {'components': 'mute_actionrow', 'check': '(lambda msg: msg.author.id == buttons.author.id)'}), '(self.bot, components\n =mute_actionrow, check=lambda msg: msg.author.id == buttons.author.id)\n', (17206, 17302), False, 'from discord_slash.utils import manage_components\n'), ((19765, 19808), 'discord_slash.utils.manage_components.create_actionrow', 'manage_components.create_actionrow', (['*times1'], {}), '(*times1)\n', (19799, 19808), False, 'from discord_slash.utils import manage_components\n'), ((19838, 19881), 
'discord_slash.utils.manage_components.create_actionrow', 'manage_components.create_actionrow', (['*times2'], {}), '(*times2)\n', (19872, 19881), False, 'from discord_slash.utils import manage_components\n'), ((19911, 19954), 'discord_slash.utils.manage_components.create_actionrow', 'manage_components.create_actionrow', (['*times3'], {}), '(*times3)\n', (19945, 19954), False, 'from discord_slash.utils import manage_components\n'), ((25760, 25803), 'discord_slash.utils.manage_components.create_actionrow', 'manage_components.create_actionrow', (['*times1'], {}), '(*times1)\n', (25794, 25803), False, 'from discord_slash.utils import manage_components\n'), ((25833, 25876), 'discord_slash.utils.manage_components.create_actionrow', 'manage_components.create_actionrow', (['*times2'], {}), '(*times2)\n', (25867, 25876), False, 'from discord_slash.utils import manage_components\n'), ((25906, 25949), 'discord_slash.utils.manage_components.create_actionrow', 'manage_components.create_actionrow', (['*times3'], {}), '(*times3)\n', (25940, 25949), False, 'from discord_slash.utils import manage_components\n'), ((25979, 26022), 'discord_slash.utils.manage_components.create_actionrow', 'manage_components.create_actionrow', (['*times4'], {}), '(*times4)\n', (26013, 26022), False, 'from discord_slash.utils import manage_components\n'), ((26052, 26095), 'discord_slash.utils.manage_components.create_actionrow', 'manage_components.create_actionrow', (['*times5'], {}), '(*times5)\n', (26086, 26095), False, 'from discord_slash.utils import manage_components\n'), ((32493, 32536), 'discord_slash.utils.manage_components.create_actionrow', 'manage_components.create_actionrow', (['*times1'], {}), '(*times1)\n', (32527, 32536), False, 'from discord_slash.utils import manage_components\n'), ((32566, 32609), 'discord_slash.utils.manage_components.create_actionrow', 'manage_components.create_actionrow', (['*times2'], {}), '(*times2)\n', (32600, 32609), False, 'from discord_slash.utils import 
manage_components\n'), ((32639, 32682), 'discord_slash.utils.manage_components.create_actionrow', 'manage_components.create_actionrow', (['*times3'], {}), '(*times3)\n', (32673, 32682), False, 'from discord_slash.utils import manage_components\n'), ((32712, 32755), 'discord_slash.utils.manage_components.create_actionrow', 'manage_components.create_actionrow', (['*times4'], {}), '(*times4)\n', (32746, 32755), False, 'from discord_slash.utils import manage_components\n'), ((32785, 32828), 'discord_slash.utils.manage_components.create_actionrow', 'manage_components.create_actionrow', (['*times5'], {}), '(*times5)\n', (32819, 32828), False, 'from discord_slash.utils import manage_components\n'), ((35797, 35832), 'utils.punishments.warn', 'punishments.warn', (['ctx', 'user', 'reason'], {}), '(ctx, user, reason)\n', (35813, 35832), False, 'from utils import punishments, utils\n'), ((36511, 36546), 'utils.punishments.kick', 'punishments.kick', (['ctx', 'user', 'reason'], {}), '(ctx, user, reason)\n', (36527, 36546), False, 'from utils import punishments, utils\n'), ((37223, 37257), 'utils.punishments.ban', 'punishments.ban', (['ctx', 'user', 'reason'], {}), '(ctx, user, reason)\n', (37238, 37257), False, 'from utils import punishments, utils\n'), ((42107, 42128), 'discord.Permissions', 'discord.Permissions', ([], {}), '()\n', (42126, 42128), False, 'import discord\n'), ((43005, 43140), 'discord_slash.utils.manage_components.wait_for_component', 'manage_components.wait_for_component', (['self.bot'], {'components': '[any_ar]', 'timeout': '(600)', 'check': '(lambda msg: ctx.author.id == msg.author.id)'}), '(self.bot, components=[any_ar], timeout\n =600, check=lambda msg: ctx.author.id == msg.author.id)\n', (43041, 43140), False, 'from discord_slash.utils import manage_components\n'), ((44393, 44528), 'discord_slash.utils.manage_components.wait_for_component', 'manage_components.wait_for_component', (['self.bot'], {'components': '[adm_ar]', 'timeout': '(600)', 'check': 
'(lambda msg: msg.author.id == ctx.author.id)'}), '(self.bot, components=[adm_ar], timeout\n =600, check=lambda msg: msg.author.id == ctx.author.id)\n', (44429, 44528), False, 'from discord_slash.utils import manage_components\n'), ((48283, 48348), 'discord.ext.commands.BadArgument', 'discord.ext.commands.BadArgument', (['"""color is not a hex-color code"""'], {}), "('color is not a hex-color code')\n", (48315, 48348), False, 'import discord\n'), ((49560, 49691), 'discord_slash.utils.manage_components.wait_for_component', 'manage_components.wait_for_component', (['self.bot'], {'components': '[perm_ar]', 'timeout': '(600)', 'check': '(lambda m: m.author.id == ctx.author.id)'}), '(self.bot, components=[perm_ar],\n timeout=600, check=lambda m: m.author.id == ctx.author.id)\n', (49596, 49691), False, 'from discord_slash.utils import manage_components\n'), ((69118, 69162), 'discord_slash.utils.manage_components.create_actionrow', 'manage_components.create_actionrow', (['perm_sel'], {}), '(perm_sel)\n', (69152, 69162), False, 'from discord_slash.utils import manage_components\n'), ((70016, 71023), 'discord.PermissionOverwrite', 'discord.PermissionOverwrite', ([], {'view_channel': "(True if 'view_channel' in perms.selected_options else False)", 'manage_channels': "(True if 'manage_channels' in perms.selected_options else False)", 'manage_permissions': "(True if 'manage_permissions' in perms.selected_options else False)", 'create_instant_invite': "(True if 'create_instant_invite' in perms.selected_options else False)", 'connect': "(True if 'connect' in perms.selected_options else False)", 'speak': "(True if 'speak' in perms.selected_options else False)", 'stream': "(True if 'stream' in perms.selected_options else False)", 'use_voice_activation': "(True if 'use_voice_activation' in perms.selected_options else False)", 'priority_speaker': "(True if 'priority_speaker' in perms.selected_options else False)", 'mute_members': "(True if 'mute_members' in perms.selected_options 
else False)", 'deafen_members': "(True if 'deafen_members' in perms.selected_options else False)", 'move_members': "(True if 'move_members' in perms.selected_options else False)"}), "(view_channel=True if 'view_channel' in perms.\n selected_options else False, manage_channels=True if 'manage_channels' in\n perms.selected_options else False, manage_permissions=True if \n 'manage_permissions' in perms.selected_options else False,\n create_instant_invite=True if 'create_instant_invite' in perms.\n selected_options else False, connect=True if 'connect' in perms.\n selected_options else False, speak=True if 'speak' in perms.\n selected_options else False, stream=True if 'stream' in perms.\n selected_options else False, use_voice_activation=True if \n 'use_voice_activation' in perms.selected_options else False,\n priority_speaker=True if 'priority_speaker' in perms.selected_options else\n False, mute_members=True if 'mute_members' in perms.selected_options else\n False, deafen_members=True if 'deafen_members' in perms.\n selected_options else False, move_members=True if 'move_members' in\n perms.selected_options else False)\n", (70043, 71023), False, 'import discord\n'), ((78599, 78648), 'discord_slash.utils.manage_components.create_actionrow', 'manage_components.create_actionrow', (['text_perm_sel'], {}), '(text_perm_sel)\n', (78633, 78648), False, 'from discord_slash.utils import manage_components\n'), ((81562, 81612), 'discord_slash.utils.manage_components.create_actionrow', 'manage_components.create_actionrow', (['voice_perm_sel'], {}), '(voice_perm_sel)\n', (81596, 81612), False, 'from discord_slash.utils import manage_components\n'), ((82671, 84661), 'discord.PermissionOverwrite', 'discord.PermissionOverwrite', ([], {'view_channel': "(True if 'view_channel' in text_perms.selected_options else False)", 'manage_channels': "(True if 'manage_channels' in text_perms.selected_options else False)", 'manage_permissions': "(True if 'manage_permissions' in 
text_perms.selected_options else False)", 'manage_webhooks': "(True if 'manage_webhooks' in text_perms.selected_options else False)", 'create_instant_invite': "(True if 'create_instant_invite' in text_perms.selected_options else False)", 'send_messages': "(True if 'send_messages' in text_perms.selected_options else False)", 'embed_links': "(True if 'embed_links' in text_perms.selected_options else False)", 'attach_files': "(True if 'attach_files' in text_perms.selected_options else False)", 'add_reactions': "(True if 'add_reactions' in text_perms.selected_options else False)", 'use_external_emojis': "(True if 'use_external_emojis' in text_perms.selected_options else False)", 'mention_everyone': "(True if 'mention_everyone' in text_perms.selected_options else False)", 'manage_messages': "(True if 'manage_messages' in text_perms.selected_options else False)", 'read_message_history': "(True if 'read_message_history' in text_perms.selected_options else False)", 'send_tts_messages': "(True if 'send_tts_messages' in text_perms.selected_options else False)", 'connect': "(True if 'connect' in voice_perms.selected_options else False)", 'speak': "(True if 'speak' in voice_perms.selected_options else False)", 'stream': "(True if 'stream' in voice_perms.selected_options else False)", 'use_voice_activation': "(True if 'use_voice_activation' in voice_perms.selected_options else False)", 'priority_speaker': "(True if 'priority_speaker' in voice_perms.selected_options else False)", 'mute_members': "(True if 'mute_members' in voice_perms.selected_options else False)", 'deafen_members': "(True if 'deafen_members' in voice_perms.selected_options else False)", 'move_members': "(True if 'move_members' in voice_perms.selected_options else False)"}), "(view_channel=True if 'view_channel' in\n text_perms.selected_options else False, manage_channels=True if \n 'manage_channels' in text_perms.selected_options else False,\n manage_permissions=True if 'manage_permissions' in text_perms.\n 
selected_options else False, manage_webhooks=True if 'manage_webhooks' in\n text_perms.selected_options else False, create_instant_invite=True if \n 'create_instant_invite' in text_perms.selected_options else False,\n send_messages=True if 'send_messages' in text_perms.selected_options else\n False, embed_links=True if 'embed_links' in text_perms.selected_options\n else False, attach_files=True if 'attach_files' in text_perms.\n selected_options else False, add_reactions=True if 'add_reactions' in\n text_perms.selected_options else False, use_external_emojis=True if \n 'use_external_emojis' in text_perms.selected_options else False,\n mention_everyone=True if 'mention_everyone' in text_perms.\n selected_options else False, manage_messages=True if 'manage_messages' in\n text_perms.selected_options else False, read_message_history=True if \n 'read_message_history' in text_perms.selected_options else False,\n send_tts_messages=True if 'send_tts_messages' in text_perms.\n selected_options else False, connect=True if 'connect' in voice_perms.\n selected_options else False, speak=True if 'speak' in voice_perms.\n selected_options else False, stream=True if 'stream' in voice_perms.\n selected_options else False, use_voice_activation=True if \n 'use_voice_activation' in voice_perms.selected_options else False,\n priority_speaker=True if 'priority_speaker' in voice_perms.\n selected_options else False, mute_members=True if 'mute_members' in\n voice_perms.selected_options else False, deafen_members=True if \n 'deafen_members' in voice_perms.selected_options else False,\n move_members=True if 'move_members' in voice_perms.selected_options else\n False)\n", (82698, 84661), False, 'import discord\n'), ((910, 1010), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""add reactions to messages"""', 'value': '"""add_reactions"""'}), "(label='add reactions to messages',\n value='add_reactions')\n", (948, 1010), 
False, 'from discord_slash.utils import manage_components\n'), ((1083, 1181), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""attach files to messages"""', 'value': '"""attach_files"""'}), "(label='attach files to messages',\n value='attach_files')\n", (1121, 1181), False, 'from discord_slash.utils import manage_components\n'), ((1254, 1339), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""ban members"""', 'value': '"""ban_members"""'}), "(label='ban members', value='ban_members'\n )\n", (1292, 1339), False, 'from discord_slash.utils import manage_components\n'), ((1411, 1508), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""change own nickname"""', 'value': '"""change_nickname"""'}), "(label='change own nickname', value=\n 'change_nickname')\n", (1449, 1508), False, 'from discord_slash.utils import manage_components\n'), ((1580, 1674), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""connect to voice channels"""', 'value': '"""connect"""'}), "(label='connect to voice channels',\n value='connect')\n", (1618, 1674), False, 'from discord_slash.utils import manage_components\n'), ((1747, 1865), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""create instant invite to a channel"""', 'value': '"""create_instant_invite"""'}), "(label=\n 'create instant invite to a channel', value='create_instant_invite')\n", (1785, 1865), False, 'from discord_slash.utils import manage_components\n'), ((1937, 2052), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""deafen other members in voice channels"""', 'value': '"""deafen_members"""'}), 
"(label=\n 'deafen other members in voice channels', value='deafen_members')\n", (1975, 2052), False, 'from discord_slash.utils import manage_components\n'), ((2124, 2220), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""embed links in channels"""', 'value': '"""embed_links"""'}), "(label='embed links in channels',\n value='embed_links')\n", (2162, 2220), False, 'from discord_slash.utils import manage_components\n'), ((2293, 2391), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""send external emojis"""', 'value': '"""external_emojis"""'}), "(label='send external emojis', value=\n 'external_emojis')\n", (2331, 2391), False, 'from discord_slash.utils import manage_components\n'), ((2463, 2550), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""kick members"""', 'value': '"""kick_members"""'}), "(label='kick members', value=\n 'kick_members')\n", (2501, 2550), False, 'from discord_slash.utils import manage_components\n'), ((2622, 2729), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""manage channels on the server"""', 'value': '"""manage_channels"""'}), "(label=\n 'manage channels on the server', value='manage_channels')\n", (2660, 2729), False, 'from discord_slash.utils import manage_components\n'), ((2801, 2903), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""manage emojis of the server"""', 'value': '"""manage_emojis"""'}), "(label='manage emojis of the server',\n value='manage_emojis')\n", (2839, 2903), False, 'from discord_slash.utils import manage_components\n'), ((2976, 3063), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': 
'"""manage guild"""', 'value': '"""manage_guild"""'}), "(label='manage guild', value=\n 'manage_guild')\n", (3014, 3063), False, 'from discord_slash.utils import manage_components\n'), ((3135, 3228), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""manage messages"""', 'value': '"""manage_messages"""'}), "(label='manage messages', value=\n 'manage_messages')\n", (3173, 3228), False, 'from discord_slash.utils import manage_components\n'), ((3300, 3399), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""manage all nicknames"""', 'value': '"""manage_nicknames"""'}), "(label='manage all nicknames', value=\n 'manage_nicknames')\n", (3338, 3399), False, 'from discord_slash.utils import manage_components\n'), ((3471, 3582), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""manage the permission of roles"""', 'value': '"""manage_permissions"""'}), "(label=\n 'manage the permission of roles', value='manage_permissions')\n", (3509, 3582), False, 'from discord_slash.utils import manage_components\n'), ((3654, 3763), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""manage roles and their permissions"""', 'value': '"""manage_roles"""'}), "(label=\n 'manage roles and their permissions', value='manage_roles')\n", (3692, 3763), False, 'from discord_slash.utils import manage_components\n'), ((3835, 3928), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""manage webhooks"""', 'value': '"""manage_webhooks"""'}), "(label='manage webhooks', value=\n 'manage_webhooks')\n", (3873, 3928), False, 'from discord_slash.utils import manage_components\n'), ((6925, 7033), 'discord_slash.utils.manage_components.create_select_option', 
'manage_components.create_select_option', ([], {'label': '"""mention everyone in a message"""', 'value': '"""mention_everyone"""'}), "(label=\n 'mention everyone in a message', value='mention_everyone')\n", (6963, 7033), False, 'from discord_slash.utils import manage_components\n'), ((7105, 7214), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""move members across voice channels"""', 'value': '"""move_members"""'}), "(label=\n 'move members across voice channels', value='move_members')\n", (7143, 7214), False, 'from discord_slash.utils import manage_components\n'), ((7286, 7391), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""mute members in voice channels"""', 'value': '"""mute_members"""'}), "(label=\n 'mute members in voice channels', value='mute_members')\n", (7324, 7391), False, 'from discord_slash.utils import manage_components\n'), ((7463, 7558), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""priority speaker"""', 'value': '"""priority_speaker"""'}), "(label='priority speaker', value=\n 'priority_speaker')\n", (7501, 7558), False, 'from discord_slash.utils import manage_components\n'), ((7630, 7745), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""read message history in channels"""', 'value': '"""read_message_history"""'}), "(label=\n 'read message history in channels', value='read_message_history')\n", (7668, 7745), False, 'from discord_slash.utils import manage_components\n'), ((7817, 7922), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""read all messages in channels"""', 'value': '"""read_messages"""'}), "(label=\n 'read all messages in channels', value='read_messages')\n", (7855, 
7922), False, 'from discord_slash.utils import manage_components\n'), ((7994, 8107), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""request to speak in stage channels"""', 'value': '"""request_to_speak"""'}), "(label=\n 'request to speak in stage channels', value='request_to_speak')\n", (8032, 8107), False, 'from discord_slash.utils import manage_components\n'), ((8179, 8279), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""send messages in channels"""', 'value': '"""send_messages"""'}), "(label='send messages in channels',\n value='send_messages')\n", (8217, 8279), False, 'from discord_slash.utils import manage_components\n'), ((8352, 8461), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""send TTS messages in channels"""', 'value': '"""send_tts_messages"""'}), "(label=\n 'send TTS messages in channels', value='send_tts_messages')\n", (8390, 8461), False, 'from discord_slash.utils import manage_components\n'), ((8533, 8623), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""speak in voice channels"""', 'value': '"""speak"""'}), "(label='speak in voice channels',\n value='speak')\n", (8571, 8623), False, 'from discord_slash.utils import manage_components\n'), ((8696, 8805), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""stream in voice channels / enable camera"""', 'value': '"""stream"""'}), "(label=\n 'stream in voice channels / enable camera', value='stream')\n", (8734, 8805), False, 'from discord_slash.utils import manage_components\n'), ((8877, 8978), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""use external 
emojis"""', 'value': '"""use_external_emojis"""'}), "(label='use external emojis', value=\n 'use_external_emojis')\n", (8915, 8978), False, 'from discord_slash.utils import manage_components\n'), ((9050, 9161), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""use slash commands in channels"""', 'value': '"""use_slash_commands"""'}), "(label=\n 'use slash commands in channels', value='use_slash_commands')\n", (9088, 9161), False, 'from discord_slash.utils import manage_components\n'), ((9233, 9383), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""use voice activation in voice channels (else only push-to-talk)"""', 'value': '"""use_voice_activation"""'}), "(label=\n 'use voice activation in voice channels (else only push-to-talk)',\n value='use_voice_activation')\n", (9271, 9383), False, 'from discord_slash.utils import manage_components\n'), ((9451, 9546), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""view the audit-log"""', 'value': '"""view_audit_log"""'}), "(label='view the audit-log', value=\n 'view_audit_log')\n", (9489, 9546), False, 'from discord_slash.utils import manage_components\n'), ((9618, 9706), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""view channels"""', 'value': '"""view_channel"""'}), "(label='view channels', value=\n 'view_channel')\n", (9656, 9706), False, 'from discord_slash.utils import manage_components\n'), ((9778, 9879), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""view guild insights"""', 'value': '"""view_guild_insights"""'}), "(label='view guild insights', value=\n 'view_guild_insights')\n", (9816, 9879), False, 'from discord_slash.utils import 
manage_components\n'), ((17849, 17934), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'label': '"""5"""', 'style': 'ButtonStyle.red', 'custom_id': '"""5"""'}), "(label='5', style=ButtonStyle.red, custom_id='5'\n )\n", (17880, 17934), False, 'from discord_slash.utils import manage_components\n'), ((17997, 18083), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'label': '"""10"""', 'style': 'ButtonStyle.red', 'custom_id': '"""10"""'}), "(label='10', style=ButtonStyle.red,\n custom_id='10')\n", (18028, 18083), False, 'from discord_slash.utils import manage_components\n'), ((18147, 18233), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'label': '"""15"""', 'style': 'ButtonStyle.red', 'custom_id': '"""15"""'}), "(label='15', style=ButtonStyle.red,\n custom_id='15')\n", (18178, 18233), False, 'from discord_slash.utils import manage_components\n'), ((18297, 18383), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'label': '"""20"""', 'style': 'ButtonStyle.red', 'custom_id': '"""20"""'}), "(label='20', style=ButtonStyle.red,\n custom_id='20')\n", (18328, 18383), False, 'from discord_slash.utils import manage_components\n'), ((18492, 18578), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'label': '"""25"""', 'style': 'ButtonStyle.red', 'custom_id': '"""25"""'}), "(label='25', style=ButtonStyle.red,\n custom_id='25')\n", (18523, 18578), False, 'from discord_slash.utils import manage_components\n'), ((18642, 18728), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'label': '"""30"""', 'style': 'ButtonStyle.red', 'custom_id': '"""30"""'}), "(label='30', style=ButtonStyle.red,\n custom_id='30')\n", (18673, 18728), False, 'from discord_slash.utils import manage_components\n'), ((18792, 18878), 
'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'label': '"""35"""', 'style': 'ButtonStyle.red', 'custom_id': '"""35"""'}), "(label='35', style=ButtonStyle.red,\n custom_id='35')\n", (18823, 18878), False, 'from discord_slash.utils import manage_components\n'), ((18942, 19028), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'label': '"""40"""', 'style': 'ButtonStyle.red', 'custom_id': '"""40"""'}), "(label='40', style=ButtonStyle.red,\n custom_id='40')\n", (18973, 19028), False, 'from discord_slash.utils import manage_components\n'), ((19137, 19223), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'label': '"""45"""', 'style': 'ButtonStyle.red', 'custom_id': '"""45"""'}), "(label='45', style=ButtonStyle.red,\n custom_id='45')\n", (19168, 19223), False, 'from discord_slash.utils import manage_components\n'), ((19287, 19373), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'label': '"""50"""', 'style': 'ButtonStyle.red', 'custom_id': '"""50"""'}), "(label='50', style=ButtonStyle.red,\n custom_id='50')\n", (19318, 19373), False, 'from discord_slash.utils import manage_components\n'), ((19437, 19523), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'label': '"""55"""', 'style': 'ButtonStyle.red', 'custom_id': '"""55"""'}), "(label='55', style=ButtonStyle.red,\n custom_id='55')\n", (19468, 19523), False, 'from discord_slash.utils import manage_components\n'), ((19587, 19673), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'label': '"""60"""', 'style': 'ButtonStyle.red', 'custom_id': '"""60"""'}), "(label='60', style=ButtonStyle.red,\n custom_id='60')\n", (19618, 19673), False, 'from discord_slash.utils import manage_components\n'), ((21835, 21872), 'utils.punishments.mute', 
'punishments.mute', (['ctx', 'user', 'dur', '"""m"""'], {}), "(ctx, user, dur, 'm')\n", (21851, 21872), False, 'from utils import punishments, utils\n'), ((21970, 22055), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""1"""', 'custom_id': '"""1"""'}), "(style=ButtonStyle.red, label='1', custom_id='1'\n )\n", (22001, 22055), False, 'from discord_slash.utils import manage_components\n'), ((22118, 22203), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""2"""', 'custom_id': '"""2"""'}), "(style=ButtonStyle.red, label='2', custom_id='2'\n )\n", (22149, 22203), False, 'from discord_slash.utils import manage_components\n'), ((22266, 22351), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""3"""', 'custom_id': '"""3"""'}), "(style=ButtonStyle.red, label='3', custom_id='3'\n )\n", (22297, 22351), False, 'from discord_slash.utils import manage_components\n'), ((22414, 22499), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""4"""', 'custom_id': '"""4"""'}), "(style=ButtonStyle.red, label='4', custom_id='4'\n )\n", (22445, 22499), False, 'from discord_slash.utils import manage_components\n'), ((22562, 22647), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""5"""', 'custom_id': '"""5"""'}), "(style=ButtonStyle.red, label='5', custom_id='5'\n )\n", (22593, 22647), False, 'from discord_slash.utils import manage_components\n'), ((22755, 22840), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""6"""', 'custom_id': '"""6"""'}), "(style=ButtonStyle.red, label='6', 
custom_id='6'\n )\n", (22786, 22840), False, 'from discord_slash.utils import manage_components\n'), ((22903, 22988), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""7"""', 'custom_id': '"""7"""'}), "(style=ButtonStyle.red, label='7', custom_id='7'\n )\n", (22934, 22988), False, 'from discord_slash.utils import manage_components\n'), ((23051, 23136), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""8"""', 'custom_id': '"""8"""'}), "(style=ButtonStyle.red, label='8', custom_id='8'\n )\n", (23082, 23136), False, 'from discord_slash.utils import manage_components\n'), ((23199, 23284), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""9"""', 'custom_id': '"""9"""'}), "(style=ButtonStyle.red, label='9', custom_id='9'\n )\n", (23230, 23284), False, 'from discord_slash.utils import manage_components\n'), ((23347, 23433), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""10"""', 'custom_id': '"""10"""'}), "(style=ButtonStyle.red, label='10',\n custom_id='10')\n", (23378, 23433), False, 'from discord_slash.utils import manage_components\n'), ((23542, 23628), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""11"""', 'custom_id': '"""11"""'}), "(style=ButtonStyle.red, label='11',\n custom_id='11')\n", (23573, 23628), False, 'from discord_slash.utils import manage_components\n'), ((23692, 23778), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""12"""', 'custom_id': '"""12"""'}), "(style=ButtonStyle.red, label='12',\n custom_id='12')\n", (23723, 23778), False, 
'from discord_slash.utils import manage_components\n'), ((23842, 23928), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""13"""', 'custom_id': '"""13"""'}), "(style=ButtonStyle.red, label='13',\n custom_id='13')\n", (23873, 23928), False, 'from discord_slash.utils import manage_components\n'), ((23992, 24078), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""14"""', 'custom_id': '"""14"""'}), "(style=ButtonStyle.red, label='14',\n custom_id='14')\n", (24023, 24078), False, 'from discord_slash.utils import manage_components\n'), ((24142, 24228), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""15"""', 'custom_id': '"""15"""'}), "(style=ButtonStyle.red, label='15',\n custom_id='15')\n", (24173, 24228), False, 'from discord_slash.utils import manage_components\n'), ((24337, 24423), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""16"""', 'custom_id': '"""16"""'}), "(style=ButtonStyle.red, label='16',\n custom_id='16')\n", (24368, 24423), False, 'from discord_slash.utils import manage_components\n'), ((24487, 24573), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""17"""', 'custom_id': '"""17"""'}), "(style=ButtonStyle.red, label='17',\n custom_id='17')\n", (24518, 24573), False, 'from discord_slash.utils import manage_components\n'), ((24637, 24723), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""18"""', 'custom_id': '"""18"""'}), "(style=ButtonStyle.red, label='18',\n custom_id='18')\n", (24668, 24723), False, 'from discord_slash.utils import 
manage_components\n'), ((24787, 24873), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""19"""', 'custom_id': '"""19"""'}), "(style=ButtonStyle.red, label='19',\n custom_id='19')\n", (24818, 24873), False, 'from discord_slash.utils import manage_components\n'), ((24937, 25023), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""20"""', 'custom_id': '"""20"""'}), "(style=ButtonStyle.red, label='20',\n custom_id='20')\n", (24968, 25023), False, 'from discord_slash.utils import manage_components\n'), ((25132, 25218), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""21"""', 'custom_id': '"""21"""'}), "(style=ButtonStyle.red, label='21',\n custom_id='21')\n", (25163, 25218), False, 'from discord_slash.utils import manage_components\n'), ((25282, 25368), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""22"""', 'custom_id': '"""22"""'}), "(style=ButtonStyle.red, label='22',\n custom_id='22')\n", (25313, 25368), False, 'from discord_slash.utils import manage_components\n'), ((25432, 25518), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""23"""', 'custom_id': '"""23"""'}), "(style=ButtonStyle.red, label='23',\n custom_id='23')\n", (25463, 25518), False, 'from discord_slash.utils import manage_components\n'), ((25582, 25668), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""24"""', 'custom_id': '"""24"""'}), "(style=ButtonStyle.red, label='24',\n custom_id='24')\n", (25613, 25668), False, 'from discord_slash.utils import manage_components\n'), ((28420, 
28457), 'utils.punishments.mute', 'punishments.mute', (['ctx', 'user', 'dur', '"""h"""'], {}), "(ctx, user, dur, 'h')\n", (28436, 28457), False, 'from utils import punishments, utils\n'), ((28554, 28639), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""1"""', 'custom_id': '"""1"""'}), "(style=ButtonStyle.red, label='1', custom_id='1'\n )\n", (28585, 28639), False, 'from discord_slash.utils import manage_components\n'), ((28702, 28787), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""2"""', 'custom_id': '"""2"""'}), "(style=ButtonStyle.red, label='2', custom_id='2'\n )\n", (28733, 28787), False, 'from discord_slash.utils import manage_components\n'), ((28850, 28935), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""3"""', 'custom_id': '"""3"""'}), "(style=ButtonStyle.red, label='3', custom_id='3'\n )\n", (28881, 28935), False, 'from discord_slash.utils import manage_components\n'), ((28998, 29083), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""4"""', 'custom_id': '"""4"""'}), "(style=ButtonStyle.red, label='4', custom_id='4'\n )\n", (29029, 29083), False, 'from discord_slash.utils import manage_components\n'), ((29146, 29231), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""5"""', 'custom_id': '"""5"""'}), "(style=ButtonStyle.red, label='5', custom_id='5'\n )\n", (29177, 29231), False, 'from discord_slash.utils import manage_components\n'), ((29339, 29424), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""6"""', 'custom_id': '"""6"""'}), 
"(style=ButtonStyle.red, label='6', custom_id='6'\n )\n", (29370, 29424), False, 'from discord_slash.utils import manage_components\n'), ((29487, 29572), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""7"""', 'custom_id': '"""7"""'}), "(style=ButtonStyle.red, label='7', custom_id='7'\n )\n", (29518, 29572), False, 'from discord_slash.utils import manage_components\n'), ((29635, 29720), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""8"""', 'custom_id': '"""8"""'}), "(style=ButtonStyle.red, label='8', custom_id='8'\n )\n", (29666, 29720), False, 'from discord_slash.utils import manage_components\n'), ((29783, 29868), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""9"""', 'custom_id': '"""9"""'}), "(style=ButtonStyle.red, label='9', custom_id='9'\n )\n", (29814, 29868), False, 'from discord_slash.utils import manage_components\n'), ((29931, 30017), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""10"""', 'custom_id': '"""10"""'}), "(style=ButtonStyle.red, label='10',\n custom_id='10')\n", (29962, 30017), False, 'from discord_slash.utils import manage_components\n'), ((30126, 30212), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""11"""', 'custom_id': '"""11"""'}), "(style=ButtonStyle.red, label='11',\n custom_id='11')\n", (30157, 30212), False, 'from discord_slash.utils import manage_components\n'), ((30276, 30362), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""12"""', 'custom_id': '"""12"""'}), "(style=ButtonStyle.red, label='12',\n 
custom_id='12')\n", (30307, 30362), False, 'from discord_slash.utils import manage_components\n'), ((30426, 30512), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""13"""', 'custom_id': '"""13"""'}), "(style=ButtonStyle.red, label='13',\n custom_id='13')\n", (30457, 30512), False, 'from discord_slash.utils import manage_components\n'), ((30576, 30662), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""14"""', 'custom_id': '"""14"""'}), "(style=ButtonStyle.red, label='14',\n custom_id='14')\n", (30607, 30662), False, 'from discord_slash.utils import manage_components\n'), ((30726, 30812), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""15"""', 'custom_id': '"""15"""'}), "(style=ButtonStyle.red, label='15',\n custom_id='15')\n", (30757, 30812), False, 'from discord_slash.utils import manage_components\n'), ((30921, 31007), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""16"""', 'custom_id': '"""16"""'}), "(style=ButtonStyle.red, label='16',\n custom_id='16')\n", (30952, 31007), False, 'from discord_slash.utils import manage_components\n'), ((31071, 31157), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""17"""', 'custom_id': '"""17"""'}), "(style=ButtonStyle.red, label='17',\n custom_id='17')\n", (31102, 31157), False, 'from discord_slash.utils import manage_components\n'), ((31221, 31307), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""18"""', 'custom_id': '"""18"""'}), "(style=ButtonStyle.red, label='18',\n custom_id='18')\n", (31252, 31307), 
False, 'from discord_slash.utils import manage_components\n'), ((31371, 31457), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""19"""', 'custom_id': '"""19"""'}), "(style=ButtonStyle.red, label='19',\n custom_id='19')\n", (31402, 31457), False, 'from discord_slash.utils import manage_components\n'), ((31521, 31607), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""20"""', 'custom_id': '"""20"""'}), "(style=ButtonStyle.red, label='20',\n custom_id='20')\n", (31552, 31607), False, 'from discord_slash.utils import manage_components\n'), ((31716, 31802), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""21"""', 'custom_id': '"""21"""'}), "(style=ButtonStyle.red, label='21',\n custom_id='21')\n", (31747, 31802), False, 'from discord_slash.utils import manage_components\n'), ((31866, 31952), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""22"""', 'custom_id': '"""22"""'}), "(style=ButtonStyle.red, label='22',\n custom_id='22')\n", (31897, 31952), False, 'from discord_slash.utils import manage_components\n'), ((32016, 32102), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""23"""', 'custom_id': '"""23"""'}), "(style=ButtonStyle.red, label='23',\n custom_id='23')\n", (32047, 32102), False, 'from discord_slash.utils import manage_components\n'), ((32166, 32252), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""24"""', 'custom_id': '"""24"""'}), "(style=ButtonStyle.red, label='24',\n custom_id='24')\n", (32197, 32252), False, 'from discord_slash.utils 
import manage_components\n'), ((32316, 32402), 'discord_slash.utils.manage_components.create_button', 'manage_components.create_button', ([], {'style': 'ButtonStyle.red', 'label': '"""25"""', 'custom_id': '"""25"""'}), "(style=ButtonStyle.red, label='25',\n custom_id='25')\n", (32347, 32402), False, 'from discord_slash.utils import manage_components\n'), ((35078, 35115), 'utils.punishments.mute', 'punishments.mute', (['ctx', 'user', 'dur', '"""d"""'], {}), "(ctx, user, dur, 'd')\n", (35094, 35115), False, 'from utils import punishments, utils\n'), ((50967, 51102), 'discord_slash.utils.manage_components.wait_for_component', 'manage_components.wait_for_component', (['self.bot'], {'components': '[adm_ar]', 'timeout': '(600)', 'check': '(lambda msg: msg.author.id == ctx.author.id)'}), '(self.bot, components=[adm_ar], timeout\n =600, check=lambda msg: msg.author.id == ctx.author.id)\n', (51003, 51102), False, 'from discord_slash.utils import manage_components\n'), ((63694, 63825), 'discord_slash.utils.manage_components.wait_for_component', 'manage_components.wait_for_component', (['self.bot'], {'components': '[sel_ar]', 'timeout': '(600)', 'check': '(lambda p: p.author.id == ctx.author.id)'}), '(self.bot, components=[sel_ar], timeout\n =600, check=lambda p: p.author.id == ctx.author.id)\n', (63730, 63825), False, 'from discord_slash.utils import manage_components\n'), ((66461, 66483), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (66481, 66483), False, 'import traceback\n'), ((66500, 66525), 'utils.utils.LOGGER.error', 'utils.LOGGER.error', (['error'], {}), '(error)\n', (66518, 66525), False, 'from utils import punishments, utils\n'), ((20306, 20500), 'discord_slash.utils.manage_components.manage_components.wait_for_component', 'manage_components.manage_components.wait_for_component', (['self.bot'], {'components': '[times1_row, times2_row, times3_row]', 'check': '(lambda msg: msg.author.id == mute_btn_ctx.author.id)', 'timeout': '(180)'}), 
'(self.bot, components\n =[times1_row, times2_row, times3_row], check=lambda msg: msg.author.id ==\n mute_btn_ctx.author.id, timeout=180)\n', (20360, 20500), False, 'from discord_slash.utils import manage_components\n'), ((26471, 26689), 'discord_slash.utils.manage_components.manage_components.wait_for_component', 'manage_components.manage_components.wait_for_component', (['self.bot'], {'components': '[times1_row, times2_row, times3_row, times4_row, times5_row]', 'check': '(lambda msg: mute_btn_ctx.author.id == msg.author.id)', 'timeout': '(180)'}), '(self.bot, components\n =[times1_row, times2_row, times3_row, times4_row, times5_row], check=lambda\n msg: mute_btn_ctx.author.id == msg.author.id, timeout=180)\n', (26525, 26689), False, 'from discord_slash.utils import manage_components\n'), ((33204, 33422), 'discord_slash.utils.manage_components.manage_components.wait_for_component', 'manage_components.manage_components.wait_for_component', (['self.bot'], {'components': '[times1_row, times2_row, times3_row, times4_row, times5_row]', 'check': '(lambda msg: mute_btn_ctx.author.id == msg.author.id)', 'timeout': '(180)'}), '(self.bot, components\n =[times1_row, times2_row, times3_row, times4_row, times5_row], check=lambda\n msg: mute_btn_ctx.author.id == msg.author.id, timeout=180)\n', (33258, 33422), False, 'from discord_slash.utils import manage_components\n'), ((51573, 51594), 'discord.Permissions', 'discord.Permissions', ([], {}), '()\n', (51592, 51594), False, 'import discord\n'), ((60634, 60725), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""view the channel"""', 'value': '"""view_channel"""'}), "(label='view the channel', value=\n 'view_channel')\n", (60672, 60725), False, 'from discord_slash.utils import manage_components\n'), ((60813, 60909), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""manage the 
channel"""', 'value': '"""manage_channels"""'}), "(label='manage the channel', value=\n 'manage_channels')\n", (60851, 60909), False, 'from discord_slash.utils import manage_components\n'), ((60997, 61103), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""manage channel permissions"""', 'value': '"""manage_permissions"""'}), "(label='manage channel permissions',\n value='manage_permissions')\n", (61035, 61103), False, 'from discord_slash.utils import manage_components\n'), ((61192, 61285), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""manage webhooks"""', 'value': '"""manage_webhooks"""'}), "(label='manage webhooks', value=\n 'manage_webhooks')\n", (61230, 61285), False, 'from discord_slash.utils import manage_components\n'), ((61373, 61478), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""create instant invite"""', 'value': '"""create_instant_invite"""'}), "(label='create instant invite', value\n ='create_instant_invite')\n", (61411, 61478), False, 'from discord_slash.utils import manage_components\n'), ((61566, 61669), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""send messages in the channel"""', 'value': '"""send_messages"""'}), "(label='send messages in the channel',\n value='send_messages')\n", (61604, 61669), False, 'from discord_slash.utils import manage_components\n'), ((61758, 61854), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""embed links in messages"""', 'value': '"""embed_links"""'}), "(label='embed links in messages',\n value='embed_links')\n", (61796, 61854), False, 'from discord_slash.utils import manage_components\n'), ((61943, 62041), 
'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""attach files to messages"""', 'value': '"""attach_files"""'}), "(label='attach files to messages',\n value='attach_files')\n", (61981, 62041), False, 'from discord_slash.utils import manage_components\n'), ((62130, 62230), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""add reactions to messages"""', 'value': '"""add_reactions"""'}), "(label='add reactions to messages',\n value='add_reactions')\n", (62168, 62230), False, 'from discord_slash.utils import manage_components\n'), ((62319, 62432), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""use external emojis in messages"""', 'value': '"""use_external_emojis"""'}), "(label=\n 'use external emojis in messages', value='use_external_emojis')\n", (62357, 62432), False, 'from discord_slash.utils import manage_components\n'), ((62520, 62616), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""mention @everyone"""', 'value': '"""mention_everyone"""'}), "(label='mention @everyone', value=\n 'mention_everyone')\n", (62558, 62616), False, 'from discord_slash.utils import manage_components\n'), ((62704, 62797), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""manage messages"""', 'value': '"""manage_messages"""'}), "(label='manage messages', value=\n 'manage_messages')\n", (62742, 62797), False, 'from discord_slash.utils import manage_components\n'), ((62885, 62988), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""read message history"""', 'value': '"""read_message_history"""'}), "(label='read message history', value=\n 
'read_message_history')\n", (62923, 62988), False, 'from discord_slash.utils import manage_components\n'), ((63076, 63173), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""send tts messages"""', 'value': '"""send_tts_messages"""'}), "(label='send tts messages', value=\n 'send_tts_messages')\n", (63114, 63173), False, 'from discord_slash.utils import manage_components\n'), ((69515, 69652), 'discord_slash.utils.manage_components.wait_for_component', 'manage_components.wait_for_component', (['self.bot'], {'components': '[sel_ar]', 'timeout': '(600)', 'check': '(lambda comp: comp.author.id == ctx.author.id)'}), '(self.bot, components=[sel_ar], timeout\n =600, check=lambda comp: comp.author.id == ctx.author.id)\n', (69551, 69652), False, 'from discord_slash.utils import manage_components\n'), ((79056, 79197), 'discord_slash.utils.manage_components.wait_for_component', 'manage_components.wait_for_component', (['self.bot'], {'components': '[text_sel_ar]', 'timeout': '(600)', 'check': '(lambda comp: comp.author.id == ctx.author.id)'}), '(self.bot, components=[text_sel_ar],\n timeout=600, check=lambda comp: comp.author.id == ctx.author.id)\n', (79092, 79197), False, 'from discord_slash.utils import manage_components\n'), ((82030, 82172), 'discord_slash.utils.manage_components.wait_for_component', 'manage_components.wait_for_component', (['self.bot'], {'components': '[voice_sel_ar]', 'timeout': '(600)', 'check': '(lambda comp: comp.author.id == ctx.author.id)'}), '(self.bot, components=[voice_sel_ar],\n timeout=600, check=lambda comp: comp.author.id == ctx.author.id)\n', (82066, 82172), False, 'from discord_slash.utils import manage_components\n'), ((52365, 52386), 'discord.Permissions', 'discord.Permissions', ([], {}), '()\n', (52384, 52386), False, 'import discord\n'), ((66879, 66970), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], 
{'label': '"""view the channel"""', 'value': '"""view_channel"""'}), "(label='view the channel', value=\n 'view_channel')\n", (66917, 66970), False, 'from discord_slash.utils import manage_components\n'), ((67058, 67154), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""manage the channel"""', 'value': '"""manage_channels"""'}), "(label='manage the channel', value=\n 'manage_channels')\n", (67096, 67154), False, 'from discord_slash.utils import manage_components\n'), ((67242, 67348), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""manage channel permissions"""', 'value': '"""manage_permissions"""'}), "(label='manage channel permissions',\n value='manage_permissions')\n", (67280, 67348), False, 'from discord_slash.utils import manage_components\n'), ((67437, 67542), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""create instant invite"""', 'value': '"""create_instant_invite"""'}), "(label='create instant invite', value\n ='create_instant_invite')\n", (67475, 67542), False, 'from discord_slash.utils import manage_components\n'), ((67630, 67721), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""connect to the channel"""', 'value': '"""connect"""'}), "(label='connect to the channel',\n value='connect')\n", (67668, 67721), False, 'from discord_slash.utils import manage_components\n'), ((67810, 67898), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""speak in the channel"""', 'value': '"""speak"""'}), "(label='speak in the channel', value=\n 'speak')\n", (67848, 67898), False, 'from discord_slash.utils import manage_components\n'), ((67986, 68063), 
'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""stream/camera"""', 'value': '"""stream"""'}), "(label='stream/camera', value='stream')\n", (68024, 68063), False, 'from discord_slash.utils import manage_components\n'), ((68156, 68259), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""use voice activation"""', 'value': '"""use_voice_activation"""'}), "(label='use voice activation', value=\n 'use_voice_activation')\n", (68194, 68259), False, 'from discord_slash.utils import manage_components\n'), ((68347, 68447), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""very important speaker"""', 'value': '"""priority_speaker"""'}), "(label='very important speaker',\n value='priority_speaker')\n", (68385, 68447), False, 'from discord_slash.utils import manage_components\n'), ((68536, 68623), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""mute members"""', 'value': '"""mute_members"""'}), "(label='mute members', value=\n 'mute_members')\n", (68574, 68623), False, 'from discord_slash.utils import manage_components\n'), ((68711, 68802), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""deafen members"""', 'value': '"""deafen_members"""'}), "(label='deafen members', value=\n 'deafen_members')\n", (68749, 68802), False, 'from discord_slash.utils import manage_components\n'), ((68890, 68996), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""move members in another channel"""', 'value': '"""move_members"""'}), "(label=\n 'move members in another channel', value='move_members')\n", (68928, 68996), False, 'from discord_slash.utils import 
manage_components\n'), ((75701, 75792), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""view the channel"""', 'value': '"""view_channel"""'}), "(label='view the channel', value=\n 'view_channel')\n", (75739, 75792), False, 'from discord_slash.utils import manage_components\n'), ((75896, 75992), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""manage the channel"""', 'value': '"""manage_channels"""'}), "(label='manage the channel', value=\n 'manage_channels')\n", (75934, 75992), False, 'from discord_slash.utils import manage_components\n'), ((76096, 76202), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""manage channel permissions"""', 'value': '"""manage_permissions"""'}), "(label='manage channel permissions',\n value='manage_permissions')\n", (76134, 76202), False, 'from discord_slash.utils import manage_components\n'), ((76307, 76400), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""manage webhooks"""', 'value': '"""manage_webhooks"""'}), "(label='manage webhooks', value=\n 'manage_webhooks')\n", (76345, 76400), False, 'from discord_slash.utils import manage_components\n'), ((76504, 76609), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""create instant invite"""', 'value': '"""create_instant_invite"""'}), "(label='create instant invite', value\n ='create_instant_invite')\n", (76542, 76609), False, 'from discord_slash.utils import manage_components\n'), ((76713, 76816), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""send messages in the channel"""', 'value': '"""send_messages"""'}), "(label='send messages in the 
channel',\n value='send_messages')\n", (76751, 76816), False, 'from discord_slash.utils import manage_components\n'), ((76921, 77017), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""embed links in messages"""', 'value': '"""embed_links"""'}), "(label='embed links in messages',\n value='embed_links')\n", (76959, 77017), False, 'from discord_slash.utils import manage_components\n'), ((77122, 77220), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""attach files to messages"""', 'value': '"""attach_files"""'}), "(label='attach files to messages',\n value='attach_files')\n", (77160, 77220), False, 'from discord_slash.utils import manage_components\n'), ((77325, 77425), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""add reactions to messages"""', 'value': '"""add_reactions"""'}), "(label='add reactions to messages',\n value='add_reactions')\n", (77363, 77425), False, 'from discord_slash.utils import manage_components\n'), ((77530, 77643), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""use external emojis in messages"""', 'value': '"""use_external_emojis"""'}), "(label=\n 'use external emojis in messages', value='use_external_emojis')\n", (77568, 77643), False, 'from discord_slash.utils import manage_components\n'), ((77747, 77843), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""mention @everyone"""', 'value': '"""mention_everyone"""'}), "(label='mention @everyone', value=\n 'mention_everyone')\n", (77785, 77843), False, 'from discord_slash.utils import manage_components\n'), ((77947, 78040), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], 
{'label': '"""manage messages"""', 'value': '"""manage_messages"""'}), "(label='manage messages', value=\n 'manage_messages')\n", (77985, 78040), False, 'from discord_slash.utils import manage_components\n'), ((78144, 78247), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""read message history"""', 'value': '"""read_message_history"""'}), "(label='read message history', value=\n 'read_message_history')\n", (78182, 78247), False, 'from discord_slash.utils import manage_components\n'), ((78351, 78448), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""send tts messages"""', 'value': '"""send_tts_messages"""'}), "(label='send tts messages', value=\n 'send_tts_messages')\n", (78389, 78448), False, 'from discord_slash.utils import manage_components\n'), ((79932, 80023), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""connect to the channel"""', 'value': '"""connect"""'}), "(label='connect to the channel',\n value='connect')\n", (79970, 80023), False, 'from discord_slash.utils import manage_components\n'), ((80128, 80216), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""speak in the channel"""', 'value': '"""speak"""'}), "(label='speak in the channel', value=\n 'speak')\n", (80166, 80216), False, 'from discord_slash.utils import manage_components\n'), ((80320, 80397), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""stream/camera"""', 'value': '"""stream"""'}), "(label='stream/camera', value='stream')\n", (80358, 80397), False, 'from discord_slash.utils import manage_components\n'), ((80506, 80609), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], 
{'label': '"""use voice activation"""', 'value': '"""use_voice_activation"""'}), "(label='use voice activation', value=\n 'use_voice_activation')\n", (80544, 80609), False, 'from discord_slash.utils import manage_components\n'), ((80713, 80813), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""very important speaker"""', 'value': '"""priority_speaker"""'}), "(label='very important speaker',\n value='priority_speaker')\n", (80751, 80813), False, 'from discord_slash.utils import manage_components\n'), ((80918, 81005), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""mute members"""', 'value': '"""mute_members"""'}), "(label='mute members', value=\n 'mute_members')\n", (80956, 81005), False, 'from discord_slash.utils import manage_components\n'), ((81109, 81200), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""deafen members"""', 'value': '"""deafen_members"""'}), "(label='deafen members', value=\n 'deafen_members')\n", (81147, 81200), False, 'from discord_slash.utils import manage_components\n'), ((81304, 81410), 'discord_slash.utils.manage_components.create_select_option', 'manage_components.create_select_option', ([], {'label': '"""move members in another channel"""', 'value': '"""move_members"""'}), "(label=\n 'move members in another channel', value='move_members')\n", (81342, 81410), False, 'from discord_slash.utils import manage_components\n')]
|
import numpy as np
import tensorflow as tf
from collections import Counter
from utils.process_utils import calculate_iou, non_maximum_suppression
def evaluate(y_pred, y_true, num_classes, score_thresh=0.5, iou_thresh=0.5):
    """Compute batch-level detection metrics: recall, precision and mAP.

    :param y_pred: sequence of (boxes, confidences, class probabilities),
                   each batched with one entry per image.
    :param y_true: list of three per-scale ground-truth arrays; for each image,
                   channels [..., 0:4] hold the box (cx, cy, w, h) and
                   [..., 5:] hold one-hot class probabilities (index 4 is
                   presumably an objectness channel, unused here -- TODO confirm).
    :param num_classes: number of object classes.
    :param score_thresh: NOTE(review): currently unused; kept for interface
                         compatibility -- confidence filtering is presumably
                         done inside non_maximum_suppression.
    :param iou_thresh: minimum IoU for a prediction to match a ground truth.
    :return: (recall, precision, mAP) as floats.
    """
    num_images = y_true[0].shape[0]
    true_labels_dict = {i: 0 for i in range(num_classes)}    # ground-truth count per class
    pred_labels_dict = {i: 0 for i in range(num_classes)}    # matched-prediction count per class
    true_positive_dict = {i: 0 for i in range(num_classes)}  # true positives per class
    for i in range(num_images):
        # collect ground-truth labels and boxes from the three feature maps
        true_labels_list, true_boxes_list = [], []
        for j in range(3):  # three feature maps
            true_probs_temp = y_true[j][i][..., 5:]
            true_boxes_temp = y_true[j][i][..., 0:4]
            object_mask = true_probs_temp.sum(axis=-1) > 0
            true_probs_temp = true_probs_temp[object_mask]
            true_boxes_temp = true_boxes_temp[object_mask]
            true_labels_list += np.argmax(true_probs_temp, axis=-1).tolist()
            true_boxes_list += true_boxes_temp.tolist()
        if len(true_labels_list) != 0:
            for cls, count in Counter(true_labels_list).items():
                true_labels_dict[cls] += count
        pred_boxes = y_pred[0][i:i+1]
        pred_confs = y_pred[1][i:i+1]
        pred_probs = y_pred[2][i:i+1]
        pred_boxes, pred_confs, pred_labels = non_maximum_suppression(pred_boxes, pred_confs, pred_probs)
        # reshape(-1, 4) keeps the array 2-D even when this image has no
        # ground-truth boxes: np.array([]) is 1-D and the slicing below
        # would otherwise raise an IndexError.
        true_boxes = np.array(true_boxes_list).reshape(-1, 4)
        # convert (cx, cy, w, h) -> (x_min, y_min, x_max, y_max)
        box_centers, box_sizes = true_boxes[:, 0:2], true_boxes[:, 2:4]
        true_boxes[:, 0:2] = box_centers - box_sizes / 2.
        true_boxes[:, 2:4] = true_boxes[:, 0:2] + box_sizes
        pred_labels_list = [] if pred_labels is None else pred_labels.tolist()
        if pred_labels_list == []:
            continue
        # greedy matching: each prediction may be claimed by at most one GT box
        detected = []
        for k in range(len(true_labels_list)):
            # compute iou between predicted boxes and this ground-truth box
            iou = calculate_iou(true_boxes[k:k+1], pred_boxes)
            m = np.argmax(iou)  # index of the best-overlapping prediction
            if iou[m] >= iou_thresh and true_labels_list[k] == pred_labels_list[m] and m not in detected:
                pred_labels_dict[true_labels_list[k]] += 1
                detected.append(m)
        pred_labels_list = [pred_labels_list[m] for m in detected]
        for c in range(num_classes):
            t = true_labels_list.count(c)
            p = pred_labels_list.count(c)
            true_positive_dict[c] += p if t >= p else t
    # the epsilon avoids division by zero when nothing was seen/predicted
    recall = sum(true_positive_dict.values()) / (sum(true_labels_dict.values()) + 1e-6)
    precision = sum(true_positive_dict.values()) / (sum(pred_labels_dict.values()) + 1e-6)
    avg_prec = [true_positive_dict[i] / (true_labels_dict[i] + 1e-6) for i in range(num_classes)]
    mAP = sum(avg_prec) / (sum([avg_prec[i] != 0 for i in range(num_classes)]) + 1e-6)
    return recall, precision, mAP
|
[
"utils.process_utils.non_maximum_suppression",
"numpy.argmax",
"numpy.array",
"utils.process_utils.calculate_iou",
"collections.Counter"
] |
[((1303, 1362), 'utils.process_utils.non_maximum_suppression', 'non_maximum_suppression', (['pred_boxes', 'pred_confs', 'pred_probs'], {}), '(pred_boxes, pred_confs, pred_probs)\n', (1326, 1362), False, 'from utils.process_utils import calculate_iou, non_maximum_suppression\n'), ((1385, 1410), 'numpy.array', 'np.array', (['true_boxes_list'], {}), '(true_boxes_list)\n', (1393, 1410), True, 'import numpy as np\n'), ((1880, 1926), 'utils.process_utils.calculate_iou', 'calculate_iou', (['true_boxes[k:k + 1]', 'pred_boxes'], {}), '(true_boxes[k:k + 1], pred_boxes)\n', (1893, 1926), False, 'from utils.process_utils import calculate_iou, non_maximum_suppression\n'), ((1941, 1955), 'numpy.argmax', 'np.argmax', (['iou'], {}), '(iou)\n', (1950, 1955), True, 'import numpy as np\n'), ((903, 938), 'numpy.argmax', 'np.argmax', (['true_probs_temp'], {'axis': '(-1)'}), '(true_probs_temp, axis=-1)\n', (912, 938), True, 'import numpy as np\n'), ((1075, 1100), 'collections.Counter', 'Counter', (['true_labels_list'], {}), '(true_labels_list)\n', (1082, 1100), False, 'from collections import Counter\n')]
|
"""
Created on 31 Jan 2019
@author: <NAME> (<EMAIL>)
https://www.u-blox.com/en/product/sam-m8q-module
example sentences:
PAM7...
$GPRMC,103228.00,A,5049.37823,N,00007.37872,W,0.104,,301216,,,D*64
$GPVTG,,T,,M,0.104,N,0.193,K,D*28
$GPGGA,103228.00,5049.37823,N,00007.37872,W,2,07,1.85,34.0,M,45.4,M,,0000*75
$GPGSA,A,3,23,17,03,09,01,22,19,,,,,,2.96,1.85,2.30*06
$GPGSV,4,1,13,01,15,142,36,02,12,312,21,03,46,084,33,06,46,301,*70
$GPGSV,4,2,13,09,49,206,46,12,01,319,,17,32,235,43,19,38,254,35*74
$GPGSV,4,3,13,22,31,090,29,23,74,115,35,25,03,355,,31,14,034,20*78
$GPGSV,4,4,13,33,30,200,42*4C
$GPGLL,5049.37823,N,00007.37872,W,103228.00,A,D*7F
SAM8...
$GNRMC,114733.00,A,5049.38206,N,00007.39011,W,0.109,,310119,,,D*73
$GNVTG,,T,,M,0.109,N,0.202,K,D*30
$GNGGA,114733.00,5049.38206,N,00007.39011,W,2,06,1.44,116.2,M,45.4,M,,0000*5C
$GNGSA,A,3,05,07,13,28,30,15,,,,,,,2.84,1.44,2.45*1D
$GNGSA,A,3,,,,,,,,,,,,,2.84,1.44,2.45*10
$GPGSV,3,1,12,05,51,194,47,07,18,060,27,08,08,035,22,13,71,296,32*75
$GPGSV,3,2,12,15,34,290,26,21,15,308,12,24,04,240,,27,03,007,*7D
$GPGSV,3,3,12,28,45,116,29,30,50,064,31,36,25,141,,49,32,173,46*77
$GLGSV,1,1,00*65
$GNGLL,5049.38206,N,00007.39011,W,114733.00,A,D*69
"""
import time
from scs_core.position.nmea.gpgga import GPGGA
from scs_core.position.nmea.gpgll import GPGLL
from scs_core.position.nmea.gpgsa import GPGSA
from scs_core.position.nmea.gpgsv import GPGSV
from scs_core.position.nmea.gprmc import GPRMC
from scs_core.position.nmea.gpvtg import GPVTG
from scs_core.position.nmea.nmea_report import NMEAReport
from scs_dfe.board.io import IO
from scs_host.sys.host_serial import HostSerial
# --------------------------------------------------------------------------------------------------------------------
class SAMM8Q(object):
    """
    u-blox SAM M8Q GPS Antenna Module

    Reads NMEA sentences from the module over a UART and parses them into
    the scs_core NMEA sentence classes (GPRMC, GPVTG, GPGGA, GPGSA, GPGSV,
    GPGLL). Power to the module is switched via the board IO's gps_power pin.
    """
    SOURCE = "SAM8Q"                            # identifier for this GPS source
    START_MESSAGE_IDS = GPRMC.MESSAGE_IDS       # message IDs that mark the start of a report cycle
    __BAUD_RATE = 9600                          # UART speed
    __BOOT_DELAY = 0.500 # seconds - wait after power-on before the module is used
    __SERIAL_LOCK_TIMEOUT = 3.0                 # seconds
    __SERIAL_COMMS_TIMEOUT = 1.0                # seconds
    # ----------------------------------------------------------------------------------------------------------------
    def __init__(self, uart):
        """
        Bind the given UART to a HostSerial port at the module's fixed baud rate.
        """
        self.__io = IO()
        self.__serial = HostSerial(uart, self.__BAUD_RATE, False)
    # ----------------------------------------------------------------------------------------------------------------
    def power_on(self):
        """
        Power up the module (gps_power is active-low) and wait for it to boot.
        """
        self.__io.gps_power = IO.LOW
        time.sleep(self.__BOOT_DELAY)
    def power_off(self):
        """
        Power down the module.
        """
        self.__io.gps_power = IO.HIGH
    # ----------------------------------------------------------------------------------------------------------------
    def open(self):
        """
        Open the serial port (may block up to __SERIAL_LOCK_TIMEOUT).
        """
        self.__serial.open(self.__SERIAL_LOCK_TIMEOUT, self.__SERIAL_COMMS_TIMEOUT)
    def close(self):
        """
        Close the serial port.
        """
        self.__serial.close()
    # ----------------------------------------------------------------------------------------------------------------
    def report(self, message_class):
        """
        Read up to 11 lines, returning the first sentence whose message ID
        matches message_class.MESSAGE_IDS, or None if none is found.
        Malformed or partially-received lines are skipped.
        """
        for i in range(11):
            try:
                line = self.__serial.read_line("\r\n", self.__SERIAL_COMMS_TIMEOUT)
                r = NMEAReport.construct(line)
                if r.str(0) in message_class.MESSAGE_IDS:
                    return message_class.construct(r)
            except (IndexError, UnicodeDecodeError, ValueError):
                # tolerate corrupt serial data and keep scanning
                continue
        return None
    # noinspection PyListCreation
    def report_all(self):
        """
        Read up to 20 lines and parse one full report cycle, returning the
        sentences [GPRMC, GPVTG, GPGGA, GPGSA, GPGSV, GPGLL] in that order.
        Returns [] if no cycle start (see START_MESSAGE_IDS) is found.
        """
        # reports...
        reports = []
        for i in range(20):
            try:
                r = NMEAReport.construct(self.__serial.read_line("\r\n", self.__SERIAL_COMMS_TIMEOUT))
                reports.append(r)
            except (UnicodeDecodeError, ValueError):
                continue
        # start...
        # NOTE(review): if no report matches START_MESSAGE_IDS, the loop falls
        # through with start == len(reports) - 1 rather than None - confirm
        # whether that fall-through is intended.
        start = None
        for start in range(len(reports)):
            if reports[start].str(0) in SAMM8Q.START_MESSAGE_IDS:
                break
        if start is None:
            return []
        # sentences...
        sentences = []
        # GPRMC...
        sentences.append(GPRMC.construct(reports[start]))
        # GPVTG...
        sentences.append(GPVTG.construct(reports[start + 1]))
        # GPGGA...
        sentences.append(GPGGA.construct(reports[start + 2]))
        # GPGSA...
        sentences.append(GPGSA.construct(reports[start + 3]))
        report = None # prevents post-loop warning
        # GPGSVs...
        # NOTE(review): this breaks at the FIRST matching GSV report and
        # constructs only that one; if none matches, the LAST report read is
        # constructed as a GPGSV - verify this is the intended behaviour.
        for report in reports[start + 4:]:
            if report.str(0) in GPGSV.MESSAGE_IDS:
                break
        sentences.append(GPGSV.construct(report))
        # GPGLL...
        # NOTE(review): same first-match-or-last-report pattern as GPGSV above.
        for report in reports[start + 5:]:
            if report.str(0) in GPGLL.MESSAGE_IDS:
                break
        sentences.append(GPGLL.construct(report))
        return sentences
    def line(self):
        """
        Read and return one raw line from the serial port.
        """
        return self.__serial.read_line("\r\n", self.__SERIAL_COMMS_TIMEOUT)
    # ----------------------------------------------------------------------------------------------------------------
    def __str__(self, *args, **kwargs):
        return "SAMM8Q:{io:%s, serial:%s}" % (self.__io, self.__serial)
|
[
"scs_core.position.nmea.nmea_report.NMEAReport.construct",
"scs_core.position.nmea.gprmc.GPRMC.construct",
"scs_dfe.board.io.IO",
"scs_core.position.nmea.gpgll.GPGLL.construct",
"time.sleep",
"scs_core.position.nmea.gpvtg.GPVTG.construct",
"scs_host.sys.host_serial.HostSerial",
"scs_core.position.nmea.gpgsa.GPGSA.construct",
"scs_core.position.nmea.gpgsv.GPGSV.construct",
"scs_core.position.nmea.gpgga.GPGGA.construct"
] |
[((2270, 2274), 'scs_dfe.board.io.IO', 'IO', ([], {}), '()\n', (2272, 2274), False, 'from scs_dfe.board.io import IO\n'), ((2299, 2340), 'scs_host.sys.host_serial.HostSerial', 'HostSerial', (['uart', 'self.__BAUD_RATE', '(False)'], {}), '(uart, self.__BAUD_RATE, False)\n', (2309, 2340), False, 'from scs_host.sys.host_serial import HostSerial\n'), ((2532, 2561), 'time.sleep', 'time.sleep', (['self.__BOOT_DELAY'], {}), '(self.__BOOT_DELAY)\n', (2542, 2561), False, 'import time\n'), ((4143, 4174), 'scs_core.position.nmea.gprmc.GPRMC.construct', 'GPRMC.construct', (['reports[start]'], {}), '(reports[start])\n', (4158, 4174), False, 'from scs_core.position.nmea.gprmc import GPRMC\n'), ((4221, 4256), 'scs_core.position.nmea.gpvtg.GPVTG.construct', 'GPVTG.construct', (['reports[start + 1]'], {}), '(reports[start + 1])\n', (4236, 4256), False, 'from scs_core.position.nmea.gpvtg import GPVTG\n'), ((4303, 4338), 'scs_core.position.nmea.gpgga.GPGGA.construct', 'GPGGA.construct', (['reports[start + 2]'], {}), '(reports[start + 2])\n', (4318, 4338), False, 'from scs_core.position.nmea.gpgga import GPGGA\n'), ((4385, 4420), 'scs_core.position.nmea.gpgsa.GPGSA.construct', 'GPGSA.construct', (['reports[start + 3]'], {}), '(reports[start + 3])\n', (4400, 4420), False, 'from scs_core.position.nmea.gpgsa import GPGSA\n'), ((4645, 4668), 'scs_core.position.nmea.gpgsv.GPGSV.construct', 'GPGSV.construct', (['report'], {}), '(report)\n', (4660, 4668), False, 'from scs_core.position.nmea.gpgsv import GPGSV\n'), ((4832, 4855), 'scs_core.position.nmea.gpgll.GPGLL.construct', 'GPGLL.construct', (['report'], {}), '(report)\n', (4847, 4855), False, 'from scs_core.position.nmea.gpgll import GPGLL\n'), ((3214, 3240), 'scs_core.position.nmea.nmea_report.NMEAReport.construct', 'NMEAReport.construct', (['line'], {}), '(line)\n', (3234, 3240), False, 'from scs_core.position.nmea.nmea_report import NMEAReport\n')]
|
# The main purpose of this cfi is test that the validation will insert
# missing parameters that are required by the ParameterSetDescription.
# It also tests many other things that cannot be tested in an
# autogenerated cfi file that are related to the ParameterSetDescription
# infrastructure.
import FWCore.ParameterSet.Config as cms
# NOTE: this config is deliberately shaped to exercise ParameterSetDescription
# validation (see the header comment above) - parameters that look redundant
# or inconsistently grouped are intentional test fixtures.
testProducerWithPsetDesc = cms.EDProducer('ProducerWithPSetDesc',
  testingAutoGeneratedCfi = cms.untracked.bool(False),
  # simple optional scalar parameters
  p_int_opt_nd = cms.int32(11),
  p_int_optuntracked_nd = cms.untracked.int32(12),
  # PSet validated against a wildcard description
  wildcardPset = cms.PSet(
    a = cms.int32(1)
  , b = cms.untracked.double(1.0)
  ),
  # PSet validated against a switch node (iswitch selects the variant)
  switchPset = cms.PSet(
    iswitch = cms.int32(2),
    ivalue = cms.string('102'),
    addTeVRefits = cms.bool(False)
  ),
  # PSets exercising the logical node types of the description
  xorPset = cms.PSet(
    name = cms.uint32(11)
  ),
  orPset = cms.PSet(
    x2 = cms.uint32(11),
    y1 = cms.string('11'),
    y2 = cms.uint32(11)
  ),
  andPset = cms.PSet(
    x1 = cms.string('11'),
    y2 = cms.uint32(11),
    a2 = cms.uint32(11),
    b3 = cms.uint32(11)
  ),
  ifExistsPset = cms.PSet(
    x1 = cms.uint32(11),
    y1 = cms.uint32(11),
    z2 = cms.string('11')
  ),
  # PSet exercising allowed-labels validation: each vstring names the labels
  # that are permitted to appear alongside it
  allowedLabelsPset = cms.PSet(
    testAllowedLabels = cms.vstring('i1', 'i2', 'i3'),
    i1 = cms.int32(1),
    i2 = cms.int32(2),
    i3 = cms.int32(3),
    testAllowedLabelsUntracked = cms.untracked.vstring('u1', 'u2', 'u3'),
    u1 = cms.untracked.uint32(1),
    u3 = cms.untracked.uint32(3),
    testOptAllowedLabels = cms.vstring('oi1', 'oi2', 'oi3'),
    oi1 = cms.int32(1),
    oi2 = cms.int32(2),
    oi3 = cms.int32(3),
    testOptAllowedLabelsUntracked = cms.untracked.vstring('ou1', 'ou2', 'ou3'),
    ou1 = cms.untracked.uint32(1),
    ou2 = cms.untracked.uint32(2)
  )
#, bars = cms.VPSet(
#  cms.PSet(
#    ndouDrinks = cms.untracked.uint32(5)
#  ),
#  cms.PSet(
#  )
#)
)
|
[
"FWCore.ParameterSet.Config.string",
"FWCore.ParameterSet.Config.untracked.int32",
"FWCore.ParameterSet.Config.untracked.vstring",
"FWCore.ParameterSet.Config.untracked.bool",
"FWCore.ParameterSet.Config.vstring",
"FWCore.ParameterSet.Config.int32",
"FWCore.ParameterSet.Config.uint32",
"FWCore.ParameterSet.Config.bool",
"FWCore.ParameterSet.Config.untracked.uint32",
"FWCore.ParameterSet.Config.untracked.double"
] |
[((434, 459), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(False)'], {}), '(False)\n', (452, 459), True, 'import FWCore.ParameterSet.Config as cms\n'), ((478, 491), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(11)'], {}), '(11)\n', (487, 491), True, 'import FWCore.ParameterSet.Config as cms\n'), ((519, 542), 'FWCore.ParameterSet.Config.untracked.int32', 'cms.untracked.int32', (['(12)'], {}), '(12)\n', (538, 542), True, 'import FWCore.ParameterSet.Config as cms\n'), ((579, 591), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(1)'], {}), '(1)\n', (588, 591), True, 'import FWCore.ParameterSet.Config as cms\n'), ((602, 627), 'FWCore.ParameterSet.Config.untracked.double', 'cms.untracked.double', (['(1.0)'], {}), '(1.0)\n', (622, 627), True, 'import FWCore.ParameterSet.Config as cms\n'), ((672, 684), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(2)'], {}), '(2)\n', (681, 684), True, 'import FWCore.ParameterSet.Config as cms\n'), ((699, 716), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""102"""'], {}), "('102')\n", (709, 716), True, 'import FWCore.ParameterSet.Config as cms\n'), ((737, 752), 'FWCore.ParameterSet.Config.bool', 'cms.bool', (['(False)'], {}), '(False)\n', (745, 752), True, 'import FWCore.ParameterSet.Config as cms\n'), ((791, 805), 'FWCore.ParameterSet.Config.uint32', 'cms.uint32', (['(11)'], {}), '(11)\n', (801, 805), True, 'import FWCore.ParameterSet.Config as cms\n'), ((841, 855), 'FWCore.ParameterSet.Config.uint32', 'cms.uint32', (['(11)'], {}), '(11)\n', (851, 855), True, 'import FWCore.ParameterSet.Config as cms\n'), ((866, 882), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""11"""'], {}), "('11')\n", (876, 882), True, 'import FWCore.ParameterSet.Config as cms\n'), ((893, 907), 'FWCore.ParameterSet.Config.uint32', 'cms.uint32', (['(11)'], {}), '(11)\n', (903, 907), True, 'import FWCore.ParameterSet.Config as cms\n'), ((944, 960), 'FWCore.ParameterSet.Config.string', 'cms.string', 
(['"""11"""'], {}), "('11')\n", (954, 960), True, 'import FWCore.ParameterSet.Config as cms\n'), ((971, 985), 'FWCore.ParameterSet.Config.uint32', 'cms.uint32', (['(11)'], {}), '(11)\n', (981, 985), True, 'import FWCore.ParameterSet.Config as cms\n'), ((996, 1010), 'FWCore.ParameterSet.Config.uint32', 'cms.uint32', (['(11)'], {}), '(11)\n', (1006, 1010), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1021, 1035), 'FWCore.ParameterSet.Config.uint32', 'cms.uint32', (['(11)'], {}), '(11)\n', (1031, 1035), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1077, 1091), 'FWCore.ParameterSet.Config.uint32', 'cms.uint32', (['(11)'], {}), '(11)\n', (1087, 1091), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1102, 1116), 'FWCore.ParameterSet.Config.uint32', 'cms.uint32', (['(11)'], {}), '(11)\n', (1112, 1116), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1127, 1143), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""11"""'], {}), "('11')\n", (1137, 1143), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1205, 1234), 'FWCore.ParameterSet.Config.vstring', 'cms.vstring', (['"""i1"""', '"""i2"""', '"""i3"""'], {}), "('i1', 'i2', 'i3')\n", (1216, 1234), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1245, 1257), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(1)'], {}), '(1)\n', (1254, 1257), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1268, 1280), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(2)'], {}), '(2)\n', (1277, 1280), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1291, 1303), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(3)'], {}), '(3)\n', (1300, 1303), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1338, 1377), 'FWCore.ParameterSet.Config.untracked.vstring', 'cms.untracked.vstring', (['"""u1"""', '"""u2"""', '"""u3"""'], {}), "('u1', 'u2', 'u3')\n", (1359, 1377), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1388, 1411), 
'FWCore.ParameterSet.Config.untracked.uint32', 'cms.untracked.uint32', (['(1)'], {}), '(1)\n', (1408, 1411), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1422, 1445), 'FWCore.ParameterSet.Config.untracked.uint32', 'cms.untracked.uint32', (['(3)'], {}), '(3)\n', (1442, 1445), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1474, 1506), 'FWCore.ParameterSet.Config.vstring', 'cms.vstring', (['"""oi1"""', '"""oi2"""', '"""oi3"""'], {}), "('oi1', 'oi2', 'oi3')\n", (1485, 1506), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1518, 1530), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(1)'], {}), '(1)\n', (1527, 1530), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1542, 1554), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(2)'], {}), '(2)\n', (1551, 1554), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1566, 1578), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(3)'], {}), '(3)\n', (1575, 1578), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1616, 1658), 'FWCore.ParameterSet.Config.untracked.vstring', 'cms.untracked.vstring', (['"""ou1"""', '"""ou2"""', '"""ou3"""'], {}), "('ou1', 'ou2', 'ou3')\n", (1637, 1658), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1670, 1693), 'FWCore.ParameterSet.Config.untracked.uint32', 'cms.untracked.uint32', (['(1)'], {}), '(1)\n', (1690, 1693), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1705, 1728), 'FWCore.ParameterSet.Config.untracked.uint32', 'cms.untracked.uint32', (['(2)'], {}), '(2)\n', (1725, 1728), True, 'import FWCore.ParameterSet.Config as cms\n')]
|
"""empty message
Revision ID: 0d0c426e7b01
Revises: <KEY>
Create Date: 2017-10-29 20:37:10.615469
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '0d0c426e7b01'
down_revision = '<KEY>'
branch_labels = None
depends_on = None
def upgrade():
    """Create the access_key table, keyed to user via a foreign key."""
    columns = [
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('access_key_id', sa.String(length=128), nullable=True),
        sa.Column('secret_access_key', sa.String(length=128), nullable=True),
        sa.Column('user_id', sa.Integer(), nullable=False),
    ]
    constraints = [
        sa.ForeignKeyConstraint(['user_id'], ['user.id']),
        sa.PrimaryKeyConstraint('id'),
    ]
    op.create_table('access_key', *columns, *constraints)
def downgrade():
    """Reverse the upgrade by dropping the access_key table."""
    op.drop_table('access_key')
|
[
"alembic.op.drop_table",
"sqlalchemy.PrimaryKeyConstraint",
"sqlalchemy.ForeignKeyConstraint",
"sqlalchemy.String",
"sqlalchemy.Integer"
] |
[((872, 899), 'alembic.op.drop_table', 'op.drop_table', (['"""access_key"""'], {}), "('access_key')\n", (885, 899), False, 'from alembic import op\n'), ((655, 704), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['user_id']", "['user.id']"], {}), "(['user_id'], ['user.id'])\n", (678, 704), True, 'import sqlalchemy as sa\n'), ((712, 741), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (735, 741), True, 'import sqlalchemy as sa\n'), ((420, 432), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (430, 432), True, 'import sqlalchemy as sa\n'), ((482, 503), 'sqlalchemy.String', 'sa.String', ([], {'length': '(128)'}), '(length=128)\n', (491, 503), True, 'import sqlalchemy as sa\n'), ((556, 577), 'sqlalchemy.String', 'sa.String', ([], {'length': '(128)'}), '(length=128)\n', (565, 577), True, 'import sqlalchemy as sa\n'), ((620, 632), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (630, 632), True, 'import sqlalchemy as sa\n')]
|