code stringlengths 2k 1.04M | repo_path stringlengths 5 517 | parsed_code stringlengths 0 1.04M | quality_prob float64 0.02 0.95 | learning_prob float64 0.02 0.93 |
|---|---|---|---|---|
import os
from django import forms
from django.test import TestCase
from django.core.files.uploadedfile import SimpleUploadedFile
from cassava.fields import CsvFormsetField
class ShoppingListItemForm(forms.Form):
item_code = forms.CharField(max_length=20)
description = forms.CharField(max_length=255)
quantity = forms.IntegerField()
class ShoppingListForm(forms.Form):
title = forms.CharField(max_length=255)
shopping_list_csv = CsvFormsetField(form_class=ShoppingListItemForm)
class CsvFormsetFieldTests(TestCase):
"""
Test CsvFormsetField by mocking a ShoppingListCsv Form
"""
def get_csv_file_data(self, filename):
"""
Get csv file data from a filename relative this directory
"""
current_dir = os.path.dirname(__file__)
file_path = os.path.join(current_dir, filename)
csv_file = open(file_path, 'rb')
return SimpleUploadedFile(csv_file.name, csv_file.read())
def test_valid_formset_csv(self):
"""
Test that a valid csv upload does not raise any errors
"""
post_data = {
'title': u'My Shopping List',
}
file_data = {
'shopping_list_csv': self.get_csv_file_data('valid_formset_csv.csv')
}
form = ShoppingListForm(post_data, file_data)
self.assertTrue(form.is_valid())
def test_invalid_formset_csv_wrong_data_type(self):
"""
Test that an invalid csv upload with a bad data type is invalid
"""
post_data = {
'title': u'My Shopping List',
}
file_data = {
'shopping_list_csv': self.get_csv_file_data('invalid_formset_csv_wrong_data_type.csv')
}
form = ShoppingListForm(post_data, file_data)
self.assertFalse(form.is_valid())
self.assertIn(u"Error in row 3", form.errors['shopping_list_csv'])
self.assertIn(u"quantity", form.fields['shopping_list_csv'].forms[1].errors)
self.assertIn(u'Enter a whole number.', form.fields['shopping_list_csv'].forms[1].errors['quantity']) | cassava/tests/test_forms.py | import os
from django import forms
from django.test import TestCase
from django.core.files.uploadedfile import SimpleUploadedFile
from cassava.fields import CsvFormsetField
class ShoppingListItemForm(forms.Form):
item_code = forms.CharField(max_length=20)
description = forms.CharField(max_length=255)
quantity = forms.IntegerField()
class ShoppingListForm(forms.Form):
title = forms.CharField(max_length=255)
shopping_list_csv = CsvFormsetField(form_class=ShoppingListItemForm)
class CsvFormsetFieldTests(TestCase):
"""
Test CsvFormsetField by mocking a ShoppingListCsv Form
"""
def get_csv_file_data(self, filename):
"""
Get csv file data from a filename relative this directory
"""
current_dir = os.path.dirname(__file__)
file_path = os.path.join(current_dir, filename)
csv_file = open(file_path, 'rb')
return SimpleUploadedFile(csv_file.name, csv_file.read())
def test_valid_formset_csv(self):
"""
Test that a valid csv upload does not raise any errors
"""
post_data = {
'title': u'My Shopping List',
}
file_data = {
'shopping_list_csv': self.get_csv_file_data('valid_formset_csv.csv')
}
form = ShoppingListForm(post_data, file_data)
self.assertTrue(form.is_valid())
def test_invalid_formset_csv_wrong_data_type(self):
"""
Test that an invalid csv upload with a bad data type is invalid
"""
post_data = {
'title': u'My Shopping List',
}
file_data = {
'shopping_list_csv': self.get_csv_file_data('invalid_formset_csv_wrong_data_type.csv')
}
form = ShoppingListForm(post_data, file_data)
self.assertFalse(form.is_valid())
self.assertIn(u"Error in row 3", form.errors['shopping_list_csv'])
self.assertIn(u"quantity", form.fields['shopping_list_csv'].forms[1].errors)
self.assertIn(u'Enter a whole number.', form.fields['shopping_list_csv'].forms[1].errors['quantity']) | 0.343892 | 0.161254 |
import argparse
from multiprocessing import Pool
import requests
from bs4 import BeautifulSoup
import urllib.parse as urlparse
import os, sys, re
import socket
import validators
import time, random
def load_file(file):
''' read file line by line and output a list'''
if os.path.isfile(file):
with open(file) as f:
lines = f.read().splitlines()
return lines
else:
print(f"\033[0;31mERROR: file not exist [{file}]\033[0m")
sys.exit(1)
def check_url_format(url):
''' valide or reformat URL format. URL must start with http(s)'''
if url.find('http') != 0:
url = 'http://' + url
if validators.url(url) is True:
return url
else:
return False
def scrape_urls(site, blacklist, max_depth = 1, cur_depth=0, urls=[],emails=[]):
''' recursive function to grep url from url'''
pid = os.getpid()
url = urlparse.urlparse(site)
status_code = None
base_url = url.scheme + '://' + url.netloc
if url.path != '':
base_url = base_url + os.path.dirname(url.path) + '/' # do // sometime
headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'}
try:
r = requests.get(site, headers=headers)
status_code = r.status_code
except:
print(f" WARNING: [{pid}] request fail {site}")
return {'urls': urls, 'emails': emails} # maybe ...
print(f" INFO: [{pid}] HTTP status code [{status_code}]")
s = BeautifulSoup(r.text,"html.parser")
mails = scrap_email(r.text)
for mail in mails:
if mail not in emails:
emails.append(mail)
nb_emails = len(emails)
print(f" Info: pid[{pid}] depth[{cur_depth}] emails[{nb_emails}] {site}")
if cur_depth >= max_depth: # exit: to mutch iterration
print(f" INFO: pid[{pid}] max depth {cur_depth} {max_depth}")
return {'urls': urls, 'emails': emails}
for a in s.find_all("a", href=True):
site = format_url(a['href'],base_url)
if site is not False:
if site not in urls and check_extension(site, blacklist):
urls.append(site)
time.sleep(random.randint(1,4)/5) # no dos
scrape_urls(site, blacklist, max_depth, cur_depth+1, urls, emails)
return {'urls': urls, 'emails': emails}
def format_url(url_tmp,url_valide):
''' create Url and check if in domain. need http predix for url_valide'''
url_temp_raw = url_tmp
url_valide_raw = url_valide
url_tmp = urlparse.urlparse(url_tmp)
url_valide = urlparse.urlparse(url_valide)
if url_tmp.netloc == '' or url_tmp.netloc == url_valide.netloc:
if url_tmp.path != '' and url_tmp.path.find('(') == -1 and url_tmp.scheme != 'mailto':
url_join = urlparse.urljoin(url_valide_raw, url_temp_raw)
return url_join
return False
def check_redirection(url, max_redirection=5):
''' check if url is redirect and return value'''
count = 0
while count < max_redirection:
count = count + 1
try:
req = requests.head(url, timeout=(2, 5), allow_redirects=False)
except:
print("\033[0;31mWARNING: check_redirection error (SSL/Timeout ...)\033[0m")
return False
if 'location' in req.headers:
url = req.headers['location']
if count == max_redirection:
print("\033[0;31mWARNING: To mutch redirection\033[0m")
return False
else:
break
return url
def valid_domain(url):
''' ns lookup to resolv domain to IP'''
url = urlparse.urlparse(url)
domain = url.netloc
try:
s = socket.getaddrinfo(domain,0,2,0,0)
return True
except:
print("\033[0;31mWARNING: domain resolution fail\033[0m")
return False
def scrap_email(txt):
''' scrap mail on txt'''
out = re.findall(r"\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.(?!png|jpg|gif)[A-Z|a-z]{2,}\b", txt, re.I)
return out
def write_to(file,values):
''' Write list to file line by line'''
if isinstance(values, list):
try:
f = open(file, "w")
for value in values:
f.write(f"{value}\n")
f.close()
return True
except:
print("\033[0;31mWARNING: Fail to write file\033[0m")
return False
else:
print('\033[0;31mWARNING: Need a list, wrong type\033[0m')
return False
def check_extension(url,blacklist=[]):
''' check if extension is in blacklist. need http prefix'''
path = urlparse.urlparse(url).path
if os.path.splitext(path)[1]:
if os.path.splitext(path)[1] in blacklist:
return False
else:
return True
else:
return True # no extension
def scrap(datas):
''' scrap url '''
pid = os.getpid()
url = datas['url']
folder = datas['out']
blacklist = datas['blacklist']
max_depth = datas['max_depth']
print(f"\033[0;32mINFO [{pid}] Start {url}\033[0m")
check_url = check_url_format(url)
if check_url is False:
print(f"\033[0;31mWARNING: [{pid}] invalid URL [{url}]\033[0m")
else:
if valid_domain(check_url):
rurl = check_redirection(check_url)
if rurl is not False:
if check_url not in rurl:
print(f"\033[0;32mINFO [{pid}] reddirection {check_url} > {rurl}\033[0m")
else:
print(f"\033[0;32mINFO [{pid}] Scrap {rurl}\033[0m")
file = urlparse.urlparse(rurl).hostname + '.txt'
path = os.path.join(folder,file)
if os.path.isfile(path) is False:
#scrap Url
result = scrape_urls(rurl,blacklist,max_depth,0,[],[])
mails = result['emails']
# write emails in file
write_to(path,mails)
else:
print(f"\033[0;32mINFO [{pid}] File already exist {path}")
else:
print(f"\033[0;31mWARNING: [{pid}] request error {check_url}\033[0m")
else:
print(f"\033[0;31mWARNING: [{pid}] name resolution error {check_url}\033[0m")
print(f'\033[0;32mINFO: [{pid}] END {check_url}\033[0m')
def main():
""" main code """
threads = 4
file = 'scrabe.txt'
folder = 'out'
blacklist = ['.pdf','.xls','.xlsx','.pptx','.doc','.docx','.docm','.jpg','.jpeg','.png','.gif','.tiff']
max_depth = 1
description = 'Scrap email from URLs'
parser = argparse.ArgumentParser(description=description)
parser.add_argument('-t','--threads', type=int, default=threads, help='number of default concurent threads')
parser.add_argument('-f','--file', default=file, help='file with a URL line by line. Best to prefix URL with http/https (default scrabe.txt)')
parser.add_argument('-o','--out', default=folder, help='folder to save output (default /out')
parser.add_argument('-m','--max', default=max_depth, help='set recurisve depth (default 1')
args = parser.parse_args()
threads = args.threads
file = args.file
folder = args.out
max_depth = int(args.max)
urls = load_file(file)
print(f"\033[0;32mINFO: Load {len(urls)} from {file}\033[0m")
print(f"\033[0;32mINFO: Extension blacklist: {blacklist}\033[0m")
if not os.path.exists(folder):
os.mkdir(folder)
# deduplicate
urls = list(set(urls))
jobs = []
for url in urls:
jobs.append({'out':folder, 'url':url, 'blacklist': blacklist, 'max_depth': max_depth})
p = Pool(threads)
p.map(scrap,jobs)
p.close()
p.join()
# main
if __name__ == '__main__':
main() | scrabe.py | import argparse
from multiprocessing import Pool
import requests
from bs4 import BeautifulSoup
import urllib.parse as urlparse
import os, sys, re
import socket
import validators
import time, random
def load_file(file):
''' read file line by line and output a list'''
if os.path.isfile(file):
with open(file) as f:
lines = f.read().splitlines()
return lines
else:
print(f"\033[0;31mERROR: file not exist [{file}]\033[0m")
sys.exit(1)
def check_url_format(url):
''' valide or reformat URL format. URL must start with http(s)'''
if url.find('http') != 0:
url = 'http://' + url
if validators.url(url) is True:
return url
else:
return False
def scrape_urls(site, blacklist, max_depth = 1, cur_depth=0, urls=[],emails=[]):
''' recursive function to grep url from url'''
pid = os.getpid()
url = urlparse.urlparse(site)
status_code = None
base_url = url.scheme + '://' + url.netloc
if url.path != '':
base_url = base_url + os.path.dirname(url.path) + '/' # do // sometime
headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'}
try:
r = requests.get(site, headers=headers)
status_code = r.status_code
except:
print(f" WARNING: [{pid}] request fail {site}")
return {'urls': urls, 'emails': emails} # maybe ...
print(f" INFO: [{pid}] HTTP status code [{status_code}]")
s = BeautifulSoup(r.text,"html.parser")
mails = scrap_email(r.text)
for mail in mails:
if mail not in emails:
emails.append(mail)
nb_emails = len(emails)
print(f" Info: pid[{pid}] depth[{cur_depth}] emails[{nb_emails}] {site}")
if cur_depth >= max_depth: # exit: to mutch iterration
print(f" INFO: pid[{pid}] max depth {cur_depth} {max_depth}")
return {'urls': urls, 'emails': emails}
for a in s.find_all("a", href=True):
site = format_url(a['href'],base_url)
if site is not False:
if site not in urls and check_extension(site, blacklist):
urls.append(site)
time.sleep(random.randint(1,4)/5) # no dos
scrape_urls(site, blacklist, max_depth, cur_depth+1, urls, emails)
return {'urls': urls, 'emails': emails}
def format_url(url_tmp,url_valide):
''' create Url and check if in domain. need http predix for url_valide'''
url_temp_raw = url_tmp
url_valide_raw = url_valide
url_tmp = urlparse.urlparse(url_tmp)
url_valide = urlparse.urlparse(url_valide)
if url_tmp.netloc == '' or url_tmp.netloc == url_valide.netloc:
if url_tmp.path != '' and url_tmp.path.find('(') == -1 and url_tmp.scheme != 'mailto':
url_join = urlparse.urljoin(url_valide_raw, url_temp_raw)
return url_join
return False
def check_redirection(url, max_redirection=5):
''' check if url is redirect and return value'''
count = 0
while count < max_redirection:
count = count + 1
try:
req = requests.head(url, timeout=(2, 5), allow_redirects=False)
except:
print("\033[0;31mWARNING: check_redirection error (SSL/Timeout ...)\033[0m")
return False
if 'location' in req.headers:
url = req.headers['location']
if count == max_redirection:
print("\033[0;31mWARNING: To mutch redirection\033[0m")
return False
else:
break
return url
def valid_domain(url):
''' ns lookup to resolv domain to IP'''
url = urlparse.urlparse(url)
domain = url.netloc
try:
s = socket.getaddrinfo(domain,0,2,0,0)
return True
except:
print("\033[0;31mWARNING: domain resolution fail\033[0m")
return False
def scrap_email(txt):
''' scrap mail on txt'''
out = re.findall(r"\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.(?!png|jpg|gif)[A-Z|a-z]{2,}\b", txt, re.I)
return out
def write_to(file,values):
''' Write list to file line by line'''
if isinstance(values, list):
try:
f = open(file, "w")
for value in values:
f.write(f"{value}\n")
f.close()
return True
except:
print("\033[0;31mWARNING: Fail to write file\033[0m")
return False
else:
print('\033[0;31mWARNING: Need a list, wrong type\033[0m')
return False
def check_extension(url,blacklist=[]):
''' check if extension is in blacklist. need http prefix'''
path = urlparse.urlparse(url).path
if os.path.splitext(path)[1]:
if os.path.splitext(path)[1] in blacklist:
return False
else:
return True
else:
return True # no extension
def scrap(datas):
''' scrap url '''
pid = os.getpid()
url = datas['url']
folder = datas['out']
blacklist = datas['blacklist']
max_depth = datas['max_depth']
print(f"\033[0;32mINFO [{pid}] Start {url}\033[0m")
check_url = check_url_format(url)
if check_url is False:
print(f"\033[0;31mWARNING: [{pid}] invalid URL [{url}]\033[0m")
else:
if valid_domain(check_url):
rurl = check_redirection(check_url)
if rurl is not False:
if check_url not in rurl:
print(f"\033[0;32mINFO [{pid}] reddirection {check_url} > {rurl}\033[0m")
else:
print(f"\033[0;32mINFO [{pid}] Scrap {rurl}\033[0m")
file = urlparse.urlparse(rurl).hostname + '.txt'
path = os.path.join(folder,file)
if os.path.isfile(path) is False:
#scrap Url
result = scrape_urls(rurl,blacklist,max_depth,0,[],[])
mails = result['emails']
# write emails in file
write_to(path,mails)
else:
print(f"\033[0;32mINFO [{pid}] File already exist {path}")
else:
print(f"\033[0;31mWARNING: [{pid}] request error {check_url}\033[0m")
else:
print(f"\033[0;31mWARNING: [{pid}] name resolution error {check_url}\033[0m")
print(f'\033[0;32mINFO: [{pid}] END {check_url}\033[0m')
def main():
""" main code """
threads = 4
file = 'scrabe.txt'
folder = 'out'
blacklist = ['.pdf','.xls','.xlsx','.pptx','.doc','.docx','.docm','.jpg','.jpeg','.png','.gif','.tiff']
max_depth = 1
description = 'Scrap email from URLs'
parser = argparse.ArgumentParser(description=description)
parser.add_argument('-t','--threads', type=int, default=threads, help='number of default concurent threads')
parser.add_argument('-f','--file', default=file, help='file with a URL line by line. Best to prefix URL with http/https (default scrabe.txt)')
parser.add_argument('-o','--out', default=folder, help='folder to save output (default /out')
parser.add_argument('-m','--max', default=max_depth, help='set recurisve depth (default 1')
args = parser.parse_args()
threads = args.threads
file = args.file
folder = args.out
max_depth = int(args.max)
urls = load_file(file)
print(f"\033[0;32mINFO: Load {len(urls)} from {file}\033[0m")
print(f"\033[0;32mINFO: Extension blacklist: {blacklist}\033[0m")
if not os.path.exists(folder):
os.mkdir(folder)
# deduplicate
urls = list(set(urls))
jobs = []
for url in urls:
jobs.append({'out':folder, 'url':url, 'blacklist': blacklist, 'max_depth': max_depth})
p = Pool(threads)
p.map(scrap,jobs)
p.close()
p.join()
# main
if __name__ == '__main__':
main() | 0.140602 | 0.075517 |
from datetime import datetime, timedelta
import dateutil.parser
import json
from json import JSONDecoder
import re
from airflow import DAG
from airflow.operators.dummy_operator import DummyOperator
from airflow.operators.bash_operator import BashOperator
from airflow.operators.sensors import ExternalTaskSensor
def datetime_praser(json_dict):
for (key, value) in json_dict.items():
if isinstance(value, str) and re.search("d1", value):
json_dict[key] = datetime.strptime(value[3:], "%Y-%m-%dT%H:%M:%S")
elif isinstance(value, str) and re.search("d2", value):
json_dict[key] = timedelta(seconds=int(value[3:]))
else:
json_dict[key] = value
return json_dict
def read_dag_definition(filepath):
"""Reads in a DAG definition. DAGs are defined in JSON
raises a FileNotFoundError if the filepath doesn't exist
raises a InvalidConfigError if the dag definition is invalid
:filepath: filepath to dag definition
:returns: dict
"""
with open(filepath) as f:
content = f.read()
if not content:
return {}
else:
try:
return json.loads(content, object_hook=datetime_praser)
except json.decoder.JSONDecodeError as e:
raise InvalidConfigError("The json format of '{filepath}' is invalid.")
class InvalidConfigError(Exception):
pass
def dag_builder(conf):
"""Return a DAG given a configuration"""
dag = DAG(dag_id=conf['dag_id'], schedule_interval=conf['schedule_interval'], start_date=conf['start_date'], catchup=conf['catchup'], default_args=conf['default_args'])
task_conf = conf.get('tasks', [])
dep_conf = conf.get('dependencies', [])
tasks = {}
if task_conf:
tasks = attach_tasks(dag, task_conf)
if dep_conf:
build_flow(dep_conf, tasks)
return dag
def attach_tasks(dag, task_conf):
def build_task(task_def):
operator = operator_factory(task_def['operator_type'])
return operator(dag=dag, task_id=task_def['task_id'], **task_def['parameters'])
task_dict = {}
for task in task_conf:
task_dict[task.get("task_id")] = build_task(task)
return task_dict
def build_flow(dep_conf, task_dict):
for source_key in dep_conf:
dest_task = task_dict.get(source_key)
source_task_keys = dep_conf.get(source_key)
for key in source_task_keys:
source_task = task_dict.get(key)
if (source_task != None and dest_task != None):
source_task >> dest_task
def operator_factory(name):
operators = {
'dummy': DummyOperator,
'bash': BashOperator,
'external_sensor': ExternalTaskSensor,
}
return operators[name] | dags/utils/functions.py | from datetime import datetime, timedelta
import dateutil.parser
import json
from json import JSONDecoder
import re
from airflow import DAG
from airflow.operators.dummy_operator import DummyOperator
from airflow.operators.bash_operator import BashOperator
from airflow.operators.sensors import ExternalTaskSensor
def datetime_praser(json_dict):
for (key, value) in json_dict.items():
if isinstance(value, str) and re.search("d1", value):
json_dict[key] = datetime.strptime(value[3:], "%Y-%m-%dT%H:%M:%S")
elif isinstance(value, str) and re.search("d2", value):
json_dict[key] = timedelta(seconds=int(value[3:]))
else:
json_dict[key] = value
return json_dict
def read_dag_definition(filepath):
"""Reads in a DAG definition. DAGs are defined in JSON
raises a FileNotFoundError if the filepath doesn't exist
raises a InvalidConfigError if the dag definition is invalid
:filepath: filepath to dag definition
:returns: dict
"""
with open(filepath) as f:
content = f.read()
if not content:
return {}
else:
try:
return json.loads(content, object_hook=datetime_praser)
except json.decoder.JSONDecodeError as e:
raise InvalidConfigError("The json format of '{filepath}' is invalid.")
class InvalidConfigError(Exception):
pass
def dag_builder(conf):
"""Return a DAG given a configuration"""
dag = DAG(dag_id=conf['dag_id'], schedule_interval=conf['schedule_interval'], start_date=conf['start_date'], catchup=conf['catchup'], default_args=conf['default_args'])
task_conf = conf.get('tasks', [])
dep_conf = conf.get('dependencies', [])
tasks = {}
if task_conf:
tasks = attach_tasks(dag, task_conf)
if dep_conf:
build_flow(dep_conf, tasks)
return dag
def attach_tasks(dag, task_conf):
def build_task(task_def):
operator = operator_factory(task_def['operator_type'])
return operator(dag=dag, task_id=task_def['task_id'], **task_def['parameters'])
task_dict = {}
for task in task_conf:
task_dict[task.get("task_id")] = build_task(task)
return task_dict
def build_flow(dep_conf, task_dict):
for source_key in dep_conf:
dest_task = task_dict.get(source_key)
source_task_keys = dep_conf.get(source_key)
for key in source_task_keys:
source_task = task_dict.get(key)
if (source_task != None and dest_task != None):
source_task >> dest_task
def operator_factory(name):
operators = {
'dummy': DummyOperator,
'bash': BashOperator,
'external_sensor': ExternalTaskSensor,
}
return operators[name] | 0.52829 | 0.186095 |
import enum
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from PyQt5.QtGui import QTextDocument, QTextDocumentWriter
from PyQt5.QtSql import QSqlDatabase, QSqlQuery
from .recordmodel import RecordModel
class Record():
def __init__(self, recordId, fileName, isSaved):
self.recordId = recordId
self.name = ""
self.surname = ""
self.fileName = fileName
self.isSaved = isSaved
class RecordManager(QObject):
currentRecordChanged = pyqtSignal()
class ErrorCodes(enum.IntEnum):
NoError = 0
RecordExists = enum.auto()
RecordNotExist = enum.auto()
NoCurrentRecord = enum.auto()
FileOpenFailed = enum.auto()
FileSaveFailed = enum.auto()
def __init__(self, textDocument: QTextDocument, parent):
super().__init__(parent)
self.textDocument = textDocument
self.currentRecord = None
self.recordModel = RecordModel(self)
# FILE DIRECTORY PATH
writeDir = QDir(QStandardPaths.writableLocation(QStandardPaths.AppDataLocation))
if not writeDir.mkpath('.'):
qFatal(f"Failed to create writable directory at {writeDir.absolutePath()}")
self.absolutePath = writeDir.absolutePath()
self.setupDatabase()
def setupDatabase(self):
database = QSqlDatabase.database()
if not database.isValid():
database = QSqlDatabase.addDatabase("QSQLITE")
if not database.isValid():
qFatal(f"Cannot add database {database.lastError().text()}")
databasePath = self.absolutePath + "/db.sqlite3"
database.setDatabaseName(databasePath)
if not database.open():
qFatal(f"Cannot open database {database.lastError().text()}")
if not database.tables().count("Records"):
query = QSqlQuery()
if not query.exec(
"""
CREATE TABLE IF NOT EXISTS 'Records' (
'record_id' TEXT PRIMARY KEY,
'name' TEXT NOT NULL,
'surname' TEXT NOT NULL,
'filename' TEXT,
'created' DATETIME DEFAULT (datetime(CURRENT_TIMESTAMP, 'localtime')),
'last_modified' DATETIME DEFAULT (datetime(CURRENT_TIMESTAMP, 'localtime'))
)
"""):
qFatal(f"Failed to query database {database.lastError().text()}")
def setCurrentRecord(self, record):
if self.currentRecord == record:
return
self.currentRecord = record
self.currentRecordChanged.emit()
def create(self, recordId, name, surname):
if self.findRecord(recordId):
return RecordManager.ErrorCodes.RecordExists
fileName = f'{recordId}.htm'
record = Record(recordId, fileName, isSaved=False)
record.name = name
record.surname = surname
# UPDATE DOCUMENT
self.textDocument.setHtml(f"document for {recordId} {name} {surname}")
self.textDocument.setModified(False)
self.setCurrentRecord(record)
return RecordManager.ErrorCodes.NoError
def open(self, recordId):
record = self.findRecord(recordId)
if not record:
return RecordManager.ErrorCodes.RecordNotExist
# FILE READ
file = QFile(f'{self.absolutePath}/{record.fileName}')
if not file.open(QFile.ReadOnly):
qCritical(f"File open failed: {file.errorString()}")
return RecordManager.ErrorCodes.FileOpenFailed
data = file.readAll()
codec = QTextCodec.codecForHtml(data)
text = codec.toUnicode(data)
# UPDATE DOCUMENT
self.textDocument.setHtml(text)
self.textDocument.setModified(False)
self.setCurrentRecord(record)
return RecordManager.ErrorCodes.NoError
def save(self):
record = self.currentRecord
if not record:
return RecordManager.ErrorCodes.NoCurrentRecord
# FILE WRITE
writer = QTextDocumentWriter(f'{self.absolutePath}/{record.fileName}')
success = writer.write(self.textDocument)
if not success:
qCritical("File write failed")
return RecordManager.ErrorCodes.FileSaveFailed
# DATABASE WRITE
query = QSqlQuery()
if record.isSaved:
q = f'UPDATE Records SET \
last_modified = (datetime(CURRENT_TIMESTAMP, "localtime")) \
WHERE record_id="{record.recordId}"'
query.prepare(q)
else:
q = f'INSERT INTO Records (record_id, name, surname, filename) \
VALUES ("{record.recordId}", :name, :surname, "{record.fileName}")'
query.prepare(q)
query.bindValue(":name", record.name)
query.bindValue(":surname", record.surname)
success = query.exec()
if not success:
qCritical(f"Cannot save record to database: {query.lastError().text()}")
return RecordManager.ErrorCodes.FileSaveFailed
# UPDATE DOCUMENT
self.textDocument.setModified(False)
return RecordManager.ErrorCodes.NoError
def findRecord(self, recordId):
query = QSqlQuery(f'SELECT record_id, name, surname, filename FROM Records WHERE record_id="{recordId}"')
if query.next():
name = query.value(1)
surname = query.value(2)
fileName = query.value(3)
record = Record(recordId, fileName, isSaved=True)
record.name = name
record.surname = surname
return record
else:
return None | RecordDatabasePython/recorddatabase/recordmanager.py | import enum
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from PyQt5.QtGui import QTextDocument, QTextDocumentWriter
from PyQt5.QtSql import QSqlDatabase, QSqlQuery
from .recordmodel import RecordModel
class Record():
def __init__(self, recordId, fileName, isSaved):
self.recordId = recordId
self.name = ""
self.surname = ""
self.fileName = fileName
self.isSaved = isSaved
class RecordManager(QObject):
currentRecordChanged = pyqtSignal()
class ErrorCodes(enum.IntEnum):
NoError = 0
RecordExists = enum.auto()
RecordNotExist = enum.auto()
NoCurrentRecord = enum.auto()
FileOpenFailed = enum.auto()
FileSaveFailed = enum.auto()
def __init__(self, textDocument: QTextDocument, parent):
super().__init__(parent)
self.textDocument = textDocument
self.currentRecord = None
self.recordModel = RecordModel(self)
# FILE DIRECTORY PATH
writeDir = QDir(QStandardPaths.writableLocation(QStandardPaths.AppDataLocation))
if not writeDir.mkpath('.'):
qFatal(f"Failed to create writable directory at {writeDir.absolutePath()}")
self.absolutePath = writeDir.absolutePath()
self.setupDatabase()
def setupDatabase(self):
database = QSqlDatabase.database()
if not database.isValid():
database = QSqlDatabase.addDatabase("QSQLITE")
if not database.isValid():
qFatal(f"Cannot add database {database.lastError().text()}")
databasePath = self.absolutePath + "/db.sqlite3"
database.setDatabaseName(databasePath)
if not database.open():
qFatal(f"Cannot open database {database.lastError().text()}")
if not database.tables().count("Records"):
query = QSqlQuery()
if not query.exec(
"""
CREATE TABLE IF NOT EXISTS 'Records' (
'record_id' TEXT PRIMARY KEY,
'name' TEXT NOT NULL,
'surname' TEXT NOT NULL,
'filename' TEXT,
'created' DATETIME DEFAULT (datetime(CURRENT_TIMESTAMP, 'localtime')),
'last_modified' DATETIME DEFAULT (datetime(CURRENT_TIMESTAMP, 'localtime'))
)
"""):
qFatal(f"Failed to query database {database.lastError().text()}")
def setCurrentRecord(self, record):
if self.currentRecord == record:
return
self.currentRecord = record
self.currentRecordChanged.emit()
def create(self, recordId, name, surname):
if self.findRecord(recordId):
return RecordManager.ErrorCodes.RecordExists
fileName = f'{recordId}.htm'
record = Record(recordId, fileName, isSaved=False)
record.name = name
record.surname = surname
# UPDATE DOCUMENT
self.textDocument.setHtml(f"document for {recordId} {name} {surname}")
self.textDocument.setModified(False)
self.setCurrentRecord(record)
return RecordManager.ErrorCodes.NoError
def open(self, recordId):
record = self.findRecord(recordId)
if not record:
return RecordManager.ErrorCodes.RecordNotExist
# FILE READ
file = QFile(f'{self.absolutePath}/{record.fileName}')
if not file.open(QFile.ReadOnly):
qCritical(f"File open failed: {file.errorString()}")
return RecordManager.ErrorCodes.FileOpenFailed
data = file.readAll()
codec = QTextCodec.codecForHtml(data)
text = codec.toUnicode(data)
# UPDATE DOCUMENT
self.textDocument.setHtml(text)
self.textDocument.setModified(False)
self.setCurrentRecord(record)
return RecordManager.ErrorCodes.NoError
def save(self):
record = self.currentRecord
if not record:
return RecordManager.ErrorCodes.NoCurrentRecord
# FILE WRITE
writer = QTextDocumentWriter(f'{self.absolutePath}/{record.fileName}')
success = writer.write(self.textDocument)
if not success:
qCritical("File write failed")
return RecordManager.ErrorCodes.FileSaveFailed
# DATABASE WRITE
query = QSqlQuery()
if record.isSaved:
q = f'UPDATE Records SET \
last_modified = (datetime(CURRENT_TIMESTAMP, "localtime")) \
WHERE record_id="{record.recordId}"'
query.prepare(q)
else:
q = f'INSERT INTO Records (record_id, name, surname, filename) \
VALUES ("{record.recordId}", :name, :surname, "{record.fileName}")'
query.prepare(q)
query.bindValue(":name", record.name)
query.bindValue(":surname", record.surname)
success = query.exec()
if not success:
qCritical(f"Cannot save record to database: {query.lastError().text()}")
return RecordManager.ErrorCodes.FileSaveFailed
# UPDATE DOCUMENT
self.textDocument.setModified(False)
return RecordManager.ErrorCodes.NoError
def findRecord(self, recordId):
query = QSqlQuery(f'SELECT record_id, name, surname, filename FROM Records WHERE record_id="{recordId}"')
if query.next():
name = query.value(1)
surname = query.value(2)
fileName = query.value(3)
record = Record(recordId, fileName, isSaved=True)
record.name = name
record.surname = surname
return record
else:
return None | 0.290176 | 0.075585 |
import torch.nn as nn
from torch.distributions import Categorical
from ActorCritic import ActorCritic
import torch
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
class PPO:
def __init__(self, state_dim, action_dim, n_agents, lr, betas, gamma, K_epochs, eps_clip):
self.lr = lr
self.betas = betas
self.gamma = gamma
self.eps_clip = eps_clip
self.K_epochs = K_epochs
self.policy = ActorCritic(state_dim, action_dim, n_agents).to(device)
self.optimizer = torch.optim.Adam(self.policy.parameters(), lr=lr, betas=betas)
self.policy_old = ActorCritic(state_dim, action_dim, n_agents).to(device)
self.policy_old.load_state_dict(self.policy.state_dict())
self.MseLoss = nn.MSELoss()
def update(self, memory):
# Monte Carlo estimate of state rewards:
rewards = []
discounted_reward = [0, 0]
for reward, is_terminal in zip(reversed(memory.rewards), reversed(memory.is_terminals)):
if all(is_terminal):
discounted_reward =[0, 0]
elif is_terminal[0]:
discounted_reward[0] = 0
elif is_terminal[1]:
discounted_reward[1] = 0
discounted_reward[0] = reward[0] + self.gamma * discounted_reward[0]
discounted_reward[1] = reward[1] + self.gamma * discounted_reward[1]
rewards.insert(0, discounted_reward)
# Normalizing the rewards:
rewards = torch.tensor(rewards).to(device)
rewards = (rewards - rewards.mean()) / (rewards.std() + 1e-5)
# convert list to tensor
old_states = torch.stack(memory.states).to(device).detach()
old_actions = torch.stack(memory.actions).to(device).detach()
old_logprobs = torch.stack(memory.logprobs).to(device).detach()
# Optimize policy for K epochs:
for _ in range(self.K_epochs):
# Evaluating old actions and values :
logprobs, state_values, dist_entropy = self.policy.evaluate(old_states, old_actions)
# Finding the ratio (pi_theta / pi_theta__old):
ratios = torch.exp(logprobs - old_logprobs.detach())
# Finding Surrogate Loss:
advantages = rewards - state_values.detach()
surr1 = ratios * advantages
surr2 = torch.clamp(ratios, 1 - self.eps_clip, 1 + self.eps_clip) * advantages
loss = -torch.min(surr1, surr2) + 0.5 * self.MseLoss(state_values, rewards) - 0.01 * dist_entropy
# take gradient step
self.optimizer.zero_grad()
loss.mean().backward()
self.optimizer.step()
# Copy new weights into old policy:
self.policy_old.load_state_dict(self.policy.state_dict())
return loss.mean() | PPO.py | import torch.nn as nn
from torch.distributions import Categorical
from ActorCritic import ActorCritic
import torch
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
class PPO:
def __init__(self, state_dim, action_dim, n_agents, lr, betas, gamma, K_epochs, eps_clip):
self.lr = lr
self.betas = betas
self.gamma = gamma
self.eps_clip = eps_clip
self.K_epochs = K_epochs
self.policy = ActorCritic(state_dim, action_dim, n_agents).to(device)
self.optimizer = torch.optim.Adam(self.policy.parameters(), lr=lr, betas=betas)
self.policy_old = ActorCritic(state_dim, action_dim, n_agents).to(device)
self.policy_old.load_state_dict(self.policy.state_dict())
self.MseLoss = nn.MSELoss()
def update(self, memory):
# Monte Carlo estimate of state rewards:
rewards = []
discounted_reward = [0, 0]
for reward, is_terminal in zip(reversed(memory.rewards), reversed(memory.is_terminals)):
if all(is_terminal):
discounted_reward =[0, 0]
elif is_terminal[0]:
discounted_reward[0] = 0
elif is_terminal[1]:
discounted_reward[1] = 0
discounted_reward[0] = reward[0] + self.gamma * discounted_reward[0]
discounted_reward[1] = reward[1] + self.gamma * discounted_reward[1]
rewards.insert(0, discounted_reward)
# Normalizing the rewards:
rewards = torch.tensor(rewards).to(device)
rewards = (rewards - rewards.mean()) / (rewards.std() + 1e-5)
# convert list to tensor
old_states = torch.stack(memory.states).to(device).detach()
old_actions = torch.stack(memory.actions).to(device).detach()
old_logprobs = torch.stack(memory.logprobs).to(device).detach()
# Optimize policy for K epochs:
for _ in range(self.K_epochs):
# Evaluating old actions and values :
logprobs, state_values, dist_entropy = self.policy.evaluate(old_states, old_actions)
# Finding the ratio (pi_theta / pi_theta__old):
ratios = torch.exp(logprobs - old_logprobs.detach())
# Finding Surrogate Loss:
advantages = rewards - state_values.detach()
surr1 = ratios * advantages
surr2 = torch.clamp(ratios, 1 - self.eps_clip, 1 + self.eps_clip) * advantages
loss = -torch.min(surr1, surr2) + 0.5 * self.MseLoss(state_values, rewards) - 0.01 * dist_entropy
# take gradient step
self.optimizer.zero_grad()
loss.mean().backward()
self.optimizer.step()
# Copy new weights into old policy:
self.policy_old.load_state_dict(self.policy.state_dict())
return loss.mean() | 0.905475 | 0.509825 |
import logging
import clipl.utility.logger as logger
log = logging.getLogger(__name__)
import hashlib
import ROOT
from math import sqrt, pow
import clipl.analysisbase as analysisbase
import clipl.utility.roottools as roottools
import clipl.analysis_modules.scaleerrors as scaleerrors
class Difference(analysisbase.AnalysisBase):
"""Add differences of histograms"""
def modify_argument_parser(self, parser, args):
super(Difference, self).modify_argument_parser(parser, args)
self.difference_options = parser.add_argument_group("{} options".format(self.name()))
self.difference_options.add_argument("--difference-minuend-nicks", nargs="+",
help="Nick names for the minuends of the difference. Multiple nicks (whitespace separated) will be summed up before calculating the difference.")
self.difference_options.add_argument("--difference-subtrahend-nicks", nargs="+",
help="Nick names for the subtrahend of the difference. Multiple nicks (whitespace separated) will be summed up before calculating the difference.")
self.difference_options.add_argument("--difference-result-nicks", nargs="+",
help="Nick names for the resulting difference graphs.")
def prepare_args(self, parser, plotData):
super(Difference, self).prepare_args(parser, plotData)
self.prepare_list_args(plotData, ["difference_minuend_nicks", "difference_subtrahend_nicks", "difference_result_nicks"])
self.auto_set_arguments(plotData, ["difference_minuend_nicks", "difference_subtrahend_nicks"], "difference_result_nicks", "difference")
for index, (difference_minuend_nick, difference_subtrahend_nick, difference_result_nick) in enumerate(zip(*[plotData.plotdict[k] for k in ["difference_minuend_nicks", "difference_subtrahend_nicks", "difference_result_nicks"]])):
plotData.plotdict["difference_minuend_nicks"][index] = difference_minuend_nick.split()
plotData.plotdict["difference_subtrahend_nicks"][index] = difference_subtrahend_nick.split()
if difference_result_nick is None:
plotData.plotdict["difference_result_nicks"][index] = "difference_{num}_over_{denom}".format(
num="_".join(plotData.plotdict["difference_minuend_nicks"][index]),
denom="_".join(plotData.plotdict["difference_subtrahend_nicks"][index]))
if not plotData.plotdict["difference_result_nicks"][index] in plotData.plotdict["nicks"]:
plotData.plotdict["nicks"].append(plotData.plotdict["difference_result_nicks"][index])
@staticmethod
def get_histograms(plotdict={}, difference_nicks=None, difference_minuend_nicks=None, difference_subtrahend_nicks=None, difference_result_nick=None):
histogram = None
for nick in difference_nicks:
root_object = plotdict[nick]
if histogram is None:
histogram = root_object.Clone(
"difference_" + hashlib.md5("_".join([str(difference_nicks), str(difference_subtrahend_nicks), difference_result_nick])).hexdigest()
)
else: histogram.Add(root_object)
if hasattr(histogram, "SetDirectory"): histogram.SetDirectory(0)
return histogram
def run(self, plotData=None):
super(Difference, self).run(plotData)
for difference_minuend_nicks, difference_subtrahend_nicks, difference_result_nick in zip(
*[plotData.plotdict[k] for k in ["difference_minuend_nicks", "difference_subtrahend_nicks", "difference_result_nicks"]]):
new_name = "histogram_" + hashlib.md5("_".join(map(str, [difference_minuend_nicks, difference_subtrahend_nicks, difference_result_nick]))).hexdigest()
# Create nick sum histograms
minuend_histogram = self.get_histograms(plotdict=plotData.plotdict["root_objects"], difference_nicks=difference_minuend_nicks, difference_minuend_nicks=difference_minuend_nicks, difference_subtrahend_nicks=difference_subtrahend_nicks, difference_result_nick=difference_result_nick)
subtrahend_histogram = self.get_histograms(plotdict=plotData.plotdict["root_objects"], difference_nicks=difference_subtrahend_nicks, difference_minuend_nicks=difference_minuend_nicks, difference_subtrahend_nicks=difference_subtrahend_nicks, difference_result_nick=difference_result_nick)
# preparations for differences with one function
if ((isinstance(minuend_histogram, ROOT.TGraph) and isinstance(subtrahend_histogram, ROOT.TF1)) or
(isinstance(minuend_histogram, ROOT.TF1) and isinstance(subtrahend_histogram, ROOT.TGraph))):
graph = minuend_histogram if isinstance(minuend_histogram, ROOT.TGraph) else subtrahend_histogram
function = subtrahend_histogram if isinstance(minuend_histogram, ROOT.TGraph) else minuend_histogram
function_graph = graph.Clone("function_graph_"+function.GetName())
scaleerrors.ScaleErrors.scale_errors(function_graph, scale_factor=0.0)
x_values = function_graph.GetX()
x_values = [x_values[index] for index in xrange(function_graph.GetN())]
if not isinstance(function_graph, ROOT.TGraph2D):
y_values = [function.Eval(x_value) for x_value in x_values]
for index, (x_value, y_value) in enumerate(zip(x_values, y_values)):
function_graph.SetPoint(index, x_value, y_value)
else:
y_values = function_graph.GetY()
y_values = [y_values[index] for index in xrange(function_graph.GetN())]
z_values = [function.Eval(x_value, y_value) for x_value, y_value in zip(x_values, y_values)]
for index, (x_value, y_value, z_value) in enumerate(zip(x_values, y_values, z_values)):
function_graph.SetPoint(index, x_value, y_value, z_value)
if isinstance(minuend_histogram, ROOT.TGraph):
subtrahend_histogram = function_graph
else:
minuend_histogram = function_graph
# Calculate difference
if isinstance(minuend_histogram, ROOT.TGraph) and isinstance(subtrahend_histogram, ROOT.TGraph):
if isinstance(minuend_histogram, ROOT.TGraphAsymmErrors) and isinstance(subtrahend_histogram, ROOT.TGraphAsymmErrors):
difference_histogram = ROOT.TGraphAsymmErrors()
elif isinstance(minuend_histogram, ROOT.TGraphErrors) and isinstance(subtrahend_histogram, ROOT.TGraphErrors):
difference_histogram = ROOT.TGraphErrors()
else:
difference_histogram = ROOT.TGraph()
for point in range(0, minuend_histogram.GetN()):
x_value = ROOT.Double(0)
y_value_minuend = ROOT.Double(0)
minuend_histogram.GetPoint(point, x_value, y_value_minuend)
y_value_subtrahend = subtrahend_histogram.Eval(x_value)
difference = y_value_minuend - y_value_subtrahend
difference_histogram.SetPoint(point, x_value, difference)
if isinstance(difference_histogram, ROOT.TGraphAsymmErrors):
x_err_high = minuend_histogram.GetErrorXhigh(point)
x_err_low = minuend_histogram.GetErrorXlow(point)
y_err_high_minuend = minuend_histogram.GetErrorYhigh(point)
y_err_low_minuend = minuend_histogram.GetErrorYlow(point)
y_err_high_subtrahend = subtrahend_histogram.GetErrorYhigh(point)
y_err_low_subtrahend = subtrahend_histogram.GetErrorYlow(point)
y_err_high_difference = sqrt(pow(y_err_high_minuend, 2) + pow(y_err_high_subtrahend, 2))
y_err_low_difference = sqrt(pow(y_err_low_minuend, 2) + pow(y_err_low_subtrahend, 2))
difference_histogram.SetPointEXhigh(point, x_err_high)
difference_histogram.SetPointEXlow(point, x_err_low)
difference_histogram.SetPointEYhigh(point, y_err_high_difference)
difference_histogram.SetPointEYlow(point, y_err_low_difference)
elif isinstance(difference_histogram, ROOT.TGraphErrors):
x_err = minuend_histogram.GetErrorX(point)
y_err_minuend = minuend_histogram.GetErrorY(point)
y_err_subtrahend = subtrahend_histogram.GetErrorY(point)
y_err_difference = sqrt(pow(y_err_minuend, 2) + pow(y_err_subtrahend, 2))
difference_histogram.SetPointError(point, x_err, y_err_difference)
else: log.fatal("difference_histogram is neither ROOT.TGraphAsymmErrors nor ROOT.TGraphErrors")
else:
minuend_histogram = roottools.RootTools.to_histogram(minuend_histogram)
subtrahend_histogram = roottools.RootTools.to_histogram(subtrahend_histogram)
# check for same binning
minuend_histogram_binning = [roottools.RootTools.get_binning(minuend_histogram, 0), roottools.RootTools.get_binning(minuend_histogram, 1), roottools.RootTools.get_binning(minuend_histogram, 2)]
subtrahend_histogram_binning = [roottools.RootTools.get_binning(subtrahend_histogram, 0), roottools.RootTools.get_binning(subtrahend_histogram, 1), roottools.RootTools.get_binning(subtrahend_histogram, 2)]
minuend_histogram_n_bins = reduce(lambda a, b: a*b, map(len, minuend_histogram_binning))
subtrahend_histogram_n_bins = reduce(lambda a, b: a*b, map(len, subtrahend_histogram_binning))
for axis in range(minuend_histogram.GetDimension(), 3):
minuend_histogram_binning[axis] = 1
for axis in range(subtrahend_histogram.GetDimension(), 3):
subtrahend_histogram_binning[axis] = 1
if minuend_histogram_n_bins == subtrahend_histogram_n_bins:
difference_histogram = minuend_histogram.Clone(new_name)
difference_histogram.Add(subtrahend_histogram, -1.0)
else:
if minuend_histogram_n_bins < subtrahend_histogram_n_bins:
difference_histogram = subtrahend_histogram.Clone(new_name)
else:
difference_histogram = minuend_histogram.Clone(new_name)
difference_histogram.Reset()
for x_bin in xrange(0, difference_histogram.GetNbinsX()+2):
x_bin_center = difference_histogram.GetXaxis().GetBinCenter(x_bin)
x_bin_minuend = minuend_histogram.GetXaxis().FindBin(x_bin_center)
x_bin_subtrahend = subtrahend_histogram.GetXaxis().FindBin(x_bin_center)
if difference_histogram.GetDimension() > 1:
for y_bin in xrange(0, difference_histogram.GetNbinsY()+2):
y_bin_center = difference_histogram.GetYaxis().GetBinCenter(y_bin)
y_bin_minuend = minuend_histogram.GetYaxis().FindBin(y_bin_center)
y_bin_subtrahend = subtrahend_histogram.GetYaxis().FindBin(y_bin_center)
if difference_histogram.GetDimension() > 2:
for z_bin in xrange(0, difference_histogram.GetNbinsZ()+2):
z_bin_center = difference_histogram.GetZaxis().GetBinCenter(z_bin)
z_bin_minuend = minuend_histogram.GetZaxis().FindBin(z_bin_center)
z_bin_subtrahend = subtrahend_histogram.GetZaxis().FindBin(z_bin_center)
ratio, ratio_error = Difference.subtract_bin_contents(minuend_histogram, [x_bin_minuend, y_bin_minuend, z_bin_minuend], subtrahend_histogram, [x_bin_subtrahend, y_bin_subtrahend, z_bin_subtrahend])
difference_histogram.SetBinContent(x_bin, y_bin, z_bin, ratio)
difference_histogram.SetBinError(x_bin, y_bin, z_bin, ratio_error)
else:
ratio, ratio_error = Difference.subtract_bin_contents(minuend_histogram, [x_bin_minuend, y_bin_minuend], subtrahend_histogram, [x_bin_subtrahend, y_bin_subtrahend])
difference_histogram.SetBinContent(x_bin, y_bin, ratio)
difference_histogram.SetBinError(x_bin, y_bin, ratio_error)
else:
ratio, ratio_error = Difference.subtract_bin_contents(minuend_histogram, [x_bin_minuend], subtrahend_histogram, [x_bin_subtrahend])
difference_histogram.SetBinContent(x_bin, ratio)
difference_histogram.SetBinError(x_bin, ratio_error)
if hasattr(difference_histogram, "SetDirectory"): difference_histogram.SetDirectory(0)
difference_histogram.SetTitle("")
plotData.plotdict["root_objects"][difference_result_nick] = difference_histogram
@staticmethod
def subtract_bin_contents(minuend_histogram, minuend_bins, subtrahend_histogram, subtrahend_bins):
content_minuend = minuend_histogram.GetBinContent(*minuend_bins)
error_minuend = minuend_histogram.GetBinError(*minuend_bins)
content_subtrahend = subtrahend_histogram.GetBinContent(*subtrahend_bins)
error_subtrahend = subtrahend_histogram.GetBinError(*subtrahend_bins)
difference = content_minuend - content_subtrahend
difference_error = sqrt(pow(error_minuend, 2) + pow(error_subtrahend, 2))
return difference, difference_error | clipl/analysis_modules/difference.py |
import logging
import clipl.utility.logger as logger
log = logging.getLogger(__name__)
import hashlib
import ROOT
from math import sqrt, pow
import clipl.analysisbase as analysisbase
import clipl.utility.roottools as roottools
import clipl.analysis_modules.scaleerrors as scaleerrors
class Difference(analysisbase.AnalysisBase):
"""Add differences of histograms"""
def modify_argument_parser(self, parser, args):
super(Difference, self).modify_argument_parser(parser, args)
self.difference_options = parser.add_argument_group("{} options".format(self.name()))
self.difference_options.add_argument("--difference-minuend-nicks", nargs="+",
help="Nick names for the minuends of the difference. Multiple nicks (whitespace separated) will be summed up before calculating the difference.")
self.difference_options.add_argument("--difference-subtrahend-nicks", nargs="+",
help="Nick names for the subtrahend of the difference. Multiple nicks (whitespace separated) will be summed up before calculating the difference.")
self.difference_options.add_argument("--difference-result-nicks", nargs="+",
help="Nick names for the resulting difference graphs.")
def prepare_args(self, parser, plotData):
super(Difference, self).prepare_args(parser, plotData)
self.prepare_list_args(plotData, ["difference_minuend_nicks", "difference_subtrahend_nicks", "difference_result_nicks"])
self.auto_set_arguments(plotData, ["difference_minuend_nicks", "difference_subtrahend_nicks"], "difference_result_nicks", "difference")
for index, (difference_minuend_nick, difference_subtrahend_nick, difference_result_nick) in enumerate(zip(*[plotData.plotdict[k] for k in ["difference_minuend_nicks", "difference_subtrahend_nicks", "difference_result_nicks"]])):
plotData.plotdict["difference_minuend_nicks"][index] = difference_minuend_nick.split()
plotData.plotdict["difference_subtrahend_nicks"][index] = difference_subtrahend_nick.split()
if difference_result_nick is None:
plotData.plotdict["difference_result_nicks"][index] = "difference_{num}_over_{denom}".format(
num="_".join(plotData.plotdict["difference_minuend_nicks"][index]),
denom="_".join(plotData.plotdict["difference_subtrahend_nicks"][index]))
if not plotData.plotdict["difference_result_nicks"][index] in plotData.plotdict["nicks"]:
plotData.plotdict["nicks"].append(plotData.plotdict["difference_result_nicks"][index])
@staticmethod
def get_histograms(plotdict={}, difference_nicks=None, difference_minuend_nicks=None, difference_subtrahend_nicks=None, difference_result_nick=None):
histogram = None
for nick in difference_nicks:
root_object = plotdict[nick]
if histogram is None:
histogram = root_object.Clone(
"difference_" + hashlib.md5("_".join([str(difference_nicks), str(difference_subtrahend_nicks), difference_result_nick])).hexdigest()
)
else: histogram.Add(root_object)
if hasattr(histogram, "SetDirectory"): histogram.SetDirectory(0)
return histogram
def run(self, plotData=None):
super(Difference, self).run(plotData)
for difference_minuend_nicks, difference_subtrahend_nicks, difference_result_nick in zip(
*[plotData.plotdict[k] for k in ["difference_minuend_nicks", "difference_subtrahend_nicks", "difference_result_nicks"]]):
new_name = "histogram_" + hashlib.md5("_".join(map(str, [difference_minuend_nicks, difference_subtrahend_nicks, difference_result_nick]))).hexdigest()
# Create nick sum histograms
minuend_histogram = self.get_histograms(plotdict=plotData.plotdict["root_objects"], difference_nicks=difference_minuend_nicks, difference_minuend_nicks=difference_minuend_nicks, difference_subtrahend_nicks=difference_subtrahend_nicks, difference_result_nick=difference_result_nick)
subtrahend_histogram = self.get_histograms(plotdict=plotData.plotdict["root_objects"], difference_nicks=difference_subtrahend_nicks, difference_minuend_nicks=difference_minuend_nicks, difference_subtrahend_nicks=difference_subtrahend_nicks, difference_result_nick=difference_result_nick)
# preparations for differences with one function
if ((isinstance(minuend_histogram, ROOT.TGraph) and isinstance(subtrahend_histogram, ROOT.TF1)) or
(isinstance(minuend_histogram, ROOT.TF1) and isinstance(subtrahend_histogram, ROOT.TGraph))):
graph = minuend_histogram if isinstance(minuend_histogram, ROOT.TGraph) else subtrahend_histogram
function = subtrahend_histogram if isinstance(minuend_histogram, ROOT.TGraph) else minuend_histogram
function_graph = graph.Clone("function_graph_"+function.GetName())
scaleerrors.ScaleErrors.scale_errors(function_graph, scale_factor=0.0)
x_values = function_graph.GetX()
x_values = [x_values[index] for index in xrange(function_graph.GetN())]
if not isinstance(function_graph, ROOT.TGraph2D):
y_values = [function.Eval(x_value) for x_value in x_values]
for index, (x_value, y_value) in enumerate(zip(x_values, y_values)):
function_graph.SetPoint(index, x_value, y_value)
else:
y_values = function_graph.GetY()
y_values = [y_values[index] for index in xrange(function_graph.GetN())]
z_values = [function.Eval(x_value, y_value) for x_value, y_value in zip(x_values, y_values)]
for index, (x_value, y_value, z_value) in enumerate(zip(x_values, y_values, z_values)):
function_graph.SetPoint(index, x_value, y_value, z_value)
if isinstance(minuend_histogram, ROOT.TGraph):
subtrahend_histogram = function_graph
else:
minuend_histogram = function_graph
# Calculate difference
if isinstance(minuend_histogram, ROOT.TGraph) and isinstance(subtrahend_histogram, ROOT.TGraph):
if isinstance(minuend_histogram, ROOT.TGraphAsymmErrors) and isinstance(subtrahend_histogram, ROOT.TGraphAsymmErrors):
difference_histogram = ROOT.TGraphAsymmErrors()
elif isinstance(minuend_histogram, ROOT.TGraphErrors) and isinstance(subtrahend_histogram, ROOT.TGraphErrors):
difference_histogram = ROOT.TGraphErrors()
else:
difference_histogram = ROOT.TGraph()
for point in range(0, minuend_histogram.GetN()):
x_value = ROOT.Double(0)
y_value_minuend = ROOT.Double(0)
minuend_histogram.GetPoint(point, x_value, y_value_minuend)
y_value_subtrahend = subtrahend_histogram.Eval(x_value)
difference = y_value_minuend - y_value_subtrahend
difference_histogram.SetPoint(point, x_value, difference)
if isinstance(difference_histogram, ROOT.TGraphAsymmErrors):
x_err_high = minuend_histogram.GetErrorXhigh(point)
x_err_low = minuend_histogram.GetErrorXlow(point)
y_err_high_minuend = minuend_histogram.GetErrorYhigh(point)
y_err_low_minuend = minuend_histogram.GetErrorYlow(point)
y_err_high_subtrahend = subtrahend_histogram.GetErrorYhigh(point)
y_err_low_subtrahend = subtrahend_histogram.GetErrorYlow(point)
y_err_high_difference = sqrt(pow(y_err_high_minuend, 2) + pow(y_err_high_subtrahend, 2))
y_err_low_difference = sqrt(pow(y_err_low_minuend, 2) + pow(y_err_low_subtrahend, 2))
difference_histogram.SetPointEXhigh(point, x_err_high)
difference_histogram.SetPointEXlow(point, x_err_low)
difference_histogram.SetPointEYhigh(point, y_err_high_difference)
difference_histogram.SetPointEYlow(point, y_err_low_difference)
elif isinstance(difference_histogram, ROOT.TGraphErrors):
x_err = minuend_histogram.GetErrorX(point)
y_err_minuend = minuend_histogram.GetErrorY(point)
y_err_subtrahend = subtrahend_histogram.GetErrorY(point)
y_err_difference = sqrt(pow(y_err_minuend, 2) + pow(y_err_subtrahend, 2))
difference_histogram.SetPointError(point, x_err, y_err_difference)
else: log.fatal("difference_histogram is neither ROOT.TGraphAsymmErrors nor ROOT.TGraphErrors")
else:
minuend_histogram = roottools.RootTools.to_histogram(minuend_histogram)
subtrahend_histogram = roottools.RootTools.to_histogram(subtrahend_histogram)
# check for same binning
minuend_histogram_binning = [roottools.RootTools.get_binning(minuend_histogram, 0), roottools.RootTools.get_binning(minuend_histogram, 1), roottools.RootTools.get_binning(minuend_histogram, 2)]
subtrahend_histogram_binning = [roottools.RootTools.get_binning(subtrahend_histogram, 0), roottools.RootTools.get_binning(subtrahend_histogram, 1), roottools.RootTools.get_binning(subtrahend_histogram, 2)]
minuend_histogram_n_bins = reduce(lambda a, b: a*b, map(len, minuend_histogram_binning))
subtrahend_histogram_n_bins = reduce(lambda a, b: a*b, map(len, subtrahend_histogram_binning))
for axis in range(minuend_histogram.GetDimension(), 3):
minuend_histogram_binning[axis] = 1
for axis in range(subtrahend_histogram.GetDimension(), 3):
subtrahend_histogram_binning[axis] = 1
if minuend_histogram_n_bins == subtrahend_histogram_n_bins:
difference_histogram = minuend_histogram.Clone(new_name)
difference_histogram.Add(subtrahend_histogram, -1.0)
else:
if minuend_histogram_n_bins < subtrahend_histogram_n_bins:
difference_histogram = subtrahend_histogram.Clone(new_name)
else:
difference_histogram = minuend_histogram.Clone(new_name)
difference_histogram.Reset()
for x_bin in xrange(0, difference_histogram.GetNbinsX()+2):
x_bin_center = difference_histogram.GetXaxis().GetBinCenter(x_bin)
x_bin_minuend = minuend_histogram.GetXaxis().FindBin(x_bin_center)
x_bin_subtrahend = subtrahend_histogram.GetXaxis().FindBin(x_bin_center)
if difference_histogram.GetDimension() > 1:
for y_bin in xrange(0, difference_histogram.GetNbinsY()+2):
y_bin_center = difference_histogram.GetYaxis().GetBinCenter(y_bin)
y_bin_minuend = minuend_histogram.GetYaxis().FindBin(y_bin_center)
y_bin_subtrahend = subtrahend_histogram.GetYaxis().FindBin(y_bin_center)
if difference_histogram.GetDimension() > 2:
for z_bin in xrange(0, difference_histogram.GetNbinsZ()+2):
z_bin_center = difference_histogram.GetZaxis().GetBinCenter(z_bin)
z_bin_minuend = minuend_histogram.GetZaxis().FindBin(z_bin_center)
z_bin_subtrahend = subtrahend_histogram.GetZaxis().FindBin(z_bin_center)
ratio, ratio_error = Difference.subtract_bin_contents(minuend_histogram, [x_bin_minuend, y_bin_minuend, z_bin_minuend], subtrahend_histogram, [x_bin_subtrahend, y_bin_subtrahend, z_bin_subtrahend])
difference_histogram.SetBinContent(x_bin, y_bin, z_bin, ratio)
difference_histogram.SetBinError(x_bin, y_bin, z_bin, ratio_error)
else:
ratio, ratio_error = Difference.subtract_bin_contents(minuend_histogram, [x_bin_minuend, y_bin_minuend], subtrahend_histogram, [x_bin_subtrahend, y_bin_subtrahend])
difference_histogram.SetBinContent(x_bin, y_bin, ratio)
difference_histogram.SetBinError(x_bin, y_bin, ratio_error)
else:
ratio, ratio_error = Difference.subtract_bin_contents(minuend_histogram, [x_bin_minuend], subtrahend_histogram, [x_bin_subtrahend])
difference_histogram.SetBinContent(x_bin, ratio)
difference_histogram.SetBinError(x_bin, ratio_error)
if hasattr(difference_histogram, "SetDirectory"): difference_histogram.SetDirectory(0)
difference_histogram.SetTitle("")
plotData.plotdict["root_objects"][difference_result_nick] = difference_histogram
@staticmethod
def subtract_bin_contents(minuend_histogram, minuend_bins, subtrahend_histogram, subtrahend_bins):
content_minuend = minuend_histogram.GetBinContent(*minuend_bins)
error_minuend = minuend_histogram.GetBinError(*minuend_bins)
content_subtrahend = subtrahend_histogram.GetBinContent(*subtrahend_bins)
error_subtrahend = subtrahend_histogram.GetBinError(*subtrahend_bins)
difference = content_minuend - content_subtrahend
difference_error = sqrt(pow(error_minuend, 2) + pow(error_subtrahend, 2))
return difference, difference_error | 0.516839 | 0.301144 |
import sys
import os
sys.path.insert(0,
os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
import bson_dataframe
import numpy
import pyarrow
import unittest
class TestArrayGenerator(bson_dataframe.TypeVisitor):
    """Generate a random ``pyarrow`` array of a given type for testing.

    Walks ``typ`` with the ``TypeVisitor`` protocol and fills
    ``self.data`` (a ``bson_dataframe.ArrayData``) with random buffers
    laid out per the Arrow columnar format for that type.  The finished
    array is exposed as ``self.array``.

    Args:
        n: Number of elements to generate.
        typ: ``pyarrow.DataType`` of the generated array.
        nullable: When true, a random validity bitmap is generated;
            otherwise the array carries no nulls.
    """

    def __init__(self, n, typ, nullable):
        self.nullable = nullable
        self.data = bson_dataframe.ArrayData()
        self.data.length = n
        self.data.type = typ
        if pyarrow.types.is_null(typ):
            # Null arrays have no validity buffer; every slot is null.
            self.data.null_count = n
            self.data.buffers.append(None)
        elif self.nullable:
            # Random validity bitmap.  Arrow bitmaps are LSB-first, so
            # unpack/pack with bitorder='little' to match how
            # _generate_offsets() reads this buffer back; using the
            # default MSB-first order would make null_count inconsistent
            # with the buffer whenever n is not a multiple of 8.
            m = n // 8 + 1
            mask = numpy.ndarray(m, numpy.uint8, numpy.random.bytes(m))
            vals = numpy.unpackbits(mask, bitorder='little')[:n]
            self.data.null_count = n - numpy.sum(vals)
            buf = numpy.packbits(vals, bitorder='little').tobytes()
            self.data.buffers.append(pyarrow.py_buffer(buf))
        else:
            # Non-nullable: no validity buffer, zero nulls.
            self.data.null_count = 0
            self.data.buffers.append(None)
        self.accept(typ)
        self.array = self.data.make_array()

    def visit_null(self, typ):
        # Null arrays carry no value buffer at all.
        pass

    def visit_bool(self, typ):
        # Booleans are bit-packed; generate random bits in Arrow's
        # LSB-first order (consistent with the validity bitmap above).
        n = self.data.length // 8 + 1
        bits = numpy.ndarray(n, numpy.uint8, numpy.random.bytes(n))
        vals = numpy.unpackbits(bits, bitorder='little')[:self.data.length]
        buf = numpy.packbits(vals, bitorder='little').tobytes()
        self.data.buffers.append(pyarrow.py_buffer(buf))

    def _visit_flat(self, typ):
        """Append a random fixed-width value buffer for primitive types."""
        buf = numpy.random.bytes(self.data.length * typ.bit_width // 8)
        self.data.buffers.append(pyarrow.py_buffer(buf))

    def visit_int8(self, typ):
        self._visit_flat(typ)

    def visit_int16(self, typ):
        self._visit_flat(typ)

    def visit_int32(self, typ):
        self._visit_flat(typ)

    def visit_int64(self, typ):
        self._visit_flat(typ)

    def visit_uint8(self, typ):
        self._visit_flat(typ)

    def visit_uint16(self, typ):
        self._visit_flat(typ)

    def visit_uint32(self, typ):
        self._visit_flat(typ)

    def visit_uint64(self, typ):
        self._visit_flat(typ)

    def visit_float16(self, typ):
        self._visit_flat(typ)

    def visit_time32(self, typ):
        self._visit_flat(typ)

    def visit_time64(self, typ):
        self._visit_flat(typ)

    def visit_date32(self, typ):
        self._visit_flat(typ)

    def visit_date64(self, typ):
        self._visit_flat(typ)

    def visit_timestamp(self, typ):
        self._visit_flat(typ)

    def visit_fixed_size_binary(self, typ):
        self._visit_flat(typ)

    def visit_float32(self, typ):
        # Use random floats in [0, 1) rather than random bytes so the
        # values are valid (no NaN payload surprises in comparisons).
        buf = numpy.random.rand(self.data.length).astype(
            numpy.float32).tobytes()
        self.data.buffers.append(pyarrow.py_buffer(buf))

    def visit_float64(self, typ):
        buf = numpy.random.rand(self.data.length).astype(
            numpy.float64).tobytes()
        self.data.buffers.append(pyarrow.py_buffer(buf))

    def _generate_offsets(self):
        """Append a random int32 offsets buffer (binary/string/list).

        Null slots (per the validity bitmap) get zero-length entries.
        Returns the total number of child elements, i.e. the final
        offset.
        """
        counts = numpy.random.randint(0, 10, self.data.length + 1, numpy.int32)
        counts[0] = 0
        if self.nullable:
            buf = self.data.buffers[0]
            bits = numpy.ndarray(len(buf), numpy.uint8, buf)
            mask = numpy.unpackbits(bits, bitorder='little')[:self.data.length]
            for i, v in enumerate(mask):
                if not v:
                    counts[i + 1] = 0
        offsets = numpy.cumsum(counts, dtype=numpy.int32)
        buf = offsets.tobytes()
        self.data.buffers.append(pyarrow.py_buffer(buf))
        return offsets[-1]

    def visit_binary(self, typ):
        n = self._generate_offsets()
        buf = numpy.random.bytes(n)
        self.data.buffers.append(pyarrow.py_buffer(buf))

    def visit_string(self, typ):
        # Same layout as binary; random bytes stand in for string data.
        self.visit_binary(typ)

    def visit_list(self, typ):
        # Offsets buffer plus a child array holding the flattened values.
        n = self._generate_offsets()
        self.data.children.append(
            TestArrayGenerator(n, typ.value_type, self.nullable).array)

    def visit_struct(self, typ):
        # One child array per field, each with the parent's length.
        for field in typ:
            self.data.children.append(
                TestArrayGenerator(self.data.length, field.type,
                                   self.nullable).array)

    def visit_dictionary(self, typ):
        # Random indices into a 10-element, non-nullable dictionary.
        index = numpy.random.randint(0, 10, self.data.length,
                                     typ.index_type.to_pandas_dtype())
        buf = index.tobytes()
        self.data.buffers.append(pyarrow.py_buffer(buf))
        self.data.dictionary = TestArrayGenerator(10, typ.value_type,
                                                  False).array
def test_table(n, types=None, offset=None, length=None, nullable=True):
    """Build a ``pyarrow.Table`` of random columns for round-trip tests.

    For every entry of ``types`` a non-nullable column named ``str(t)``
    is generated; when ``nullable`` is true a second column named
    ``str(t) + ' (null)'`` with a random validity bitmap is added.  When
    ``offset`` is given, every array is sliced to ``[offset,
    offset + length)`` before being added to the table.

    Args:
        n: Number of rows to generate per column.
        types: Iterable of ``pyarrow.DataType``; defaults to a broad
            selection covering every supported type.
        offset: Optional slice start applied to each generated array.
        length: Slice length, used only when ``offset`` is not None.
        nullable: Whether to add the nullable variant of each column.
    """
    if types is None:
        types = [
            pyarrow.null(),
            pyarrow.bool_(),
            pyarrow.int8(),
            pyarrow.int16(),
            pyarrow.int32(),
            pyarrow.int64(),
            pyarrow.uint8(),
            pyarrow.uint16(),
            pyarrow.uint32(),
            pyarrow.uint64(),
            pyarrow.float16(),
            pyarrow.float32(),
            pyarrow.float64(),
            pyarrow.date32(),
            pyarrow.date64(),
            pyarrow.timestamp('s'),
            pyarrow.timestamp('ms'),
            pyarrow.timestamp('us'),
            pyarrow.timestamp('ns'),
            pyarrow.time32('s'),
            pyarrow.time32('ms'),
            pyarrow.time64('us'),
            pyarrow.time64('ns'),
            pyarrow.string(),
            pyarrow.binary(),
            pyarrow.binary(4),
            pyarrow.dictionary(pyarrow.int32(), pyarrow.string(), True),
            pyarrow.dictionary(pyarrow.int64(), pyarrow.int64(), True),
            pyarrow.dictionary(pyarrow.int32(), pyarrow.string(), False),
            pyarrow.dictionary(pyarrow.int64(), pyarrow.int64(), False),
            pyarrow.list_(pyarrow.int32()),
            pyarrow.struct([pyarrow.field('int32', pyarrow.int32())]),
            pyarrow.list_(
                pyarrow.struct([pyarrow.field('int32', pyarrow.int32())])),
            pyarrow.struct(
                [pyarrow.field('int32', pyarrow.list_(pyarrow.int32()))]),
        ]
    columns = []
    for typ in types:
        # Non-nullable variant first, then (optionally) the nullable one,
        # so the column order matches the historical layout.
        variants = [(str(typ), False)]
        if nullable:
            variants.append((str(typ) + ' (null)', True))
        for name, with_nulls in variants:
            array = TestArrayGenerator(n, typ, with_nulls).array
            if offset is not None:
                array = array.slice(offset, length)
            columns.append(pyarrow.column(name, array))
    return pyarrow.Table.from_arrays(columns)
class TestArrowBSON(unittest.TestCase):
    """Round-trip tests for the bson_dataframe codecs and table I/O."""

    def _assert_codec_roundtrip(self, encode, decode, val, expected=None):
        """Encode ``val``, decode it back, and check bytes and dtypes.

        ``expected`` is the array the decoded result must equal; it
        defaults to ``val`` itself.
        """
        if expected is None:
            expected = val
        enc = encode(val)
        dec = decode(enc)
        self.assertEqual(expected.tobytes(), dec.tobytes())
        self.assertEqual(expected.dtype, enc.dtype)
        self.assertEqual(expected.dtype, dec.dtype)

    def _assert_table_roundtrip(self, table):
        """Serialize ``table`` and check that reading it back is equal."""
        buf = bson_dataframe.write_table(table)
        ret = bson_dataframe.read_table(buf)
        self.assertTrue(ret.equals(table))

    def test_offsets_codec(self):
        val = numpy.array([0, 1, 3, 3, 4, 8, 9, 13], numpy.int32)
        self._assert_codec_roundtrip(bson_dataframe.encode_offsets,
                                     bson_dataframe.decode_offsets, val)
        # Offsets with a non-zero start must decode back to the
        # equivalent zero-based offsets.
        off = val + numpy.int32(10)
        self._assert_codec_roundtrip(bson_dataframe.encode_offsets,
                                     bson_dataframe.decode_offsets, off,
                                     expected=val)

    def test_datetime_codec(self):
        val = numpy.array([1, 1, 3, 3, 4, 8, 9, 13], numpy.int32)
        self._assert_codec_roundtrip(bson_dataframe.encode_datetime,
                                     bson_dataframe.decode_datetime, val)

    def test_write(self):
        table = test_table(1000)
        # Serialized output must pass schema validation; the validated
        # document is dumped for manual inspection.  (The table is
        # serialized once -- the previous redundant write_table call
        # whose result was discarded has been removed.)
        doc = bson_dataframe.validate(bson_dataframe.write_table(table))
        with open('test.json', 'w') as out:
            out.write(doc)

    def test_read(self):
        self._assert_table_roundtrip(test_table(1000))

    def test_slice(self):
        self._assert_table_roundtrip(test_table(1000, offset=100, length=500))

    def test_slice_head(self):
        self._assert_table_roundtrip(test_table(1000, offset=0, length=500))

    def test_slice_tail(self):
        self._assert_table_roundtrip(test_table(1000, offset=500, length=500))
# Allow running this module directly as a test script.
# (Fixes a corrupted line: stray concatenation residue had been fused
# onto the unittest.main() call, which is a syntax error.)
if __name__ == '__main__':
    unittest.main()
import os
sys.path.insert(0,
os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
import bson_dataframe
import numpy
import pyarrow
import unittest
class TestArrayGenerator(bson_dataframe.TypeVisitor):
    """Generate a random pyarrow array of type `typ` for round-trip tests.

    The constructor visits `typ` via the TypeVisitor protocol; each
    ``visit_*`` method appends random buffers/children to ``self.data``.
    The finished array is exposed as ``self.array``.
    """

    def __init__(self, n, typ, nullable):
        self.nullable = nullable
        self.data = bson_dataframe.ArrayData()
        self.data.length = n
        self.data.type = typ
        if pyarrow.types.is_null(typ):
            # Null arrays are all-null and carry no validity buffer.
            self.data.null_count = n
            self.data.buffers.append(None)
        elif self.nullable:
            # Random validity bitmap; null_count derived from its set bits.
            m = n // 8 + 1
            mask = numpy.ndarray(m, numpy.uint8, numpy.random.bytes(m))
            vals = numpy.unpackbits(mask)[:n]
            self.data.null_count = n - numpy.sum(vals)
            buf = numpy.packbits(vals).tobytes()
            self.data.buffers.append(pyarrow.py_buffer(buf))
        else:
            self.data.null_count = 0
            self.data.buffers.append(None)
        self.accept(typ)
        self.array = self.data.make_array()

    def visit_null(self, typ):
        # Nothing beyond the placeholder buffer added in __init__.
        pass

    def visit_bool(self, typ):
        # Random bit-packed boolean values.
        n = self.data.length // 8 + 1
        bits = numpy.ndarray(n, numpy.uint8, numpy.random.bytes(n))
        vals = numpy.unpackbits(bits)[:self.data.length]
        buf = numpy.packbits(vals).tobytes()
        self.data.buffers.append(pyarrow.py_buffer(buf))

    def _visit_flat(self, typ):
        # Fixed-width types: one random buffer of length * bit_width / 8 bytes.
        buf = numpy.random.bytes(self.data.length * typ.bit_width // 8)
        self.data.buffers.append(pyarrow.py_buffer(buf))

    def visit_int8(self, typ):
        self._visit_flat(typ)

    def visit_int16(self, typ):
        self._visit_flat(typ)

    def visit_int32(self, typ):
        self._visit_flat(typ)

    def visit_int64(self, typ):
        self._visit_flat(typ)

    def visit_uint8(self, typ):
        self._visit_flat(typ)

    def visit_uint16(self, typ):
        self._visit_flat(typ)

    def visit_uint32(self, typ):
        self._visit_flat(typ)

    def visit_uint64(self, typ):
        self._visit_flat(typ)

    def visit_float16(self, typ):
        self._visit_flat(typ)

    def visit_time32(self, typ):
        self._visit_flat(typ)

    def visit_time64(self, typ):
        self._visit_flat(typ)

    def visit_date32(self, typ):
        self._visit_flat(typ)

    def visit_date64(self, typ):
        self._visit_flat(typ)

    def visit_timestamp(self, typ):
        self._visit_flat(typ)

    def visit_fixed_size_binary(self, typ):
        self._visit_flat(typ)

    def visit_float32(self, typ):
        # Uniform [0, 1) values rather than raw random bytes (which could
        # produce NaN/inf payloads that break exact comparisons).
        buf = numpy.random.rand(self.data.length).astype(
            numpy.float32).tobytes()
        self.data.buffers.append(pyarrow.py_buffer(buf))

    def visit_float64(self, typ):
        buf = numpy.random.rand(self.data.length).astype(
            numpy.float64).tobytes()
        self.data.buffers.append(pyarrow.py_buffer(buf))

    def _generate_offsets(self):
        """Append a random offsets buffer; return the total value count."""
        counts = numpy.random.randint(0, 10, self.data.length + 1, numpy.int32)
        counts[0] = 0
        if self.nullable:
            # Null slots get zero-length values.
            buf = self.data.buffers[0]
            bits = numpy.ndarray(len(buf), numpy.uint8, buf)
            mask = numpy.unpackbits(bits, bitorder='little')[:self.data.length]
            for i, v in enumerate(mask):
                if not v:
                    counts[i + 1] = 0
        offsets = numpy.cumsum(counts, dtype=numpy.int32)
        buf = offsets.tobytes()
        self.data.buffers.append(pyarrow.py_buffer(buf))
        return offsets[-1]

    def visit_binary(self, typ):
        n = self._generate_offsets()
        buf = numpy.random.bytes(n)
        self.data.buffers.append(pyarrow.py_buffer(buf))

    def visit_string(self, typ):
        # NOTE(review): random bytes are not necessarily valid UTF-8 —
        # presumably the codec treats strings as raw bytes; confirm.
        self.visit_binary(typ)

    def visit_list(self, typ):
        n = self._generate_offsets()
        self.data.children.append(
            TestArrayGenerator(n, typ.value_type, self.nullable).array)

    def visit_struct(self, typ):
        for field in typ:
            self.data.children.append(
                TestArrayGenerator(self.data.length, field.type,
                                   self.nullable).array)

    def visit_dictionary(self, typ):
        # Random indices into a fixed 10-element, non-nullable dictionary.
        index = numpy.random.randint(0, 10, self.data.length,
                                     typ.index_type.to_pandas_dtype())
        buf = index.tobytes()
        self.data.buffers.append(pyarrow.py_buffer(buf))
        self.data.dictionary = TestArrayGenerator(10, typ.value_type,
                                                  False).array
def test_table(n, types=None, offset=None, length=None, nullable=True):
    """Build a random pyarrow Table with `n` rows covering many types.

    For each type one non-nullable column is added, plus a nullable twin
    when `nullable` is true.  When `offset` is given every column is sliced
    to ``[offset, offset + length)`` before the table is assembled.

    NOTE(review): ``pyarrow.column`` and ``Table.from_arrays(data)`` with
    column objects match an older pyarrow API — confirm the pinned version.
    """
    if types is None:
        # One representative of every type family the codec supports.
        types = [
            pyarrow.null(),
            pyarrow.bool_(),
            pyarrow.int8(),
            pyarrow.int16(),
            pyarrow.int32(),
            pyarrow.int64(),
            pyarrow.uint8(),
            pyarrow.uint16(),
            pyarrow.uint32(),
            pyarrow.uint64(),
            pyarrow.float16(),
            pyarrow.float32(),
            pyarrow.float64(),
            pyarrow.date32(),
            pyarrow.date64(),
            pyarrow.timestamp('s'),
            pyarrow.timestamp('ms'),
            pyarrow.timestamp('us'),
            pyarrow.timestamp('ns'),
            pyarrow.time32('s'),
            pyarrow.time32('ms'),
            pyarrow.time64('us'),
            pyarrow.time64('ns'),
            pyarrow.string(),
            pyarrow.binary(),
            pyarrow.binary(4),
            pyarrow.dictionary(pyarrow.int32(), pyarrow.string(), True),
            pyarrow.dictionary(pyarrow.int64(), pyarrow.int64(), True),
            pyarrow.dictionary(pyarrow.int32(), pyarrow.string(), False),
            pyarrow.dictionary(pyarrow.int64(), pyarrow.int64(), False),
            pyarrow.list_(pyarrow.int32()),
            pyarrow.struct([pyarrow.field('int32', pyarrow.int32())]),
            pyarrow.list_(
                pyarrow.struct([pyarrow.field('int32', pyarrow.int32())])),
            pyarrow.struct(
                [pyarrow.field('int32', pyarrow.list_(pyarrow.int32()))]),
        ]
    data = list()
    for t in types:
        name = str(t)
        array = TestArrayGenerator(n, t, False).array
        if offset is not None:
            array = array.slice(offset, length)
        data.append(pyarrow.column(name, array))
        if nullable:
            name = str(t) + ' (null)'
            array = TestArrayGenerator(n, t, True).array
            if offset is not None:
                array = array.slice(offset, length)
            data.append(pyarrow.column(name, array))
    return pyarrow.Table.from_arrays(data)
class TestArrowBSON(unittest.TestCase):
    """Round-trip tests for the bson_dataframe codecs and table I/O."""

    def test_offsets_codec(self):
        """Offsets encode/decode preserves bytes and dtype, with and
        without a constant leading offset."""
        val = numpy.array([0, 1, 3, 3, 4, 8, 9, 13], numpy.int32)
        enc = bson_dataframe.encode_offsets(val)
        dec = bson_dataframe.decode_offsets(enc)
        self.assertEqual(val.tobytes(), dec.tobytes())
        self.assertEqual(val.dtype, enc.dtype)
        self.assertEqual(val.dtype, dec.dtype)
        # A shifted copy must decode back to the zero-based offsets.
        off = val + numpy.int32(10)
        enc = bson_dataframe.encode_offsets(off)
        dec = bson_dataframe.decode_offsets(enc)
        self.assertEqual(val.tobytes(), dec.tobytes())
        self.assertEqual(val.dtype, enc.dtype)
        self.assertEqual(val.dtype, dec.dtype)

    def test_datetime_codec(self):
        """Datetime encode/decode preserves bytes and dtype."""
        val = numpy.array([1, 1, 3, 3, 4, 8, 9, 13], numpy.int32)
        enc = bson_dataframe.encode_datetime(val)
        dec = bson_dataframe.decode_datetime(enc)
        self.assertEqual(val.tobytes(), dec.tobytes())
        self.assertEqual(val.dtype, enc.dtype)
        self.assertEqual(val.dtype, dec.dtype)

    def test_write(self):
        """Serialize a table, validate it, and dump the result to JSON.

        The original serialized the table twice and discarded the first
        buffer; one call is enough.
        """
        table = test_table(1000)
        doc = bson_dataframe.validate(bson_dataframe.write_table(table))
        with open('test.json', 'w') as out:
            out.write(doc)

    def test_read(self):
        """A table survives a write/read round trip unchanged."""
        table = test_table(1000)
        buf = bson_dataframe.write_table(table)
        ret = bson_dataframe.read_table(buf)
        self.assertTrue(ret.equals(table))

    def test_slice(self):
        """A table sliced from the middle survives the round trip."""
        table = test_table(1000, offset=100, length=500)
        buf = bson_dataframe.write_table(table)
        ret = bson_dataframe.read_table(buf)
        self.assertTrue(ret.equals(table))

    def test_slice_head(self):
        """A table sliced from the start survives the round trip."""
        table = test_table(1000, offset=0, length=500)
        buf = bson_dataframe.write_table(table)
        ret = bson_dataframe.read_table(buf)
        self.assertTrue(ret.equals(table))

    def test_slice_tail(self):
        """A table sliced up to the end survives the round trip."""
        table = test_table(1000, offset=500, length=500)
        buf = bson_dataframe.write_table(table)
        ret = bson_dataframe.read_table(buf)
        self.assertTrue(ret.equals(table))
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
import sqlite3
import threading

# One shared connection for the whole bot.  isolation_level=None puts
# sqlite3 in autocommit mode; check_same_thread=False is safe because every
# cursor operation is serialized through `lock` (see acquireLock below).
conn = sqlite3.connect('bot.db', check_same_thread=False, isolation_level=None)
c = conn.cursor()
# Serializes all database access across threads.
lock = threading.Lock()
def acquireLock(func):
    """Decorator: run `func` while holding the module-level `lock`.

    Serializes all access to the shared sqlite cursor across threads.
    """
    import functools

    @functools.wraps(func)  # preserve the wrapped function's name/docstring
    def wrapper(*args, **kwargs):
        # `with lock:` == blocking acquire + release in a finally block.
        with lock:
            return func(*args, **kwargs)
    return wrapper
# Schema: file ownership, song-request favourites, link map, image counter,
# moderator list and ban list.  Created idempotently at import time.
tables = [
    "CREATE TABLE IF NOT EXISTS owners (id integer PRIMARY KEY, filename text NOT NULL, owner text NOT NULL)",
    """\
CREATE TABLE IF NOT EXISTS srfavs (id integer PRIMARY KEY, title text NOT NULL, \
duration integer NOT NULL, user_duration integer, link text NOT NULL, username text NOT NULL)\
""",
    "CREATE TABLE IF NOT EXISTS links (id integer PRIMARY KEY, link text NOT NULL, filename text NOT NULL)",
    "CREATE TABLE IF NOT EXISTS imgcount (id integer PRIMARY KEY, count integer NOT NULL)",
    "CREATE TABLE IF NOT EXISTS moderators (id integer PRIMARY KEY, username text NOT NULL)",
    "CREATE TABLE IF NOT EXISTS banned (id integer PRIMARY KEY, username text NOT NULL)"
]
for create_table_query in tables:
    c.execute(create_table_query)
@acquireLock
def add_owner(filename, owner):
    """Record `owner` as the owner of `filename`."""
    c.execute('INSERT INTO owners (filename, owner) VALUES (:filename, :owner)',
              {'filename': filename, 'owner': owner})


@acquireLock
def remove_owner(filename):
    """Delete ownership rows.

    NOTE(review): executemany expects an iterable of parameter tuples,
    e.g. [(name,)]; a bare string would be iterated per character —
    confirm what callers pass.
    """
    c.executemany('DELETE FROM owners WHERE filename = ?', filename)


@acquireLock
def add_srfavs(title, duration, user_duration, link, username):
    """Add a song-request favourite for `username`."""
    c.execute('INSERT INTO srfavs (title, duration, user_duration, link, username) '
              'VALUES (:title, :duration, :user_duration, :link, :username)',
              {'title': title, 'duration': duration, 'user_duration': user_duration,
               'link': link, 'username': username})


@acquireLock
def remove_srfavs(data):
    """Delete favourites; `data` is an iterable of 5-tuples matching the placeholders."""
    c.executemany("DELETE FROM srfavs WHERE title = ? and duration = ? and user_duration = ? "
                  "and link = ? and username = ?", data)


@acquireLock
def check_srfavs_list(username):
    """Return (title, duration, user_duration, link) rows for `username`."""
    c.execute('SELECT title, duration, user_duration, link FROM srfavs WHERE username = :username',
              {'username': username})
    return c.fetchall()


@acquireLock
def check_owner(filename, owner):
    """Return rows proving `owner` owns `filename` (empty list if not)."""
    c.execute('SELECT owner FROM owners WHERE filename = :filename AND owner = :owner', {'filename': filename,
                                                                                        'owner': owner})
    return c.fetchall()


@acquireLock
def check_ownerlist(owner):
    """Return all filenames owned by `owner`."""
    c.execute('SELECT filename FROM owners WHERE owner = :owner', {'owner': owner})
    return c.fetchall()


@acquireLock
def update_owner_filename(filename, new_filename):
    """Rename a file in the owners table."""
    c.execute('UPDATE owners SET filename = :new_filename WHERE filename = :filename', {'filename': filename,
                                                                                       'new_filename':
                                                                                       new_filename})


@acquireLock
def add_link(link, filename):
    """Map `link` to `filename`."""
    c.execute('INSERT INTO links (link, filename) VALUES (:link, :filename)',
              {'link': link, 'filename': filename})


@acquireLock
def remove_link(filename):
    """Delete link rows; same executemany iterable-of-tuples caveat as remove_owner."""
    c.executemany('DELETE FROM links WHERE filename = ?', filename)


@acquireLock
def update_link_filename(filename, new_filename):
    """Rename a file in the links table."""
    c.execute('UPDATE links SET filename = :new_filename WHERE filename = :filename', {'filename': filename,
                                                                                      'new_filename':
                                                                                      new_filename})


@acquireLock
def get_links_filenames():
    """Return every filename present in the links table."""
    c.execute('SELECT filename FROM links')
    return c.fetchall()


@acquireLock
def get_links_and_filenames():
    """Return all (link, filename) pairs."""
    c.execute('SELECT link, filename FROM links')
    return c.fetchall()


@acquireLock
def get_link(filename):
    """Return link rows for `filename`."""
    c.execute('SELECT link FROM links WHERE filename = :filename', {'filename': filename})
    return c.fetchall()


@acquireLock
def get_imgcount():
    """Return the single image-counter row, or None if not yet seeded."""
    c.execute('SELECT count FROM imgcount')
    return c.fetchone()


# Seed and cache the image counter at import time.
numba = get_imgcount()
if not numba:
    c.execute('INSERT INTO imgcount (count) VALUES (1)')
    numba = get_imgcount()
# Cached counter, kept as a string; update_imgcount increments it.
numba = str(numba[0])
@acquireLock
def update_imgcount(count):
    """Write `count` to the DB and bump the cached `numba`.

    NOTE(review): the DB row gets the caller-supplied `count` while `numba`
    is incremented independently — confirm callers keep the two in sync.
    """
    global numba
    numba = str(int(numba) + 1)
    c.execute('UPDATE imgcount SET count = :count', {'count': count})


@acquireLock
def check_if_mod(username):
    """Return rows if `username` is a moderator (empty list otherwise)."""
    c.execute('SELECT username FROM moderators WHERE username = :username', {'username': username})
    return c.fetchall()


@acquireLock
def check_moderators():
    """Return every moderator username."""
    c.execute('SELECT username FROM moderators')
    return c.fetchall()


@acquireLock
def add_mod(username):
    """Insert moderators; `username` is an iterable of 1-tuples."""
    c.executemany("INSERT INTO moderators (username) VALUES (?)", username)


@acquireLock
def remove_mod(username):
    """Delete moderators; `username` is an iterable of 1-tuples."""
    c.executemany("DELETE FROM moderators WHERE username = ?", username)


@acquireLock
def check_if_banned(username):
    """Return rows if `username` is banned (empty list otherwise)."""
    c.execute('SELECT username FROM banned WHERE username = :username', {'username': username})
    return c.fetchall()


@acquireLock
def check_banned():
    """Return every banned username."""
    c.execute('SELECT username FROM banned')
    return c.fetchall()


@acquireLock
def add_ban(username):
    """Insert bans; `username` is an iterable of 1-tuples."""
    c.executemany("INSERT INTO banned (username) VALUES (?)", username)


@acquireLock
def remove_ban(username):
    """Delete bans; `username` is an iterable of 1-tuples."""
    c.executemany("DELETE FROM banned WHERE username = ?", username)


@acquireLock
def sql_query(query):
    """Execute a raw SQL string; return rows, or [(error,)] on failure.

    WARNING: executes arbitrary SQL — must only be reachable by a trusted
    administrator, never by untrusted user input.
    """
    try:
        c.execute(query)
    except Exception as e:
        return [(str(e),)]
    return c.fetchall()
import threading
conn = sqlite3.connect('bot.db', check_same_thread=False, isolation_level=None)
c = conn.cursor()
lock = threading.Lock()
def acquireLock(func):
def wrapper(*args, **kwargs):
try:
lock.acquire(True)
return func(*args, **kwargs)
finally:
lock.release()
return wrapper
# Schema: ownership, song-request favourites, link map, image counter,
# moderator list and ban list.  Created idempotently at import time.
tables = [
    "CREATE TABLE IF NOT EXISTS owners (id integer PRIMARY KEY, filename text NOT NULL, owner text NOT NULL)",
    """\
CREATE TABLE IF NOT EXISTS srfavs (id integer PRIMARY KEY, title text NOT NULL, \
duration integer NOT NULL, user_duration integer, link text NOT NULL, username text NOT NULL)\
""",
    "CREATE TABLE IF NOT EXISTS links (id integer PRIMARY KEY, link text NOT NULL, filename text NOT NULL)",
    "CREATE TABLE IF NOT EXISTS imgcount (id integer PRIMARY KEY, count integer NOT NULL)",
    "CREATE TABLE IF NOT EXISTS moderators (id integer PRIMARY KEY, username text NOT NULL)",
    "CREATE TABLE IF NOT EXISTS banned (id integer PRIMARY KEY, username text NOT NULL)"
]
for create_table_query in tables:
    c.execute(create_table_query)
@acquireLock
def add_owner(filename, owner):
    """Record `owner` as the owner of `filename`."""
    c.execute('INSERT INTO owners (filename, owner) VALUES (:filename, :owner)',
              {'filename': filename, 'owner': owner})


@acquireLock
def remove_owner(filename):
    """Delete ownership rows.

    NOTE(review): executemany expects an iterable of parameter tuples —
    confirm callers pass e.g. [(name,)], not a bare string.
    """
    c.executemany('DELETE FROM owners WHERE filename = ?', filename)


@acquireLock
def add_srfavs(title, duration, user_duration, link, username):
    """Add a song-request favourite for `username`."""
    c.execute('INSERT INTO srfavs (title, duration, user_duration, link, username) '
              'VALUES (:title, :duration, :user_duration, :link, :username)',
              {'title': title, 'duration': duration, 'user_duration': user_duration,
               'link': link, 'username': username})


@acquireLock
def remove_srfavs(data):
    """Delete favourites; `data` is an iterable of 5-tuples."""
    c.executemany("DELETE FROM srfavs WHERE title = ? and duration = ? and user_duration = ? "
                  "and link = ? and username = ?", data)


@acquireLock
def check_srfavs_list(username):
    """Return (title, duration, user_duration, link) rows for `username`."""
    c.execute('SELECT title, duration, user_duration, link FROM srfavs WHERE username = :username',
              {'username': username})
    return c.fetchall()


@acquireLock
def check_owner(filename, owner):
    """Return rows proving `owner` owns `filename` (empty list if not)."""
    c.execute('SELECT owner FROM owners WHERE filename = :filename AND owner = :owner', {'filename': filename,
                                                                                        'owner': owner})
    return c.fetchall()


@acquireLock
def check_ownerlist(owner):
    """Return all filenames owned by `owner`."""
    c.execute('SELECT filename FROM owners WHERE owner = :owner', {'owner': owner})
    return c.fetchall()


@acquireLock
def update_owner_filename(filename, new_filename):
    """Rename a file in the owners table."""
    c.execute('UPDATE owners SET filename = :new_filename WHERE filename = :filename', {'filename': filename,
                                                                                       'new_filename':
                                                                                       new_filename})


@acquireLock
def add_link(link, filename):
    """Map `link` to `filename`."""
    c.execute('INSERT INTO links (link, filename) VALUES (:link, :filename)',
              {'link': link, 'filename': filename})


@acquireLock
def remove_link(filename):
    """Delete link rows; same executemany caveat as remove_owner."""
    c.executemany('DELETE FROM links WHERE filename = ?', filename)


@acquireLock
def update_link_filename(filename, new_filename):
    """Rename a file in the links table."""
    c.execute('UPDATE links SET filename = :new_filename WHERE filename = :filename', {'filename': filename,
                                                                                      'new_filename':
                                                                                      new_filename})


@acquireLock
def get_links_filenames():
    """Return every filename present in the links table."""
    c.execute('SELECT filename FROM links')
    return c.fetchall()


@acquireLock
def get_links_and_filenames():
    """Return all (link, filename) pairs."""
    c.execute('SELECT link, filename FROM links')
    return c.fetchall()


@acquireLock
def get_link(filename):
    """Return link rows for `filename`."""
    c.execute('SELECT link FROM links WHERE filename = :filename', {'filename': filename})
    return c.fetchall()


@acquireLock
def get_imgcount():
    """Return the single image-counter row, or None if not yet seeded."""
    c.execute('SELECT count FROM imgcount')
    return c.fetchone()


# Seed and cache the image counter at import time.
numba = get_imgcount()
if not numba:
    c.execute('INSERT INTO imgcount (count) VALUES (1)')
    numba = get_imgcount()
numba = str(numba[0])


@acquireLock
def update_imgcount(count):
    """Write `count` to the DB and bump the cached `numba`.

    NOTE(review): DB value and cached value are updated independently —
    confirm callers keep them in sync.
    """
    global numba
    numba = str(int(numba) + 1)
    c.execute('UPDATE imgcount SET count = :count', {'count': count})


@acquireLock
def check_if_mod(username):
    """Return rows if `username` is a moderator (empty list otherwise)."""
    c.execute('SELECT username FROM moderators WHERE username = :username', {'username': username})
    return c.fetchall()


@acquireLock
def check_moderators():
    """Return every moderator username."""
    c.execute('SELECT username FROM moderators')
    return c.fetchall()


@acquireLock
def add_mod(username):
    """Insert moderators; `username` is an iterable of 1-tuples."""
    c.executemany("INSERT INTO moderators (username) VALUES (?)", username)


@acquireLock
def remove_mod(username):
    """Delete moderators; `username` is an iterable of 1-tuples."""
    c.executemany("DELETE FROM moderators WHERE username = ?", username)


@acquireLock
def check_if_banned(username):
    """Return rows if `username` is banned (empty list otherwise)."""
    c.execute('SELECT username FROM banned WHERE username = :username', {'username': username})
    return c.fetchall()


@acquireLock
def check_banned():
    """Return every banned username."""
    c.execute('SELECT username FROM banned')
    return c.fetchall()


@acquireLock
def add_ban(username):
    """Insert bans; `username` is an iterable of 1-tuples."""
    c.executemany("INSERT INTO banned (username) VALUES (?)", username)


@acquireLock
def remove_ban(username):
    """Delete bans; `username` is an iterable of 1-tuples."""
    c.executemany("DELETE FROM banned WHERE username = ?", username)


@acquireLock
def sql_query(query):
    """Execute a raw SQL string; return rows, or [(error,)] on failure.

    WARNING: executes arbitrary SQL — admin use only.
    """
    try:
        c.execute(query)
    except Exception as e:
        return [(str(e),)]
    return c.fetchall()
import os
import xarray as xr
from termcolor import cprint
from trace_for_guess.skip import skip
def drop_superfluous_trace_vars(data):
    """Drop all unneeded data variables from a TraCE-21ka xarray dataset.

    Returns a new dataset without the listed bookkeeping variables.
    """
    # "hyai" appeared twice in the original list; it is listed once here.
    return data.drop(["P0", "co2vmr", "gw", "hyai", "date", "date_written",
                      "datesec", "hyam", "hybi", "hybm", "mdt",
                      "nbdate", "nbsec", "ndbase", "ndcur", "nlon", "nsbase",
                      "nscur", "nsteph", "ntrk", "ntrm", "ntrn",
                      "time_written", "wnummax"])
def get_monthly_means(trace_file):
    """
    Aggregate TraCE-21ka file over time to have only 12 data points per cell.

    Args:
        trace_file: File path of the TraCE-21ka NetCDF file.

    Returns:
        A xarray.Dataset object of the file.
    """
    # decode_times=False: the raw time axis is overwritten below anyway.
    data = xr.open_dataset(trace_file, decode_times=False)
    # Create list with numbers from 0 (January) to 11 (December).
    month_numbers = [i for i in range(12)]
    data = drop_superfluous_trace_vars(data)  # TODO: Do we need this?
    # Repeat the months (assuming the dataset begins with January).
    # NOTE(review): assumes len(time) is an exact multiple of 12; otherwise
    # the assignment below fails with a length mismatch — confirm inputs.
    year_count = len(data["time"]) // 12
    month_dim = month_numbers * year_count
    # Overwrite the time dimension with months.
    data["time"].values = month_dim
    # Create mean monthly temperatures over the whole time span for each grid
    # cell.
    data = data.groupby('time').mean('time')
    return data
def aggregate_modern_trace(trace_file, out_file):
    """Calculate 12 monthly means from values from TraCE file over time.

    Args:
        trace_file: Path to original TraCE-21ka NetCDF file.
        out_file: Path to output file (will *not* be overwritten).

    Returns:
        The created output file (equals `out_file`).

    Raises:
        FileNotFoundError: The file `trace_file` wasn’t found.
        RuntimeError: Output file was not created.
    """
    if not os.path.isfile(trace_file):
        raise FileNotFoundError("Input file doesn’t exist: '%s'" % trace_file)
    # skip() decides whether the output can be reused instead of recomputed.
    if skip(trace_file, out_file):
        return out_file
    cprint(f"Aggregating monthly averages from file '{trace_file}'.", 'yellow')
    dataset = get_monthly_means(trace_file)
    dataset.to_netcdf(out_file, mode='w', engine='netcdf4')
    dataset.close()
    # Sanity check: to_netcdf should have produced the file on disk.
    if os.path.isfile(out_file):
        cprint(f"Successfully created output file '{out_file}'.", 'green')
    else:
        raise RuntimeError(f"Output file '{out_file}' was not created.")
    return out_file
import os
import xarray as xr
from termcolor import cprint
from trace_for_guess.skip import skip
def drop_superfluous_trace_vars(data):
    """Drop all unneeded data variables from a TraCE-21ka xarray dataset.

    Returns a new dataset without the listed bookkeeping variables.
    """
    # "hyai" appeared twice in the original list; it is listed once here.
    return data.drop(["P0", "co2vmr", "gw", "hyai", "date", "date_written",
                      "datesec", "hyam", "hybi", "hybm", "mdt",
                      "nbdate", "nbsec", "ndbase", "ndcur", "nlon", "nsbase",
                      "nscur", "nsteph", "ntrk", "ntrm", "ntrn",
                      "time_written", "wnummax"])
def get_monthly_means(trace_file):
    """
    Aggregate TraCE-21ka file over time to have only 12 data points per cell.

    Args:
        trace_file: File path of the TraCE-21ka NetCDF file.

    Returns:
        A xarray.Dataset object of the file.
    """
    # decode_times=False: the raw time axis is overwritten below anyway.
    data = xr.open_dataset(trace_file, decode_times=False)
    # Create list with numbers from 0 (January) to 11 (December).
    month_numbers = [i for i in range(12)]
    data = drop_superfluous_trace_vars(data)  # TODO: Do we need this?
    # Repeat the months (assuming the dataset begins with January).
    # NOTE(review): assumes len(time) is a multiple of 12 — confirm inputs.
    year_count = len(data["time"]) // 12
    month_dim = month_numbers * year_count
    # Overwrite the time dimension with months.
    data["time"].values = month_dim
    # Create mean monthly temperatures over the whole time span for each grid
    # cell.
    data = data.groupby('time').mean('time')
    return data
def aggregate_modern_trace(trace_file, out_file):
    """Calculate 12 monthly means from values from TraCE file over time.

    Args:
        trace_file: Path to original TraCE-21ka NetCDF file.
        out_file: Path to output file (will *not* be overwritten).

    Returns:
        The created output file (equals `out_file`).

    Raises:
        FileNotFoundError: The file `trace_file` wasn’t found.
        RuntimeError: Output file was not created.
    """
    if not os.path.isfile(trace_file):
        raise FileNotFoundError("Input file doesn’t exist: '%s'" % trace_file)
    # skip() decides whether the output can be reused instead of recomputed.
    if skip(trace_file, out_file):
        return out_file
    cprint(f"Aggregating monthly averages from file '{trace_file}'.", 'yellow')
    dataset = get_monthly_means(trace_file)
    dataset.to_netcdf(out_file, mode='w', engine='netcdf4')
    dataset.close()
    # Sanity check: to_netcdf should have produced the file on disk.
    if os.path.isfile(out_file):
        cprint(f"Successfully created output file '{out_file}'.", 'green')
    else:
        raise RuntimeError(f"Output file '{out_file}' was not created.")
    return out_file
import pytest
import warnings
import numpy as np
import scipy.signal as signal
from tsfuse.data.synthetic import series, brownian
from tsfuse.transformers.frequency import *
from tsfuse.transformers.statistics import *
@pytest.fixture
def x():
    """Synthetic Brownian-motion series shared by all tests in this module."""
    return brownian()
def test_fft_real(x):
    """FFT(attr='real') must match the real part of numpy's rfft."""
    transformed = FFT(attr='real').transform(x)
    for idx, arr in series(x):
        np.testing.assert_almost_equal(transformed.values[idx],
                                       np.fft.rfft(arr).real)
def test_fft_imag(x):
    """FFT(attr='imag') must match the imaginary part of numpy's rfft."""
    transformed = FFT(attr='imag').transform(x)
    for idx, arr in series(x):
        np.testing.assert_almost_equal(transformed.values[idx],
                                       np.fft.rfft(arr).imag)
def test_fft_abs(x):
    """FFT(attr='abs') must match the magnitude of numpy's rfft."""
    transformed = FFT(attr='abs').transform(x)
    for idx, arr in series(x):
        np.testing.assert_almost_equal(transformed.values[idx],
                                       np.abs(np.fft.rfft(arr)))
def test_fft_angle(x):
    """FFT(attr='angle') must match numpy's rfft phase in degrees."""
    transformed = FFT(attr='angle').transform(x)
    for idx, arr in series(x):
        np.testing.assert_almost_equal(transformed.values[idx],
                                       np.angle(np.fft.rfft(arr), deg=True))
def test_fft_moments(x):
    """Spectral moment transformers match hand-computed weighted moments.

    NOTE(review): the reference moments are computed on the raw series `a`,
    not on its spectrum — presumably SpectralMean et al. are defined that
    way; confirm against the transformer implementations.
    """
    def moment(a, moment):
        # Weighted raw moment: sum(a_i * i^k) / sum(a_i).
        return a.dot(np.arange(len(a)) ** moment) / np.sum(a)

    def mean(a):
        return moment(a, 1)

    def variance(a):
        return moment(a, 2) - mean(a) ** 2

    def skewness(a):
        return (moment(a, 3) - 3 * mean(a) * variance(a) - mean(a) ** 3) / variance(a) ** (1.5)

    def kurtosis(a):
        return ((moment(a, 4) - 4 * mean(a) * moment(a, 3)
                 + 6 * moment(a, 2) * mean(a) ** 2 - 3 * mean(a))
                / variance(a) ** 2)

    # The original also computed FFT().transform(x) here but never used it.
    result_mean = SpectralMean().transform(x)
    result_variance = SpectralVariance().transform(x)
    result_skewness = SpectralSkewness().transform(x)
    result_kurtosis = SpectralKurtosis().transform(x)
    with warnings.catch_warnings():
        # The moment formulas divide by variance, which can warn/underflow.
        warnings.simplefilter("ignore")
        for i, a in series(x):
            np.testing.assert_allclose(result_mean.values[i], mean(a))
            np.testing.assert_allclose(result_variance.values[i], variance(a))
            np.testing.assert_allclose(result_skewness.values[i], skewness(a))
            np.testing.assert_allclose(result_kurtosis.values[i], kurtosis(a))
def test_cwt_ricker_width_1(x):
    """CWT with a ricker wavelet of width 1 matches scipy.signal.cwt."""
    transformed = CWT(wavelet='ricker', width=1).transform(x)
    for idx, arr in series(x):
        reference = signal.cwt(arr, signal.ricker, widths=(1,)).flatten()
        np.testing.assert_almost_equal(transformed.values[idx], reference)
def test_cwt_ricker_width_2(x):
    """CWT with a ricker wavelet of width 2 matches scipy.signal.cwt."""
    transformed = CWT(wavelet='ricker', width=2).transform(x)
    for idx, arr in series(x):
        reference = signal.cwt(arr, signal.ricker, widths=(2,)).flatten()
        np.testing.assert_almost_equal(transformed.values[idx], reference)
def test_power_spectral_density(x):
    """PSD transformer matches the spectrum from scipy.signal.welch."""
    transformed = PowerSpectralDensity().transform(x)
    for idx, arr in series(x):
        reference = signal.welch(arr, nperseg=min(len(arr), 256))[1]
        np.testing.assert_almost_equal(transformed.values[idx], reference)
import warnings
import numpy as np
import scipy.signal as signal
from tsfuse.data.synthetic import series, brownian
from tsfuse.transformers.frequency import *
from tsfuse.transformers.statistics import *
@pytest.fixture
def x():
    """Synthetic Brownian-motion series shared by all tests in this module."""
    return brownian()
def test_fft_real(x):
    """FFT(attr='real') matches the real part of numpy's rfft."""
    result = FFT(attr='real').transform(x)
    for i, a in series(x):
        actual = result.values[i]
        expected = np.fft.rfft(a).real
        np.testing.assert_almost_equal(actual, expected)


def test_fft_imag(x):
    """FFT(attr='imag') matches the imaginary part of numpy's rfft."""
    result = FFT(attr='imag').transform(x)
    for i, a in series(x):
        actual = result.values[i]
        expected = np.fft.rfft(a).imag
        np.testing.assert_almost_equal(actual, expected)


def test_fft_abs(x):
    """FFT(attr='abs') matches the magnitude of numpy's rfft."""
    result = FFT(attr='abs').transform(x)
    for i, a in series(x):
        actual = result.values[i]
        expected = np.abs(np.fft.rfft(a))
        np.testing.assert_almost_equal(actual, expected)


def test_fft_angle(x):
    """FFT(attr='angle') matches numpy's rfft phase in degrees."""
    result = FFT(attr='angle').transform(x)
    for i, a in series(x):
        actual = result.values[i]
        expected = np.angle(np.fft.rfft(a), deg=True)
        np.testing.assert_almost_equal(actual, expected)
def test_fft_moments(x):
    """Spectral moment transformers match hand-computed weighted moments."""
    def moment(a, moment):
        # Weighted raw moment: sum(a_i * i^k) / sum(a_i).
        return a.dot(np.arange(len(a)) ** moment) / np.sum(a)

    def mean(a):
        return moment(a, 1)

    def variance(a):
        return moment(a, 2) - mean(a) ** 2

    def skewness(a):
        return (moment(a, 3) - 3 * mean(a) * variance(a) - mean(a) ** 3) / variance(a) ** (1.5)

    def kurtosis(a):
        return ((moment(a, 4) - 4 * mean(a) * moment(a, 3)
                 + 6 * moment(a, 2) * mean(a) ** 2 - 3 * mean(a))
                / variance(a) ** 2)

    # NOTE(review): `fft` is computed but never used below — confirm whether
    # the moments should be taken on the spectrum instead of the raw series.
    fft = FFT().transform(x)
    result_mean = SpectralMean().transform(x)
    result_variance = SpectralVariance().transform(x)
    result_skewness = SpectralSkewness().transform(x)
    result_kurtosis = SpectralKurtosis().transform(x)
    with warnings.catch_warnings():
        # The moment formulas divide by variance, which can warn/underflow.
        warnings.simplefilter("ignore")
        for i, a in series(x):
            np.testing.assert_allclose(result_mean.values[i], mean(a))
            np.testing.assert_allclose(result_variance.values[i], variance(a))
            np.testing.assert_allclose(result_skewness.values[i], skewness(a))
            np.testing.assert_allclose(result_kurtosis.values[i], kurtosis(a))
def test_cwt_ricker_width_1(x):
    """CWT with a ricker wavelet of width 1 matches scipy.signal.cwt."""
    result = CWT(wavelet='ricker', width=1).transform(x)
    for i, a in series(x):
        actual = result.values[i]
        expected = signal.cwt(a, signal.ricker, widths=(1,)).flatten()
        np.testing.assert_almost_equal(actual, expected)


def test_cwt_ricker_width_2(x):
    """CWT with a ricker wavelet of width 2 matches scipy.signal.cwt."""
    result = CWT(wavelet='ricker', width=2).transform(x)
    for i, a in series(x):
        actual = result.values[i]
        expected = signal.cwt(a, signal.ricker, widths=(2,)).flatten()
        np.testing.assert_almost_equal(actual, expected)


def test_power_spectral_density(x):
    """PSD transformer matches the spectrum from scipy.signal.welch."""
    result = PowerSpectralDensity().transform(x)
    for i, a in series(x):
        actual = result.values[i]
        expected = signal.welch(a, nperseg=min(len(a), 256))[1]
        np.testing.assert_almost_equal(actual, expected)
import numpy as np
from model.factor import RandomVar
from learning.parameter import UniformDirichlet
from inference.exact import VariableElimination
class NaiveBayes:
    """Multinomial Naive Bayes implementation.

    This implementation is inefficient. It serves mainly as a
    proof of concept that Naive Bayes can be seen in the framework of proba-
    bilistic graphical models.
    """

    def __init__(self):
        self.classes_ = None  # original class labels in sorted order
        self.scope_ = None    # feature RandomVars, class variable last
        self.bn_ = None       # learned Bayesian network

    def fit(self, X, y):
        """Fit a Multinomial Naive Bayes model to the data.

        Parameters:
        -----------
        X : two-dimensional np.array or python matrix of integers
            Matrix representing the observations. It is assumed that
            `X[:, i]` is a sample from a discrete random variable $X_i$ that
            takes values between `0` and `X[:, i].max()`
        y : one-dimensional np.array or python list of integers
            Array representing the classes assigned to each observation
        """
        # `np.int` was removed in NumPy 1.24; use the builtin `int` dtype.
        X = np.asarray(X, dtype=int)
        if X.min() < 0:
            # ValueError subclasses Exception, so existing handlers still work.
            raise ValueError('Invalid samples')
        self.classes_, y = np.unique(y, return_inverse=True)
        C = RandomVar('Y', len(self.classes_))
        scope = []
        for i in range(X.shape[1]):
            scope.append(RandomVar('X{0}'.format(i), X[:, i].max() + 1))
        # Naive Bayes structure: the class variable is the sole parent of
        # every feature.
        graph = {v: set() for v in scope}
        graph[C] = set(scope)
        scope.append(C)
        Xy = np.concatenate([X, y.reshape(-1, 1)], axis=1)
        self.bn_ = UniformDirichlet(scope).fit_predict(Xy, graph)
        self.scope_ = scope
        return self

    def predict(self, X):
        """Predict classes for observations in `X` given the learned model"""
        if X.shape[1] != len(self.scope_) - 1:
            raise ValueError('Invalid observations')
        ypred = np.zeros(X.shape[0], dtype=int)
        varelim = VariableElimination(self.bn_)
        for i, x in enumerate(X):
            evidence = [(self.scope_[j], x[j]) for j in range(len(x))]
            # MAP over the class variable given the observed features.
            c = varelim.maximum_a_posteriori(evidence)[0][1]
            ypred[i] = self.classes_[c]
        return ypred

    def predict_proba(self, X):
        """Predict probability of an observation belonging to each class.

        Return a matrix whose lines correspond to the observations and whose
        columns represent the probability of the observation belonging to each
        class, in the order in which they appear in `self.classes_`"""
        if X.shape[1] != len(self.scope_) - 1:
            raise ValueError('Invalid observations')
        # `np.float` was removed in NumPy 1.24; use the builtin `float`.
        proba = np.zeros((X.shape[0], len(self.classes_)), dtype=float)
        varelim = VariableElimination(self.bn_)
        for i, x in enumerate(X):
            evidence = [(self.scope_[j], x[j]) for j in range(len(x))]
            proba[i] = varelim.posterior([self.scope_[-1]], evidence).values
        return proba

    def score(self, X, y):
        """Return the fraction of predictions on `X` matching `y`."""
        return ((self.predict(X) == y).sum()) / float(len(y))
import numpy as np
from model.factor import RandomVar
from learning.parameter import UniformDirichlet
from inference.exact import VariableElimination
class NaiveBayes:
    """Multinomial Naive Bayes implementation.

    This implementation is inefficient. It serves mainly as a
    proof of concept that Naive Bayes can be seen in the framework of proba-
    bilistic graphical models.
    """

    def __init__(self):
        self.classes_ = None  # original class labels in sorted order
        self.scope_ = None    # feature RandomVars, class variable last
        self.bn_ = None       # learned Bayesian network

    def fit(self, X, y):
        """Fit a Multinomial Naive Bayes model to the data.

        Parameters:
        -----------
        X : two-dimensional np.array or python matrix of integers
            Matrix representing the observations. It is assumed that
            `X[:, i]` is a sample from a discrete random variable $X_i$ that
            takes values between `0` and `X[:, i].max()`
        y : one-dimensional np.array or python list of integers
            Array representing the classes assigned to each observation
        """
        # `np.int` was removed in NumPy 1.24; use the builtin `int` dtype.
        X = np.asarray(X, dtype=int)
        if X.min() < 0:
            # ValueError subclasses Exception, so existing handlers still work.
            raise ValueError('Invalid samples')
        self.classes_, y = np.unique(y, return_inverse=True)
        C = RandomVar('Y', len(self.classes_))
        scope = []
        for i in range(X.shape[1]):
            scope.append(RandomVar('X{0}'.format(i), X[:, i].max() + 1))
        # Naive Bayes structure: the class variable is the sole parent of
        # every feature.
        graph = {v: set() for v in scope}
        graph[C] = set(scope)
        scope.append(C)
        Xy = np.concatenate([X, y.reshape(-1, 1)], axis=1)
        self.bn_ = UniformDirichlet(scope).fit_predict(Xy, graph)
        self.scope_ = scope
        return self

    def predict(self, X):
        """Predict classes for observations in `X` given the learned model"""
        if X.shape[1] != len(self.scope_) - 1:
            raise ValueError('Invalid observations')
        ypred = np.zeros(X.shape[0], dtype=int)
        varelim = VariableElimination(self.bn_)
        for i, x in enumerate(X):
            evidence = [(self.scope_[j], x[j]) for j in range(len(x))]
            # MAP over the class variable given the observed features.
            c = varelim.maximum_a_posteriori(evidence)[0][1]
            ypred[i] = self.classes_[c]
        return ypred

    def predict_proba(self, X):
        """Predict probability of an observation belonging to each class.

        Return a matrix whose lines correspond to the observations and whose
        columns represent the probability of the observation belonging to each
        class, in the order in which they appear in `self.classes_`"""
        if X.shape[1] != len(self.scope_) - 1:
            raise ValueError('Invalid observations')
        # `np.float` was removed in NumPy 1.24; use the builtin `float`.
        proba = np.zeros((X.shape[0], len(self.classes_)), dtype=float)
        varelim = VariableElimination(self.bn_)
        for i, x in enumerate(X):
            evidence = [(self.scope_[j], x[j]) for j in range(len(x))]
            proba[i] = varelim.posterior([self.scope_[-1]], evidence).values
        return proba

    def score(self, X, y):
        """Return the fraction of predictions on `X` matching `y`."""
        return ((self.predict(X) == y).sum()) / float(len(y))
# Python Import Modules:
from __future__ import print_function
# Instructions:
# Part I
# Given the following list:
# students = [
# {'first_name': 'Michael', 'last_name' : 'Jordan'},
# {'first_name' : 'John', 'last_name' : 'Rosales'},
# {'first_name' : 'Mark', 'last_name' : 'Guillen'},
# {'first_name' : 'KB', 'last_name' : 'Tonel'}
# ]
# Create a program that outputs:
# <NAME>
# <NAME>
# <NAME>
# <NAME>
# Part II
# Now, given the following dictionary:
# users = {
# 'Students': [
# {'first_name': 'Michael', 'last_name' : 'Jordan'},
# {'first_name' : 'John', 'last_name' : 'Rosales'},
# {'first_name' : 'Mark', 'last_name' : 'Guillen'},
# {'first_name' : 'KB', 'last_name' : 'Tonel'}
# ],
# 'Instructors': [
# {'first_name' : 'Michael', 'last_name' : 'Choi'},
# {'first_name' : 'Martin', 'last_name' : 'Puryear'}
# ]
# }
# Create a program that prints the following format (including number of characters in each combined name):
# Students
# 1 - <NAME> - 13
# 2 - <NAME> - 11
# 3 - <NAME> - 11
# 4 - <NAME> - 7
# Instructors
# 1 - <NAME> - 11
# 2 - <NAME> - 13
# ---------------
# Part 1:
# dictionary
students = [
{'first_name': 'Michael', 'last_name': 'Jordan'},
{'first_name': 'John', 'last_name': 'Rosales'},
{'first_name': 'Mark', 'last_name': 'Guillen'},
{'first_name': 'KB', 'last_name': 'Tonel'}
]
# f(x) print students
def print_student_info(studentlist):
# loop through items in students
for items in studentlist:
print(items['first_name'], items['last_name'])
print_student_info(students)
# ---------------
# Part 2:
print('-----')
# dictionary
users = {
'Students': [
# key:value pair
{'first_name': 'Michael', 'last_name': 'Jordan'},
{'first_name': 'John', 'last_name': 'Rosales'},
{'first_name': 'Mark', 'last_name': 'Guillen'},
{'first_name': 'KB', 'last_name': 'Tonel'}
],
'Instructors': [
{'first_name': 'Michael', 'last_name': 'Choi'},
{'first_name': 'Martin', 'last_name': 'Puryear'}
]
}
# f(x) dictinary
def print_school_info(userdict):
# loop dictinary
for item in userdict:
# print item
print(item)
# set variable for counting
counter = 0
# loop through people in dictionary
for person in userdict[item]:
# increment counter variable
counter+=1
# print counter + first + last + length of person's name
print(counter,person['first_name'].upper(),person['last_name'].upper(),'-',(len(person['first_name']) + len(person['last_na'])))
# call f(x)
print_school_info(users)
# L|5 | python_names.py |
# Python Import Modules:
from __future__ import print_function
# Instructions:
# Part I
# Given the following list:
# students = [
# {'first_name': 'Michael', 'last_name' : 'Jordan'},
# {'first_name' : 'John', 'last_name' : 'Rosales'},
# {'first_name' : 'Mark', 'last_name' : 'Guillen'},
# {'first_name' : 'KB', 'last_name' : 'Tonel'}
# ]
# Create a program that outputs:
# <NAME>
# <NAME>
# <NAME>
# <NAME>
# Part II
# Now, given the following dictionary:
# users = {
# 'Students': [
# {'first_name': 'Michael', 'last_name' : 'Jordan'},
# {'first_name' : 'John', 'last_name' : 'Rosales'},
# {'first_name' : 'Mark', 'last_name' : 'Guillen'},
# {'first_name' : 'KB', 'last_name' : 'Tonel'}
# ],
# 'Instructors': [
# {'first_name' : 'Michael', 'last_name' : 'Choi'},
# {'first_name' : 'Martin', 'last_name' : 'Puryear'}
# ]
# }
# Create a program that prints the following format (including number of characters in each combined name):
# Students
# 1 - <NAME> - 13
# 2 - <NAME> - 11
# 3 - <NAME> - 11
# 4 - <NAME> - 7
# Instructors
# 1 - <NAME> - 11
# 2 - <NAME> - 13
# ---------------
# Part 1:
# dictionary
students = [
{'first_name': 'Michael', 'last_name': 'Jordan'},
{'first_name': 'John', 'last_name': 'Rosales'},
{'first_name': 'Mark', 'last_name': 'Guillen'},
{'first_name': 'KB', 'last_name': 'Tonel'}
]
# f(x) print students
def print_student_info(studentlist):
# loop through items in students
for items in studentlist:
print(items['first_name'], items['last_name'])
print_student_info(students)
# ---------------
# Part 2:
print('-----')
# dictionary
users = {
'Students': [
# key:value pair
{'first_name': 'Michael', 'last_name': 'Jordan'},
{'first_name': 'John', 'last_name': 'Rosales'},
{'first_name': 'Mark', 'last_name': 'Guillen'},
{'first_name': 'KB', 'last_name': 'Tonel'}
],
'Instructors': [
{'first_name': 'Michael', 'last_name': 'Choi'},
{'first_name': 'Martin', 'last_name': 'Puryear'}
]
}
# f(x) dictinary
def print_school_info(userdict):
# loop dictinary
for item in userdict:
# print item
print(item)
# set variable for counting
counter = 0
# loop through people in dictionary
for person in userdict[item]:
# increment counter variable
counter+=1
# print counter + first + last + length of person's name
print(counter,person['first_name'].upper(),person['last_name'].upper(),'-',(len(person['first_name']) + len(person['last_na'])))
# call f(x)
print_school_info(users)
# L|5 | 0.46223 | 0.275577 |
from typing import Any, Dict, Iterable
from inspect import isclass
import abc
from ._argument_spec import ArgumentSpec
def _needs_spec(bound_object: Any) -> bool:
return isclass(bound_object)
class BindingSpec(metaclass=abc.ABCMeta):
@abc.abstractmethod
def has_instance(self) -> bool:
"""
Returns true if the binding spec have an
instance ready to be returned, False otherwise.
"""
raise NotImplementedError()
@abc.abstractmethod
def get_instance(self) -> Any:
"""
Returns the instance associated with the BindingSpec.
"""
raise NotImplementedError()
@abc.abstractmethod
def construct_instance(self, keyword_arguments: Dict[str, object]) -> Any:
"""
Constructs an instance of the bound object given a map of arguments
"""
pass
@abc.abstractmethod
def get_argument_specs(self) -> Iterable[ArgumentSpec]:
"""
Returns the bindings necessary to construct the instance.
"""
pass
class InstanceBindingSpec(BindingSpec):
def __init__(self, bound_object: Any):
self._validate_object(bound_object)
self._instance = bound_object
def get_instance(self):
return self._instance
def construct_instance(self, keyword_arguments: Dict[str, object]) -> Any:
raise TypeError("{} doesn't need to construct instances".format(
InstanceBindingSpec.__name__))
def has_instance(self) -> bool:
return True
@staticmethod
def _validate_object(bound_object: Any):
if bound_object is None:
raise TypeError("Binding None is not allowed")
if _needs_spec(bound_object):
raise TypeError(
("{} should only be used with object "
"instances and unbound callables.").format(
InstanceBindingSpec.__name__))
def get_argument_specs(self) -> Iterable[ArgumentSpec]:
raise TypeError("{} doesn't have any argument specs".format(
InstanceBindingSpec.__name__)) | zanna/_binding_spec.py | from typing import Any, Dict, Iterable
from inspect import isclass
import abc
from ._argument_spec import ArgumentSpec
def _needs_spec(bound_object: Any) -> bool:
return isclass(bound_object)
class BindingSpec(metaclass=abc.ABCMeta):
@abc.abstractmethod
def has_instance(self) -> bool:
"""
Returns true if the binding spec have an
instance ready to be returned, False otherwise.
"""
raise NotImplementedError()
@abc.abstractmethod
def get_instance(self) -> Any:
"""
Returns the instance associated with the BindingSpec.
"""
raise NotImplementedError()
@abc.abstractmethod
def construct_instance(self, keyword_arguments: Dict[str, object]) -> Any:
"""
Constructs an instance of the bound object given a map of arguments
"""
pass
@abc.abstractmethod
def get_argument_specs(self) -> Iterable[ArgumentSpec]:
"""
Returns the bindings necessary to construct the instance.
"""
pass
class InstanceBindingSpec(BindingSpec):
def __init__(self, bound_object: Any):
self._validate_object(bound_object)
self._instance = bound_object
def get_instance(self):
return self._instance
def construct_instance(self, keyword_arguments: Dict[str, object]) -> Any:
raise TypeError("{} doesn't need to construct instances".format(
InstanceBindingSpec.__name__))
def has_instance(self) -> bool:
return True
@staticmethod
def _validate_object(bound_object: Any):
if bound_object is None:
raise TypeError("Binding None is not allowed")
if _needs_spec(bound_object):
raise TypeError(
("{} should only be used with object "
"instances and unbound callables.").format(
InstanceBindingSpec.__name__))
def get_argument_specs(self) -> Iterable[ArgumentSpec]:
raise TypeError("{} doesn't have any argument specs".format(
InstanceBindingSpec.__name__)) | 0.888057 | 0.16872 |
import pytest
import json
from common.testresult import TestResults, TestResult
import pickle
import base64
def test__testresults_append__type_not_testresult__throws_error():
# Arrange
test_results = TestResults()
# Act/Assert
with pytest.raises(TypeError):
test_results.append("Test")
def test__testresults_append__type_testresult__appends_testresult():
# Arrange
test_results = TestResults()
# Act
test_results.append(TestResult("Test Name", True, 1, []))
# Assert
assert len(test_results.results) == 1
def test__eq__test_results_not_equal__are_not_equal():
# Arrange
test_results = TestResults()
test_results.append(TestResult("Test NameX", True, 1, []))
test_results.append(TestResult("Test Name1", True, 1, [], ValueError("Error")))
test_results1 = TestResults()
test_results1.append(TestResult("Test Name", True, 1, []))
test_results1.append(TestResult("Test Name1", True, 1, [], ValueError("Error")))
# Act / Assert
are_not_equal = test_results != test_results1
assert are_not_equal == True
def test__deserialize__no_constraints__is_serializable_and_deserializable():
# Arrange
test_results = TestResults()
test_results.append(TestResult("Test Name", True, 1, []))
test_results.append(TestResult("Test Name1", True, 1, [], ValueError("Error")))
serialized_data = test_results.serialize()
deserialized_data = TestResults().deserialize(serialized_data)
assert test_results == deserialized_data
def test__deserialize__empty_pickle_data__throws_exception():
# Arrange
test_results = TestResults()
invalid_pickle = ""
# Act / Assert
with pytest.raises(Exception):
test_results.deserialize(invalid_pickle)
def test__deserialize__invalid_pickle_data__throws_Exception():
# Arrange
test_results = TestResults()
invalid_pickle = "test"
# Act / Assert
with pytest.raises(Exception):
test_results.deserialize(invalid_pickle)
def test__eq__test_results_equal_but_not_same_ref__are_equal():
# Arrange
test_results = TestResults()
test_results.append(TestResult("Test Name", True, 1, []))
test_results.append(TestResult("Test Name1", True, 1, [], ValueError("Error")))
test_results1 = TestResults()
test_results1.append(TestResult("Test Name", True, 1, []))
test_results1.append(TestResult("Test Name1", True, 1, [], ValueError("Error")))
# Act / Assert
assert test_results == test_results1
def test__num_tests__5_test_cases__is_5():
# Arrange
test_results = TestResults()
test_results.append(TestResult("Test Name", True, 1, []))
test_results.append(TestResult("Test Name1", False, 1, [], ValueError("Error")))
test_results.append(TestResult("Test Name1", False, 1, [], ValueError("Error")))
test_results.append(TestResult("Test Name1", False, 1, [], ValueError("Error")))
test_results.append(TestResult("Test Name1", False, 1, [], ValueError("Error")))
# Act / Assert
assert 5 == test_results.test_cases
def test__num_failures__5_test_cases_4_failures__is_4():
# Arrange
test_results = TestResults()
test_results.append(TestResult("Test Name", True, 1, []))
test_results.append(TestResult("Test Name1", False, 1, [], ValueError("Error")))
test_results.append(TestResult("Test Name1", False, 1, [], ValueError("Error")))
test_results.append(TestResult("Test Name1", False, 1, [], ValueError("Error")))
test_results.append(TestResult("Test Name1", False, 1, [], ValueError("Error")))
# Act / Assert
assert 4 == test_results.num_failures
def test__total_execution_time__5_test_cases__is_sum_of_execution_times():
# Arrange
test_results = TestResults()
test_results.append(TestResult("Test Name", True, 1.12, []))
test_results.append(TestResult("Test Name1", False, 1.0005, [], ValueError("Error")))
test_results.append(TestResult("Test Name1", False, 10.000034, [], ValueError("Error")))
test_results.append(TestResult("Test Name1", False, 7.66, [], ValueError("Error")))
test_results.append(TestResult("Test Name1", False, 13.21, [], ValueError("Error")))
# Act / Assert
assert 32.990534 == test_results.total_execution_time
def test__serialize__result_data__is_base64_str():
test_results = TestResults()
serialized_data = test_results.serialize()
serialized_bin_data = base64.encodebytes(pickle.dumps(test_results))
assert serialized_data == str(serialized_bin_data, "utf-8")
def test__deserialize__data_is_base64_str__can_deserialize():
test_results = TestResults()
serialized_bin_data = pickle.dumps(test_results)
serialized_str = str(base64.encodebytes(serialized_bin_data), "utf-8")
test_results_from_data = TestResults().deserialize(serialized_str)
assert test_results == test_results_from_data | tests/nutter/test_testresult.py | import pytest
import json
from common.testresult import TestResults, TestResult
import pickle
import base64
def test__testresults_append__type_not_testresult__throws_error():
# Arrange
test_results = TestResults()
# Act/Assert
with pytest.raises(TypeError):
test_results.append("Test")
def test__testresults_append__type_testresult__appends_testresult():
# Arrange
test_results = TestResults()
# Act
test_results.append(TestResult("Test Name", True, 1, []))
# Assert
assert len(test_results.results) == 1
def test__eq__test_results_not_equal__are_not_equal():
# Arrange
test_results = TestResults()
test_results.append(TestResult("Test NameX", True, 1, []))
test_results.append(TestResult("Test Name1", True, 1, [], ValueError("Error")))
test_results1 = TestResults()
test_results1.append(TestResult("Test Name", True, 1, []))
test_results1.append(TestResult("Test Name1", True, 1, [], ValueError("Error")))
# Act / Assert
are_not_equal = test_results != test_results1
assert are_not_equal == True
def test__deserialize__no_constraints__is_serializable_and_deserializable():
# Arrange
test_results = TestResults()
test_results.append(TestResult("Test Name", True, 1, []))
test_results.append(TestResult("Test Name1", True, 1, [], ValueError("Error")))
serialized_data = test_results.serialize()
deserialized_data = TestResults().deserialize(serialized_data)
assert test_results == deserialized_data
def test__deserialize__empty_pickle_data__throws_exception():
# Arrange
test_results = TestResults()
invalid_pickle = ""
# Act / Assert
with pytest.raises(Exception):
test_results.deserialize(invalid_pickle)
def test__deserialize__invalid_pickle_data__throws_Exception():
# Arrange
test_results = TestResults()
invalid_pickle = "test"
# Act / Assert
with pytest.raises(Exception):
test_results.deserialize(invalid_pickle)
def test__eq__test_results_equal_but_not_same_ref__are_equal():
# Arrange
test_results = TestResults()
test_results.append(TestResult("Test Name", True, 1, []))
test_results.append(TestResult("Test Name1", True, 1, [], ValueError("Error")))
test_results1 = TestResults()
test_results1.append(TestResult("Test Name", True, 1, []))
test_results1.append(TestResult("Test Name1", True, 1, [], ValueError("Error")))
# Act / Assert
assert test_results == test_results1
def test__num_tests__5_test_cases__is_5():
# Arrange
test_results = TestResults()
test_results.append(TestResult("Test Name", True, 1, []))
test_results.append(TestResult("Test Name1", False, 1, [], ValueError("Error")))
test_results.append(TestResult("Test Name1", False, 1, [], ValueError("Error")))
test_results.append(TestResult("Test Name1", False, 1, [], ValueError("Error")))
test_results.append(TestResult("Test Name1", False, 1, [], ValueError("Error")))
# Act / Assert
assert 5 == test_results.test_cases
def test__num_failures__5_test_cases_4_failures__is_4():
# Arrange
test_results = TestResults()
test_results.append(TestResult("Test Name", True, 1, []))
test_results.append(TestResult("Test Name1", False, 1, [], ValueError("Error")))
test_results.append(TestResult("Test Name1", False, 1, [], ValueError("Error")))
test_results.append(TestResult("Test Name1", False, 1, [], ValueError("Error")))
test_results.append(TestResult("Test Name1", False, 1, [], ValueError("Error")))
# Act / Assert
assert 4 == test_results.num_failures
def test__total_execution_time__5_test_cases__is_sum_of_execution_times():
# Arrange
test_results = TestResults()
test_results.append(TestResult("Test Name", True, 1.12, []))
test_results.append(TestResult("Test Name1", False, 1.0005, [], ValueError("Error")))
test_results.append(TestResult("Test Name1", False, 10.000034, [], ValueError("Error")))
test_results.append(TestResult("Test Name1", False, 7.66, [], ValueError("Error")))
test_results.append(TestResult("Test Name1", False, 13.21, [], ValueError("Error")))
# Act / Assert
assert 32.990534 == test_results.total_execution_time
def test__serialize__result_data__is_base64_str():
test_results = TestResults()
serialized_data = test_results.serialize()
serialized_bin_data = base64.encodebytes(pickle.dumps(test_results))
assert serialized_data == str(serialized_bin_data, "utf-8")
def test__deserialize__data_is_base64_str__can_deserialize():
test_results = TestResults()
serialized_bin_data = pickle.dumps(test_results)
serialized_str = str(base64.encodebytes(serialized_bin_data), "utf-8")
test_results_from_data = TestResults().deserialize(serialized_str)
assert test_results == test_results_from_data | 0.317532 | 0.424889 |
from django.core.urlresolvers import reverse
from django.shortcuts import redirect, render_to_response
from django.template.context import RequestContext
from guardian.decorators import permission_required_or_403
from userena.decorators import secure_required
from accounts.forms import UserAddForm
from django.contrib.auth.models import User
from django.core.mail import send_mass_mail, EmailMessage
from settings import SERVER_EMAIL
@secure_required
@permission_required_or_403('auth.add_user')
def user_add(request):
"""
Add a user using a subclass of Userena's SignupForm,
which takes care of Profile creation, adding necessary
user permissions, password generation, and sending an
activation email.
The only reason this doesn't use userena.views.signup is
that userena.views.signup logs out the current user (the
admin user) after a user is added. (That makes sense for
creating an account for yourself, but not for creating
someone else's account.)
"""
form = UserAddForm()
if request.method == 'POST':
form = UserAddForm(request.POST, request.FILES)
if form.is_valid():
user = form.save()
redirect_to = reverse(
'userena_signup_complete',
kwargs={'username': user.username}
)
return redirect(redirect_to)
return render_to_response('accounts/user_add_form.html', {
'form': form,
},
context_instance=RequestContext(request)
)
#sends mass emails to all users in a single email
def email_all(request):
status = None
if request.method == 'POST':
subject = request.REQUEST.get('subject').encode("ascii")
message = request.REQUEST.get('message').encode("ascii")
if not subject or not message:
return render_to_response('accounts/email_all_form.html',
context_instance=RequestContext(request)
)
all_users = User.objects.all()
email_list = []
for u in all_users:
if u.email:
email_list.append(u.email.encode("ascii") )
email = EmailMessage(subject, message,SERVER_EMAIL,
[], bcc=email_list )
email.send(fail_silently=True)
status = "Successfully Sent Emails"
return render_to_response('accounts/email_all_form.html',
{'status':status},
context_instance=RequestContext(request)
) | accounts/views.py | from django.core.urlresolvers import reverse
from django.shortcuts import redirect, render_to_response
from django.template.context import RequestContext
from guardian.decorators import permission_required_or_403
from userena.decorators import secure_required
from accounts.forms import UserAddForm
from django.contrib.auth.models import User
from django.core.mail import send_mass_mail, EmailMessage
from settings import SERVER_EMAIL
@secure_required
@permission_required_or_403('auth.add_user')
def user_add(request):
"""
Add a user using a subclass of Userena's SignupForm,
which takes care of Profile creation, adding necessary
user permissions, password generation, and sending an
activation email.
The only reason this doesn't use userena.views.signup is
that userena.views.signup logs out the current user (the
admin user) after a user is added. (That makes sense for
creating an account for yourself, but not for creating
someone else's account.)
"""
form = UserAddForm()
if request.method == 'POST':
form = UserAddForm(request.POST, request.FILES)
if form.is_valid():
user = form.save()
redirect_to = reverse(
'userena_signup_complete',
kwargs={'username': user.username}
)
return redirect(redirect_to)
return render_to_response('accounts/user_add_form.html', {
'form': form,
},
context_instance=RequestContext(request)
)
#sends mass emails to all users in a single email
def email_all(request):
status = None
if request.method == 'POST':
subject = request.REQUEST.get('subject').encode("ascii")
message = request.REQUEST.get('message').encode("ascii")
if not subject or not message:
return render_to_response('accounts/email_all_form.html',
context_instance=RequestContext(request)
)
all_users = User.objects.all()
email_list = []
for u in all_users:
if u.email:
email_list.append(u.email.encode("ascii") )
email = EmailMessage(subject, message,SERVER_EMAIL,
[], bcc=email_list )
email.send(fail_silently=True)
status = "Successfully Sent Emails"
return render_to_response('accounts/email_all_form.html',
{'status':status},
context_instance=RequestContext(request)
) | 0.453746 | 0.097433 |
import pandas as pd
import numpy as np
def unique_allele_test(row):
if row['total_total_alt_allele_counts'] > 0:
top_well = sorted(wells, key=lambda w: row[w + '_total_alt_allele_counts'])[-1]
if row[top_well + '_total_alt_allele_counts'] / row['total_total_alt_allele_counts'] > 0.9:
return top_well
return 'No'
def get_allele_counts(x):
# just picking out the AD portion of the column
s = x.split(':')
if len(s) > 1:
return s[1]
else:
return '0'
def get_ref_allele_counts(x):
return int(x.split(',')[0])
def get_alt_allele_counts(x):
return sum([int(i) for i in x.split(',')[1:]])
def get_af_traj(r, gu):
# row, gens_use
afs = []
for g in gu:
if r['G' + str(g) + '_tot_counts'] > 0:
afs.append(str(g) + '_' + str(r['G'+str(g)+'_alt_allele_counts'] / r['G'+str(g)+'_tot_counts']))
return ';'.join(afs)
nd = pd.read_csv('../../Output/WGS/FINAL_FULL_VARIANTS.tsv', delimiter='\t')
new_cols = ['CHROM', 'POS', 'REF', 'ALT', 'QUAL', 'ANN', 'FORMAT']
wells = sorted(list(set([i.split('_')[-1] for i in nd if i not in new_cols])))
print('file read')
for i in [j for j in nd.columns if j not in new_cols]:
nd[i + '_allele_counts'] = nd[i].apply(lambda x: get_allele_counts(x))
nd[i + '_ref_allele_counts'] = nd[i + '_allele_counts'].apply(lambda x: get_ref_allele_counts(x))
nd[i + '_alt_allele_counts'] = nd[i + '_allele_counts'].apply(lambda x: get_alt_allele_counts(x))
print('1')
for w in wells:
acols = [i for i in nd if w in i and '_alt_allele_counts' in i]
nd[w + '_total_alt_allele_counts'] = np.sum(nd[acols], axis=1)
print('2')
nd['total_total_alt_allele_counts'] = np.nansum(nd[[w + '_total_alt_allele_counts' for w in wells]], axis=1)
nd['unique_allele'] = nd.apply(lambda r: unique_allele_test(r), axis=1)
nd.to_csv('../../Output/WGS/allele_table_expanded.tsv', index=False, sep='\t')
print('3')
nd = pd.read_csv('../Final_output/allele_table_expanded.tsv', delimiter='\t')
gens = [70, 1410, 2640, 5150, 7530, 10150]
for w in wells:
well_cols = [i for i in nd if w in i]
gens_use = [g for g in gens if 'G' + str(g) + '_' + w in well_cols]
td = nd[nd[w + '_total_alt_allele_counts']>2][new_cols + well_cols + ['total_total_alt_allele_counts']].rename(columns={i:i.replace('_'+w, '') for i in well_cols})
td['percentage_of_total_alt_counts'] = td[w + '_total_alt_allele_counts'] / td['total_total_alt_allele_counts']
for g in gens_use:
td['G' + str(g) + '_tot_counts'] = td['G'+str(g)+'_alt_allele_counts'] + td['G'+str(g)+'_ref_allele_counts']
td['G' + str(g) + '_af'] = td['G'+str(g)+'_alt_allele_counts'] / td['G' + str(g) + '_tot_counts']
td['af_trajectory'] = td.apply(lambda row: get_af_traj(row, gens_use), axis=1)
use_cols = ['CHROM', 'POS', 'REF', 'ALT', 'QUAL', 'ANN', 'af_trajectory'] + ['G'+str(g)+'_allele_counts' for g in gens_use]
td[use_cols + ['percentage_of_total_alt_counts']].to_csv('../../Output/WGS/well_output/' + w + '_full.tsv', index=False, sep='\t')
td[td['percentage_of_total_alt_counts'] > 0.9][use_cols].to_csv('../../Output/WGS/well_output/' + w + '_unique90.tsv', index=False, sep='\t') | WGS/.ipynb_checkpoints/process_final_files-checkpoint.py | import pandas as pd
import numpy as np
def unique_allele_test(row):
if row['total_total_alt_allele_counts'] > 0:
top_well = sorted(wells, key=lambda w: row[w + '_total_alt_allele_counts'])[-1]
if row[top_well + '_total_alt_allele_counts'] / row['total_total_alt_allele_counts'] > 0.9:
return top_well
return 'No'
def get_allele_counts(x):
# just picking out the AD portion of the column
s = x.split(':')
if len(s) > 1:
return s[1]
else:
return '0'
def get_ref_allele_counts(x):
return int(x.split(',')[0])
def get_alt_allele_counts(x):
return sum([int(i) for i in x.split(',')[1:]])
def get_af_traj(r, gu):
# row, gens_use
afs = []
for g in gu:
if r['G' + str(g) + '_tot_counts'] > 0:
afs.append(str(g) + '_' + str(r['G'+str(g)+'_alt_allele_counts'] / r['G'+str(g)+'_tot_counts']))
return ';'.join(afs)
nd = pd.read_csv('../../Output/WGS/FINAL_FULL_VARIANTS.tsv', delimiter='\t')
new_cols = ['CHROM', 'POS', 'REF', 'ALT', 'QUAL', 'ANN', 'FORMAT']
wells = sorted(list(set([i.split('_')[-1] for i in nd if i not in new_cols])))
print('file read')
for i in [j for j in nd.columns if j not in new_cols]:
nd[i + '_allele_counts'] = nd[i].apply(lambda x: get_allele_counts(x))
nd[i + '_ref_allele_counts'] = nd[i + '_allele_counts'].apply(lambda x: get_ref_allele_counts(x))
nd[i + '_alt_allele_counts'] = nd[i + '_allele_counts'].apply(lambda x: get_alt_allele_counts(x))
print('1')
for w in wells:
acols = [i for i in nd if w in i and '_alt_allele_counts' in i]
nd[w + '_total_alt_allele_counts'] = np.sum(nd[acols], axis=1)
print('2')
nd['total_total_alt_allele_counts'] = np.nansum(nd[[w + '_total_alt_allele_counts' for w in wells]], axis=1)
nd['unique_allele'] = nd.apply(lambda r: unique_allele_test(r), axis=1)
nd.to_csv('../../Output/WGS/allele_table_expanded.tsv', index=False, sep='\t')
print('3')
nd = pd.read_csv('../Final_output/allele_table_expanded.tsv', delimiter='\t')
gens = [70, 1410, 2640, 5150, 7530, 10150]
for w in wells:
well_cols = [i for i in nd if w in i]
gens_use = [g for g in gens if 'G' + str(g) + '_' + w in well_cols]
td = nd[nd[w + '_total_alt_allele_counts']>2][new_cols + well_cols + ['total_total_alt_allele_counts']].rename(columns={i:i.replace('_'+w, '') for i in well_cols})
td['percentage_of_total_alt_counts'] = td[w + '_total_alt_allele_counts'] / td['total_total_alt_allele_counts']
for g in gens_use:
td['G' + str(g) + '_tot_counts'] = td['G'+str(g)+'_alt_allele_counts'] + td['G'+str(g)+'_ref_allele_counts']
td['G' + str(g) + '_af'] = td['G'+str(g)+'_alt_allele_counts'] / td['G' + str(g) + '_tot_counts']
td['af_trajectory'] = td.apply(lambda row: get_af_traj(row, gens_use), axis=1)
use_cols = ['CHROM', 'POS', 'REF', 'ALT', 'QUAL', 'ANN', 'af_trajectory'] + ['G'+str(g)+'_allele_counts' for g in gens_use]
td[use_cols + ['percentage_of_total_alt_counts']].to_csv('../../Output/WGS/well_output/' + w + '_full.tsv', index=False, sep='\t')
td[td['percentage_of_total_alt_counts'] > 0.9][use_cols].to_csv('../../Output/WGS/well_output/' + w + '_unique90.tsv', index=False, sep='\t') | 0.19046 | 0.122891 |
import matplotlib
matplotlib.use('agg')
import cartopy.crs as ccrs
import matplotlib.pyplot as plt
import seaborn as sns
from glob import glob
import json
import pandas as pd
import numpy as np
import json
from pathlib import Path
import pkg_resources
def main():
for c in range(2):
files = glob(f'./*/wasserstein_dist_channel{c}.json')
data = []
for file in files:
with open(file, 'r') as f:
dct = json.load(f)
dct['city'] = Path(file).parts[-2].replace('_', ' ').title()
data.append(dct)
df_wide = pd.DataFrame(data)
df = df_wide.melt(value_name='wasserstein distance', var_name='model', id_vars=['city'])
df = df[df.model != 'ground truth']
with sns.plotting_context('paper'), sns.axes_style('whitegrid'), sns.color_palette('deep'):
g = sns.displot(df, x='wasserstein distance', hue='model', kind='hist', bins=30, log_scale=True, height=3, aspect=1.5)
g.fig.savefig(f'wasserstein_hist_channel{c}.png', bbox_inches='tight')
g.fig.savefig(f'wasserstein_hist_channel{c}.pdf', bbox_inches='tight')
worser = int(np.sum(df_wide.ours > 1.1*df_wide.mse))
better = int(np.sum(df_wide.mse > 1.1*df_wide.ours))
equal = len(df_wide) - worser - better
with open(f'stats_channel{c}.json', 'w') as f:
json.dump({'worser': worser, 'equal': equal, 'better': better}, f, indent=4)
cities = pd.read_csv(pkg_resources.resource_filename('phire', 'data/cities.csv'))
def plot(col):
merged = df_wide.sort_values(col).merge(cities, left_on='city', right_on='city_ascii')
overperformer = merged.head(30)
underperformer = merged.tail(30)
fig, ax = plt.subplots(figsize=(10,5), subplot_kw={'projection':ccrs.Robinson()})
#ax.coastlines()
ax.stock_img()
for _, city in overperformer.iterrows():
ax.plot(city.lng, city.lat, 'g*', transform=ccrs.Geodetic())
for _, city in underperformer.iterrows():
ax.plot(city.lng, city.lat, 'r*', transform=ccrs.Geodetic())
fig.savefig(f'outliers_{col}_channel{c}.png', bbox_inches='tight')
fig.savefig(f'outliers_{col}_channel{c}.pdf', bbox_inches='tight')
plot('ours')
plot('mse')
if __name__ == '__main__':
main() | scripts/plots/wasserstein_histogram.py | import matplotlib
matplotlib.use('agg')
import cartopy.crs as ccrs
import matplotlib.pyplot as plt
import seaborn as sns
from glob import glob
import json
import pandas as pd
import numpy as np
import json
from pathlib import Path
import pkg_resources
def main():
for c in range(2):
files = glob(f'./*/wasserstein_dist_channel{c}.json')
data = []
for file in files:
with open(file, 'r') as f:
dct = json.load(f)
dct['city'] = Path(file).parts[-2].replace('_', ' ').title()
data.append(dct)
df_wide = pd.DataFrame(data)
df = df_wide.melt(value_name='wasserstein distance', var_name='model', id_vars=['city'])
df = df[df.model != 'ground truth']
with sns.plotting_context('paper'), sns.axes_style('whitegrid'), sns.color_palette('deep'):
g = sns.displot(df, x='wasserstein distance', hue='model', kind='hist', bins=30, log_scale=True, height=3, aspect=1.5)
g.fig.savefig(f'wasserstein_hist_channel{c}.png', bbox_inches='tight')
g.fig.savefig(f'wasserstein_hist_channel{c}.pdf', bbox_inches='tight')
worser = int(np.sum(df_wide.ours > 1.1*df_wide.mse))
better = int(np.sum(df_wide.mse > 1.1*df_wide.ours))
equal = len(df_wide) - worser - better
with open(f'stats_channel{c}.json', 'w') as f:
json.dump({'worser': worser, 'equal': equal, 'better': better}, f, indent=4)
cities = pd.read_csv(pkg_resources.resource_filename('phire', 'data/cities.csv'))
def plot(col):
merged = df_wide.sort_values(col).merge(cities, left_on='city', right_on='city_ascii')
overperformer = merged.head(30)
underperformer = merged.tail(30)
fig, ax = plt.subplots(figsize=(10,5), subplot_kw={'projection':ccrs.Robinson()})
#ax.coastlines()
ax.stock_img()
for _, city in overperformer.iterrows():
ax.plot(city.lng, city.lat, 'g*', transform=ccrs.Geodetic())
for _, city in underperformer.iterrows():
ax.plot(city.lng, city.lat, 'r*', transform=ccrs.Geodetic())
fig.savefig(f'outliers_{col}_channel{c}.png', bbox_inches='tight')
fig.savefig(f'outliers_{col}_channel{c}.pdf', bbox_inches='tight')
plot('ours')
plot('mse')
if __name__ == '__main__':
main() | 0.319758 | 0.269203 |
from openmdao.core.vec_wrapper import SrcVecWrapper, TgtVecWrapper
from openmdao.core.data_transfer import DataTransfer
from openmdao.core.mpi_wrap import FakeComm
class BasicImpl(object):
"""Basic vector and data transfer implementation factory."""
idx_arr_type = int
@staticmethod
def world_comm():
return FakeComm()
@staticmethod
def create_src_vecwrapper(sysdata, probdata, comm):
"""
Create a vecwrapper for source variables.
Args
----
sysdata : _SysData
A data object for System level data.
probdata : _ProbData
A data object for Problem level data that we need in order to store
flags that span multiple layers in the hierarchy.
comm : a fake communicator or None.
This arg is ignored.
Returns
-------
`SrcVecWrapper`
"""
return SrcVecWrapper(sysdata, probdata, comm)
@staticmethod
def create_tgt_vecwrapper(sysdata, probdata, comm):
"""
Create a vecwrapper for target variables.
Args
-----
sysdata : _SysData
A data object for system level data
probdata : _ProbData
A data object for Problem level data that we need in order to store
flags that span multiple layers in the hierarchy.
comm : a fake communicator or None.
This arg is ignored.
Returns
-------
`TgtVecWrapper`
"""
return TgtVecWrapper(sysdata, probdata, comm)
@staticmethod
def create_data_xfer(src_vec, tgt_vec,
src_idxs, tgt_idxs, vec_conns, byobj_conns,
mode, sysdata):
"""
Create an object for performing data transfer between source
and target vectors.
Args
----
src_vec : `VecWrapper`
Source vecwrapper for the transfer. In rev mode it will be the
target.
tgt_vec : `VecWrapper`
Target vecwrapper for the transfer. In rev mode it will be the
source.
src_idxs : array
Indices of the source variables in the source vector.
tgt_idxs : array
Indices of the target variables in the target vector.
vec_conns : dict
Mapping of 'pass by vector' variables to the source variables that
they are connected to.
byobj_conns : dict
Mapping of 'pass by object' variables to the source variables that
they are connected to.
mode : str
Either 'fwd' or 'rev', indicating a forward or reverse scatter.
sysdata : `SysData` object
The `SysData` object for the Group that will contain the new
`DataTransfer` object.
Returns
-------
`DataTransfer`
A `DataTransfer` object.
"""
return DataTransfer(src_idxs, tgt_idxs, vec_conns, byobj_conns, mode,
sysdata) | openmdao/core/basic_impl.py |
from openmdao.core.vec_wrapper import SrcVecWrapper, TgtVecWrapper
from openmdao.core.data_transfer import DataTransfer
from openmdao.core.mpi_wrap import FakeComm
class BasicImpl(object):
"""Basic vector and data transfer implementation factory."""
idx_arr_type = int
@staticmethod
def world_comm():
return FakeComm()
@staticmethod
def create_src_vecwrapper(sysdata, probdata, comm):
"""
Create a vecwrapper for source variables.
Args
----
sysdata : _SysData
A data object for System level data.
probdata : _ProbData
A data object for Problem level data that we need in order to store
flags that span multiple layers in the hierarchy.
comm : a fake communicator or None.
This arg is ignored.
Returns
-------
`SrcVecWrapper`
"""
return SrcVecWrapper(sysdata, probdata, comm)
@staticmethod
def create_tgt_vecwrapper(sysdata, probdata, comm):
"""
Create a vecwrapper for target variables.
Args
-----
sysdata : _SysData
A data object for system level data
probdata : _ProbData
A data object for Problem level data that we need in order to store
flags that span multiple layers in the hierarchy.
comm : a fake communicator or None.
This arg is ignored.
Returns
-------
`TgtVecWrapper`
"""
return TgtVecWrapper(sysdata, probdata, comm)
@staticmethod
def create_data_xfer(src_vec, tgt_vec,
src_idxs, tgt_idxs, vec_conns, byobj_conns,
mode, sysdata):
"""
Create an object for performing data transfer between source
and target vectors.
Args
----
src_vec : `VecWrapper`
Source vecwrapper for the transfer. In rev mode it will be the
target.
tgt_vec : `VecWrapper`
Target vecwrapper for the transfer. In rev mode it will be the
source.
src_idxs : array
Indices of the source variables in the source vector.
tgt_idxs : array
Indices of the target variables in the target vector.
vec_conns : dict
Mapping of 'pass by vector' variables to the source variables that
they are connected to.
byobj_conns : dict
Mapping of 'pass by object' variables to the source variables that
they are connected to.
mode : str
Either 'fwd' or 'rev', indicating a forward or reverse scatter.
sysdata : `SysData` object
The `SysData` object for the Group that will contain the new
`DataTransfer` object.
Returns
-------
`DataTransfer`
A `DataTransfer` object.
"""
return DataTransfer(src_idxs, tgt_idxs, vec_conns, byobj_conns, mode,
sysdata) | 0.867626 | 0.399724 |
import subprocess
import warnings
from freemoovr.proxy.base_zmq import MultiServerBaseZMQ
class _OSGFileParsingMixin(object):
@staticmethod
def get_animations(path):
return _OSGFileParsingMixin._parse_osg_file(path)[1]
@staticmethod
def get_nodes(path):
return _OSGFileParsingMixin._parse_osg_file(path)[0]
@staticmethod
def _parse_osg_file(name):
"""returns a list of named nodes and a list of animations present in the osg file
NOTE: `parseosg` executable needs to be in path
"""
stdout = subprocess.check_output("parseosg %s" % name, shell=True)
nodes = []
animations = []
for line in stdout.split('\n'):
if line and ('=' in line):
thing, name = line.split('=')
if thing == 'MatrixTransformNode':
nodes.append(name)
elif thing == 'Animation':
animations.append(name)
return nodes, animations
class StimulusOSGController(MultiServerBaseZMQ, _OSGFileParsingMixin):
"""compatible with the fishvr StimulusOSGController"""
def __init__(self, *args, **kwargs):
super(StimulusOSGController, self).__init__(*args, **kwargs)
self.wait_for_servers(error_after=10.0)
self.set_stimulus_plugin('StimulusOSG')
def load_osg(self, f):
self.send({'osg_filename': {'data': f}})
def move_world(self, x, y, z, orientation_x=0, orientation_y=0, orientation_z=0):
self.send({'osg_model_pose': {'position': {'x': x, 'y': y, 'z': z},
'orientation': {'w': 1,
'x': orientation_x, 'y': orientation_y, 'z': orientation_z
}}})
def scale_world(self, scale_x, scale_y, scale_z):
self.send({'osg_model_scale': {'x': scale_x, 'y': scale_y, 'z': scale_z}})
def move_node(self, name, x, y, z, scale=1, hidden=False, orientation_x=0, orientation_y=0, orientation_z=0):
self.send({'osg_submodel_pose': {'header': {'frame_id': name},
'pose': {'position': {'x': x, 'y': y, 'z': z},
'orientation': {
'w': 0 if hidden else scale,
'x': orientation_x, 'y': orientation_y, 'z': orientation_z
}}}})
def animation_start(self, name):
warnings.warn('animation in StimulusOSGController is deprecated. please use StimulusOSG2Controller')
self.send({'osg_animation_start': {'data': name}})
def animation_stop(self, name):
warnings.warn('animation in StimulusOSGController is deprecated. please use StimulusOSG2Controller')
self.send({'osg_animation_stop': {'data': name}})
class _OSG2FileHandle(object):
"""Handle class returned by the StimulusOSG2Controller.load_osg(path)
do not instantiate directly.
"""
def __init__(self, proxy, filename):
self._p = proxy
self._f = filename
self._hidden = False
self._scale = 1
self._x0 = self._y0 = self._z0 = self._ox0 = self._oy0 = self._oz0 = 0.0
self._x = self._y = self._z = self._ox = self._oy = self._oz = 0.0
def _transform(self, x0, y0, z0, ox0, oy0, oz0):
self._x0 = x0
self._y0 = y0
self._z0 = z0
self._ox0 = ox0
self._oy0 = oy0
self._oz0 = oz0
self.move(x=self._x, y=self._y, z=self._z,
hidden=self._hidden, scale=self._scale,
orientation_x=self._ox0, orientation_y=self._oy0, orientation_z=self._oz0)
def move(self, x=0, y=0, z=0, scale=1, hidden=False, orientation_x=0, orientation_y=0, orientation_z=0):
self._p.send({'osg2_submodel_pose': {'header': {'frame_id': self._f},
'pose': {'position': {'x': x + self._x0, 'y': y + self._y0, 'z': z + self._z0},
'orientation': {
'w': 0 if hidden else scale,
'x': orientation_x + self._ox0, 'y': orientation_y + self._oy0, 'z': orientation_z + self._oz0
}}}})
self._x = x
self._y = y
self._z = z
self._ox = orientation_x
self._oy = orientation_y
self._oz = orientation_z
self._scale = scale
self._hidden = hidden
def hide(self):
self.move(hidden=True)
def fade_in(self):
self._p.send({'osg2_submodel_fade_in': {'data': self._f}})
def fade_out(self):
self._p.send({'osg2_submodel_fade_out': {'data': self._f}})
def animation_start(self, name):
self._p.send({'osg2_animation_start': {'data': "%s|%s" % (self._f, name)}})
def animation_stop(self, name):
self._p.send({'osg2_animation_stop': {'data': "%s|%s" % (self._f, name)}})
def unload(self):
self._p.load_osg(self._f + '.unload')
def clone(self, new_name, how='auto'):
new_name = "%s+%s+%s" % (self._f, new_name, {'shallow': 's', 'deep': 'd', 'auto': 'a'}.get(how, 'a'))
self._p.send({'osg2_filename_duplicate': {'data': new_name}})
return _OSG2FileHandle(self._p, new_name)
class StimulusOSG2Controller(MultiServerBaseZMQ, _OSGFileParsingMixin):
"""compatible with the fishvr StimulusOSG2Controller"""
def __init__(self, *args, **kwargs):
super(StimulusOSG2Controller, self).__init__(*args, **kwargs)
self.wait_for_servers(error_after=10.0)
self.set_stimulus_plugin('StimulusOSG2')
def move_world(self, x=0, y=0, z=0, scale=1, orientation_x=0, orientation_y=0, orientation_z=0):
raise NotImplementedError
def set_animation_duration(self, v):
self.send({'osg2_animation_duration': {'data': float(v)}})
def unload_all(self):
self.send({'osg2_filename': {'data': '/dev/null'}})
def load_osg(self, path):
self.send({'osg2_filename': {'data': path}})
return _OSG2FileHandle(self, path)
if __name__ == "__main__":
import sys
# noinspection PyProtectedMember
n, a = _OSGFileParsingMixin._parse_osg_file(sys.argv[1])
print "nodes:", ",".join(n)
print "animations:", ",".join(a) | src/freemoovr/proxy/stimulus_osg.py | import subprocess
import warnings
from freemoovr.proxy.base_zmq import MultiServerBaseZMQ
class _OSGFileParsingMixin(object):
@staticmethod
def get_animations(path):
return _OSGFileParsingMixin._parse_osg_file(path)[1]
@staticmethod
def get_nodes(path):
return _OSGFileParsingMixin._parse_osg_file(path)[0]
@staticmethod
def _parse_osg_file(name):
"""returns a list of named nodes and a list of animations present in the osg file
NOTE: `parseosg` executable needs to be in path
"""
stdout = subprocess.check_output("parseosg %s" % name, shell=True)
nodes = []
animations = []
for line in stdout.split('\n'):
if line and ('=' in line):
thing, name = line.split('=')
if thing == 'MatrixTransformNode':
nodes.append(name)
elif thing == 'Animation':
animations.append(name)
return nodes, animations
class StimulusOSGController(MultiServerBaseZMQ, _OSGFileParsingMixin):
"""compatible with the fishvr StimulusOSGController"""
def __init__(self, *args, **kwargs):
super(StimulusOSGController, self).__init__(*args, **kwargs)
self.wait_for_servers(error_after=10.0)
self.set_stimulus_plugin('StimulusOSG')
def load_osg(self, f):
self.send({'osg_filename': {'data': f}})
def move_world(self, x, y, z, orientation_x=0, orientation_y=0, orientation_z=0):
self.send({'osg_model_pose': {'position': {'x': x, 'y': y, 'z': z},
'orientation': {'w': 1,
'x': orientation_x, 'y': orientation_y, 'z': orientation_z
}}})
def scale_world(self, scale_x, scale_y, scale_z):
self.send({'osg_model_scale': {'x': scale_x, 'y': scale_y, 'z': scale_z}})
def move_node(self, name, x, y, z, scale=1, hidden=False, orientation_x=0, orientation_y=0, orientation_z=0):
self.send({'osg_submodel_pose': {'header': {'frame_id': name},
'pose': {'position': {'x': x, 'y': y, 'z': z},
'orientation': {
'w': 0 if hidden else scale,
'x': orientation_x, 'y': orientation_y, 'z': orientation_z
}}}})
def animation_start(self, name):
warnings.warn('animation in StimulusOSGController is deprecated. please use StimulusOSG2Controller')
self.send({'osg_animation_start': {'data': name}})
def animation_stop(self, name):
warnings.warn('animation in StimulusOSGController is deprecated. please use StimulusOSG2Controller')
self.send({'osg_animation_stop': {'data': name}})
class _OSG2FileHandle(object):
"""Handle class returned by the StimulusOSG2Controller.load_osg(path)
do not instantiate directly.
"""
def __init__(self, proxy, filename):
self._p = proxy
self._f = filename
self._hidden = False
self._scale = 1
self._x0 = self._y0 = self._z0 = self._ox0 = self._oy0 = self._oz0 = 0.0
self._x = self._y = self._z = self._ox = self._oy = self._oz = 0.0
def _transform(self, x0, y0, z0, ox0, oy0, oz0):
self._x0 = x0
self._y0 = y0
self._z0 = z0
self._ox0 = ox0
self._oy0 = oy0
self._oz0 = oz0
self.move(x=self._x, y=self._y, z=self._z,
hidden=self._hidden, scale=self._scale,
orientation_x=self._ox0, orientation_y=self._oy0, orientation_z=self._oz0)
def move(self, x=0, y=0, z=0, scale=1, hidden=False, orientation_x=0, orientation_y=0, orientation_z=0):
self._p.send({'osg2_submodel_pose': {'header': {'frame_id': self._f},
'pose': {'position': {'x': x + self._x0, 'y': y + self._y0, 'z': z + self._z0},
'orientation': {
'w': 0 if hidden else scale,
'x': orientation_x + self._ox0, 'y': orientation_y + self._oy0, 'z': orientation_z + self._oz0
}}}})
self._x = x
self._y = y
self._z = z
self._ox = orientation_x
self._oy = orientation_y
self._oz = orientation_z
self._scale = scale
self._hidden = hidden
def hide(self):
self.move(hidden=True)
def fade_in(self):
self._p.send({'osg2_submodel_fade_in': {'data': self._f}})
def fade_out(self):
self._p.send({'osg2_submodel_fade_out': {'data': self._f}})
def animation_start(self, name):
self._p.send({'osg2_animation_start': {'data': "%s|%s" % (self._f, name)}})
def animation_stop(self, name):
self._p.send({'osg2_animation_stop': {'data': "%s|%s" % (self._f, name)}})
def unload(self):
self._p.load_osg(self._f + '.unload')
def clone(self, new_name, how='auto'):
new_name = "%s+%s+%s" % (self._f, new_name, {'shallow': 's', 'deep': 'd', 'auto': 'a'}.get(how, 'a'))
self._p.send({'osg2_filename_duplicate': {'data': new_name}})
return _OSG2FileHandle(self._p, new_name)
class StimulusOSG2Controller(MultiServerBaseZMQ, _OSGFileParsingMixin):
"""compatible with the fishvr StimulusOSG2Controller"""
def __init__(self, *args, **kwargs):
super(StimulusOSG2Controller, self).__init__(*args, **kwargs)
self.wait_for_servers(error_after=10.0)
self.set_stimulus_plugin('StimulusOSG2')
def move_world(self, x=0, y=0, z=0, scale=1, orientation_x=0, orientation_y=0, orientation_z=0):
raise NotImplementedError
def set_animation_duration(self, v):
self.send({'osg2_animation_duration': {'data': float(v)}})
def unload_all(self):
self.send({'osg2_filename': {'data': '/dev/null'}})
def load_osg(self, path):
self.send({'osg2_filename': {'data': path}})
return _OSG2FileHandle(self, path)
if __name__ == "__main__":
import sys
# noinspection PyProtectedMember
n, a = _OSGFileParsingMixin._parse_osg_file(sys.argv[1])
print "nodes:", ",".join(n)
print "animations:", ",".join(a) | 0.634996 | 0.174656 |
import tensorflow as tf
from tensorflow.keras.layers import Layer
class SplitString(Layer):
"""
String Splitter Layer
"""
def __init__(self,
attr_feats,
sep='_',
feature_name=None,
**kwargs):
self.attr_feats = attr_feats
self.attr_len = len(attr_feats)
self.sep = sep
self.feature_name = feature_name
super().__init__(**kwargs)
def call(self, inputs):
outputs = {}
if self.feature_name is not None:
outputs[self.feature_name] = inputs
if self.attr_len <= 1:
return outputs
split_tensor = tf.strings.split(inputs, sep=self.sep).to_tensor(shape=inputs.shape + [self.attr_len])
split_tensors = tf.split(split_tensor, self.attr_len, axis=-1)
for i in range(self.attr_len):
outputs[self.attr_feats[i]] = tf.cast(tf.squeeze(split_tensors[i], axis=-1), tf.string)
return outputs
class LookupTable(Layer):
def __init__(self,
vocab_path: str = None,
default_value: int = 0,
**kwargs):
self.vocab_path = vocab_path
self.default_value = default_value
if self.vocab_path:
initializer = tf.lookup.TextFileInitializer(vocab_path,
'string',
1,
'int64',
0,
delimiter=',')
self.table = tf.lookup.StaticHashTable(initializer,
default_value=self.default_value)
super(LookupTable, self).__init__(**kwargs)
def build(self, input_shape):
super(LookupTable, self).build(input_shape)
def call(self, inputs):
if not self.vocab_path:
return inputs
else:
return self.table.lookup(inputs)
def get_config(self):
config = {
'vocab_path': self.vocab_path,
'default_value': self.default_value
}
base_config = super().get_config()
return dict(list(config.items()) + list(base_config.items()))
class MaskZero(Layer):
"""Set values to zeroes when the row is masked
"""
def call(self, inputs, mask=None):
if mask is None:
return inputs
mask = tf.expand_dims(tf.cast(mask, tf.float32), axis=-1)
return mask * inputs
def compute_mask(self, inputs, mask=None):
return mask
def get_config(self):
return super().get_config()
class ListMeanPooling(Layer):
def __init__(self, **kwargs):
super(ListMeanPooling, self).__init__(**kwargs)
self.epsilon = 1e-12
def build(self, input_shape):
super().build(input_shape)
def call(self, inputs, mask=None):
if not isinstance(inputs, list):
return inputs
inputs = tf.stack(inputs, axis=0)
if mask is None:
return tf.reduce_mean(inputs, axis=0, keepdims=False)
mask = tf.stack(mask, axis=0)
mask = tf.expand_dims(tf.cast(mask, tf.float32), axis=-1)
inputs_sum = tf.reduce_sum(inputs * mask, axis=0, keepdims=False)
mask_sum = tf.reduce_sum(mask, axis=0, keepdims=False)
mean = tf.divide(inputs_sum, tf.math.maximum(mask_sum, tf.constant(self.epsilon, dtype=tf.float32)))
return mean
def compute_output_shape(self, input_shape):
if isinstance(input_shape, list):
return input_shape[0]
return input_shape
def compute_mask(self, inputs, mask=None):
if mask is None or not isinstance(mask, list):
return mask
mask = tf.stack(mask, axis=0)
mask = tf.reduce_sum(tf.cast(mask, tf.float32), axis=0, keepdims=False)
mask = tf.cast(mask, tf.bool)
return mask
def get_config(self):
config = {
'epsilon': self.epsilon
}
base_config = super().get_config()
return dict(list(base_config.items()) + list(config.items())) | core/layers.py | import tensorflow as tf
from tensorflow.keras.layers import Layer
class SplitString(Layer):
"""
String Splitter Layer
"""
def __init__(self,
attr_feats,
sep='_',
feature_name=None,
**kwargs):
self.attr_feats = attr_feats
self.attr_len = len(attr_feats)
self.sep = sep
self.feature_name = feature_name
super().__init__(**kwargs)
def call(self, inputs):
outputs = {}
if self.feature_name is not None:
outputs[self.feature_name] = inputs
if self.attr_len <= 1:
return outputs
split_tensor = tf.strings.split(inputs, sep=self.sep).to_tensor(shape=inputs.shape + [self.attr_len])
split_tensors = tf.split(split_tensor, self.attr_len, axis=-1)
for i in range(self.attr_len):
outputs[self.attr_feats[i]] = tf.cast(tf.squeeze(split_tensors[i], axis=-1), tf.string)
return outputs
class LookupTable(Layer):
def __init__(self,
vocab_path: str = None,
default_value: int = 0,
**kwargs):
self.vocab_path = vocab_path
self.default_value = default_value
if self.vocab_path:
initializer = tf.lookup.TextFileInitializer(vocab_path,
'string',
1,
'int64',
0,
delimiter=',')
self.table = tf.lookup.StaticHashTable(initializer,
default_value=self.default_value)
super(LookupTable, self).__init__(**kwargs)
def build(self, input_shape):
super(LookupTable, self).build(input_shape)
def call(self, inputs):
if not self.vocab_path:
return inputs
else:
return self.table.lookup(inputs)
def get_config(self):
config = {
'vocab_path': self.vocab_path,
'default_value': self.default_value
}
base_config = super().get_config()
return dict(list(config.items()) + list(base_config.items()))
class MaskZero(Layer):
"""Set values to zeroes when the row is masked
"""
def call(self, inputs, mask=None):
if mask is None:
return inputs
mask = tf.expand_dims(tf.cast(mask, tf.float32), axis=-1)
return mask * inputs
def compute_mask(self, inputs, mask=None):
return mask
def get_config(self):
return super().get_config()
class ListMeanPooling(Layer):
def __init__(self, **kwargs):
super(ListMeanPooling, self).__init__(**kwargs)
self.epsilon = 1e-12
def build(self, input_shape):
super().build(input_shape)
def call(self, inputs, mask=None):
if not isinstance(inputs, list):
return inputs
inputs = tf.stack(inputs, axis=0)
if mask is None:
return tf.reduce_mean(inputs, axis=0, keepdims=False)
mask = tf.stack(mask, axis=0)
mask = tf.expand_dims(tf.cast(mask, tf.float32), axis=-1)
inputs_sum = tf.reduce_sum(inputs * mask, axis=0, keepdims=False)
mask_sum = tf.reduce_sum(mask, axis=0, keepdims=False)
mean = tf.divide(inputs_sum, tf.math.maximum(mask_sum, tf.constant(self.epsilon, dtype=tf.float32)))
return mean
def compute_output_shape(self, input_shape):
if isinstance(input_shape, list):
return input_shape[0]
return input_shape
def compute_mask(self, inputs, mask=None):
if mask is None or not isinstance(mask, list):
return mask
mask = tf.stack(mask, axis=0)
mask = tf.reduce_sum(tf.cast(mask, tf.float32), axis=0, keepdims=False)
mask = tf.cast(mask, tf.bool)
return mask
def get_config(self):
config = {
'epsilon': self.epsilon
}
base_config = super().get_config()
return dict(list(base_config.items()) + list(config.items())) | 0.88631 | 0.273497 |
import argparse
import os
from ConfigParser import ConfigParser
# Local utils
aux = os.path.dirname(sys.argv[2])
sys.path.insert(0, aux)
import casa_utils as utils
"""Perform uvcontsub and apply calibration table if required
The input for applycal can be configured in the `lineapplycal` (default)
section of the configuration file. For observations with more than one EB, the
command line argument `eb` can be used. To specify the table for each EB, 2
methods are available:
1. Define a section `lineapplycal<i>` with `i` the EB number
2. Comma separated values for each EB. If multiple table are used per EB,
space is used per EB, e.g `caltable = cal1EB1 cal2EB1, cal1EB2 ...`. With
the exception of the `interp` parameter, which uses semi-colon separated
values.
"""
def run_uvcontsub(args):
args.outvis = args.uvdata[0]+'.contsub'
if args.noredo and os.path.isdir(args.outvis):
casalog.post('Skipping uvcontsub')
else:
uvcontsub(vis=args.uvdata[0], fitspw=args.fitspw, want_cont=False,
combine='spw', excludechans=True, fitorder=1)
def main():
# Configuration file default values
config_default = {'calwt':'false', 'interp':'linear', 'flagbackup':'false',
'spwmap':''}
# Command line options
parser = argparse.ArgumentParser()
parser.add_argument('-c', nargs=1,
help='Casa parameter.')
parser.add_argument('--section', nargs=1, type=str, default=['uvcontsub'],
help='Section in config file')
parser.add_argument('--eb', nargs=1, type=int, default=[None],
help='EB number')
parser.add_argument('--noredo', action='store_true',
help='Do not redo if files exists')
parser.add_argument('configfile', nargs=1, action=utils.NormalizePath,
help='Configuration file')
parser.add_argument('uvdata', nargs=1, action=utils.NormalizePath,
help='uv data ms')
parser.add_argument('chans', nargs='*', action=utils.NormalizePath,
help='Channel list files')
parser.set_defaults(pipe=[utils.verify_args, utils.load_config,
utils.load_chan_list, run_uvcontsub, utils._run_cal],
config=config_default, calsect='lineapplycal', outvis=None,
fitspw=None)
args = parser.parse_args()
# Run steps in pipe
for step in args.pipe:
step(args)
if __name__=="__main__":
main() | run_uvcontsub.py | import argparse
import os
from ConfigParser import ConfigParser
# Local utils
aux = os.path.dirname(sys.argv[2])
sys.path.insert(0, aux)
import casa_utils as utils
"""Perform uvcontsub and apply calibration table if required
The input for applycal can be configured in the `lineapplycal` (default)
section of the configuration file. For observations with more than one EB, the
command line argument `eb` can be used. To specify the table for each EB, 2
methods are available:
1. Define a section `lineapplycal<i>` with `i` the EB number
2. Comma separated values for each EB. If multiple table are used per EB,
space is used per EB, e.g `caltable = cal1EB1 cal2EB1, cal1EB2 ...`. With
the exception of the `interp` parameter, which uses semi-colon separated
values.
"""
def run_uvcontsub(args):
args.outvis = args.uvdata[0]+'.contsub'
if args.noredo and os.path.isdir(args.outvis):
casalog.post('Skipping uvcontsub')
else:
uvcontsub(vis=args.uvdata[0], fitspw=args.fitspw, want_cont=False,
combine='spw', excludechans=True, fitorder=1)
def main():
# Configuration file default values
config_default = {'calwt':'false', 'interp':'linear', 'flagbackup':'false',
'spwmap':''}
# Command line options
parser = argparse.ArgumentParser()
parser.add_argument('-c', nargs=1,
help='Casa parameter.')
parser.add_argument('--section', nargs=1, type=str, default=['uvcontsub'],
help='Section in config file')
parser.add_argument('--eb', nargs=1, type=int, default=[None],
help='EB number')
parser.add_argument('--noredo', action='store_true',
help='Do not redo if files exists')
parser.add_argument('configfile', nargs=1, action=utils.NormalizePath,
help='Configuration file')
parser.add_argument('uvdata', nargs=1, action=utils.NormalizePath,
help='uv data ms')
parser.add_argument('chans', nargs='*', action=utils.NormalizePath,
help='Channel list files')
parser.set_defaults(pipe=[utils.verify_args, utils.load_config,
utils.load_chan_list, run_uvcontsub, utils._run_cal],
config=config_default, calsect='lineapplycal', outvis=None,
fitspw=None)
args = parser.parse_args()
# Run steps in pipe
for step in args.pipe:
step(args)
if __name__=="__main__":
main() | 0.440469 | 0.330687 |
from opt.lowrank import LowRankOp
from .base_solver import *
from opt.utils import get_size
from opt import PruningOp, SPruningOp, BertQuantizeOp
from misc.train_bert import get_bert_FIM
import matplotlib.pyplot as plt
import pickle
import os
from os.path import exists
class BaselineSolver(BaseSolver):
def __init__(self, net, ops, task, configs=None, tokenizer=None, logger=None):
super().__init__(net, ops)
self.configs = configs
self.tokenizer = tokenizer
self.logger = logger
self.task = task
self.get_all_layer_profile_and_cache()
def get_profile(self, layer_list: list):
profile = {}
storage = {}
loss = {}
for layer_name in layer_list:
for Op in self.ops:
op = Op(self.net)
if layer_name in op.operatable:
FIM = get_bert_FIM(self.configs, self.net, self.tokenizer, layer_name, self.logger)
if isinstance(op, PruningOp) or isinstance(op, SPruningOp):
for rate in np.arange(0.00, 1.05, 0.05):
_, diff, storage_save = op.apply([layer_name], amount=rate, with_profile=True)
obj = (diff[layer_name] ** 2 * FIM).sum()
name = f"{op.op_name}_{layer_name}_{rate:.2f}"
loss[name] = obj
storage[name] = storage_save[layer_name]
profile[name] = storage[name] / (loss[name] + 1e-12)
elif isinstance(op, BertQuantizeOp):
op.model.to("cpu")
for mode in [None, "fbgemm"]:
op.reset()
op.set_config(mode)
_, diff, storage_save = op.apply([layer_name], with_profile=True)
obj = (diff[layer_name] ** 2 * FIM).sum()
name = f"{layer_name}_{op.op_name}_{op.mode}"
loss[name] = obj
storage[name] = storage_save[layer_name]
profile[name] = storage[name] / (loss[name] + 1e-12)
elif isinstance(op, LowRankOp):
for r in np.arange(50, 500, 50):
_, diff, storage_save = op.apply([layer_name], rank=r, with_profile=True)
obj = (diff[layer_name] ** 2 * FIM).sum()
name = f"{op.op_name}_{layer_name}_{r:.2f}"
loss[name] = obj
storage[name] = storage_save[layer_name]
profile[name] = storage[name] / (loss[name] + 1e-12)
return profile, storage, loss
def get_all_layer_profile_and_cache(self):
data_dir = "results/profileData/"
try:
self.profile = pickle.load(open(data_dir + f"{self.task}-profile.p", "rb"))
self.storage = pickle.load(open(data_dir + f"{self.task}-storage.p", "rb"))
self.loss = pickle.load(open(data_dir + f"{self.task}-loss.p", "rb"))
except:
if not exists(data_dir):
os.mkdir(data_dir)
self.profile, self.storage, self.loss = self.get_profile(self.operatable)
pickle.dump(self.profile, open(data_dir + f"{self.task}-profile.p", "wb"))
pickle.dump(self.storage, open(data_dir + f"{self.task}-storage.p", "wb"))
pickle.dump(self.loss, open(data_dir + f"{self.task}-loss.p", "wb"))
op = self.ops[0](self.net)
self.model_size = sum([self.storage[f"upruning_{layer_name}_0.00"] for layer_name in op.operatable])
def get_solution(self, storage_threshold):
layer_list = self.operatable
op = "upruning"
total_storage = {}
total_loss = {}
for k, v in self.loss.items():
op_name, layer_name, attrs = k.split("_")
if op_name != op:
continue
v = np.log(v)
s = self.storage[k]
total_storage[attrs] = total_storage.get(attrs, 0) + s
total_loss[attrs] = total_loss.get(attrs, 0) + v
best_loss = 0
best_rate = 0
for a, v in total_storage.items():
print(a, v, total_loss[a])
if v <= storage_threshold and total_loss[a] < best_loss:
best_loss = total_loss[a]
best_rate = a
print(f"best rate is: {best_rate}")
solution = []
for layer in layer_list:
name = "_".join([layer, op, str(best_rate)])
solution.append(name)
return solution | solver/baseline_solver.py | from opt.lowrank import LowRankOp
from .base_solver import *
from opt.utils import get_size
from opt import PruningOp, SPruningOp, BertQuantizeOp
from misc.train_bert import get_bert_FIM
import matplotlib.pyplot as plt
import pickle
import os
from os.path import exists
class BaselineSolver(BaseSolver):
def __init__(self, net, ops, task, configs=None, tokenizer=None, logger=None):
super().__init__(net, ops)
self.configs = configs
self.tokenizer = tokenizer
self.logger = logger
self.task = task
self.get_all_layer_profile_and_cache()
def get_profile(self, layer_list: list):
profile = {}
storage = {}
loss = {}
for layer_name in layer_list:
for Op in self.ops:
op = Op(self.net)
if layer_name in op.operatable:
FIM = get_bert_FIM(self.configs, self.net, self.tokenizer, layer_name, self.logger)
if isinstance(op, PruningOp) or isinstance(op, SPruningOp):
for rate in np.arange(0.00, 1.05, 0.05):
_, diff, storage_save = op.apply([layer_name], amount=rate, with_profile=True)
obj = (diff[layer_name] ** 2 * FIM).sum()
name = f"{op.op_name}_{layer_name}_{rate:.2f}"
loss[name] = obj
storage[name] = storage_save[layer_name]
profile[name] = storage[name] / (loss[name] + 1e-12)
elif isinstance(op, BertQuantizeOp):
op.model.to("cpu")
for mode in [None, "fbgemm"]:
op.reset()
op.set_config(mode)
_, diff, storage_save = op.apply([layer_name], with_profile=True)
obj = (diff[layer_name] ** 2 * FIM).sum()
name = f"{layer_name}_{op.op_name}_{op.mode}"
loss[name] = obj
storage[name] = storage_save[layer_name]
profile[name] = storage[name] / (loss[name] + 1e-12)
elif isinstance(op, LowRankOp):
for r in np.arange(50, 500, 50):
_, diff, storage_save = op.apply([layer_name], rank=r, with_profile=True)
obj = (diff[layer_name] ** 2 * FIM).sum()
name = f"{op.op_name}_{layer_name}_{r:.2f}"
loss[name] = obj
storage[name] = storage_save[layer_name]
profile[name] = storage[name] / (loss[name] + 1e-12)
return profile, storage, loss
def get_all_layer_profile_and_cache(self):
data_dir = "results/profileData/"
try:
self.profile = pickle.load(open(data_dir + f"{self.task}-profile.p", "rb"))
self.storage = pickle.load(open(data_dir + f"{self.task}-storage.p", "rb"))
self.loss = pickle.load(open(data_dir + f"{self.task}-loss.p", "rb"))
except:
if not exists(data_dir):
os.mkdir(data_dir)
self.profile, self.storage, self.loss = self.get_profile(self.operatable)
pickle.dump(self.profile, open(data_dir + f"{self.task}-profile.p", "wb"))
pickle.dump(self.storage, open(data_dir + f"{self.task}-storage.p", "wb"))
pickle.dump(self.loss, open(data_dir + f"{self.task}-loss.p", "wb"))
op = self.ops[0](self.net)
self.model_size = sum([self.storage[f"upruning_{layer_name}_0.00"] for layer_name in op.operatable])
def get_solution(self, storage_threshold):
layer_list = self.operatable
op = "upruning"
total_storage = {}
total_loss = {}
for k, v in self.loss.items():
op_name, layer_name, attrs = k.split("_")
if op_name != op:
continue
v = np.log(v)
s = self.storage[k]
total_storage[attrs] = total_storage.get(attrs, 0) + s
total_loss[attrs] = total_loss.get(attrs, 0) + v
best_loss = 0
best_rate = 0
for a, v in total_storage.items():
print(a, v, total_loss[a])
if v <= storage_threshold and total_loss[a] < best_loss:
best_loss = total_loss[a]
best_rate = a
print(f"best rate is: {best_rate}")
solution = []
for layer in layer_list:
name = "_".join([layer, op, str(best_rate)])
solution.append(name)
return solution | 0.590189 | 0.188716 |
class Solution:
    """LeetCode 64 "Minimum Path Sum": minimal top-left -> bottom-right path
    sum in a grid, moving only right or down. Two DP variants are provided.
    """

    def _minPathSum(self, grid):
        """Flat-array (1-D) DP variant.

        :type grid: List[List[int]]
        :rtype: int

        Bug fixed: the original took ``dp[i + 1]`` unconditionally as the
        "right neighbour", so at the last column of a row it read the first
        cell of the NEXT row (already computed), letting paths "wrap around"
        the right edge and produce answers that were too small.
        """
        import sys
        n_row = len(grid)
        if n_row == 0 or len(grid[0]) == 0:
            return 0
        n_col = len(grid[0])
        size = n_row * n_col
        dp = [0] * size
        dp[size - 1] = grid[n_row - 1][n_col - 1]  # destination cell
        for i in range(size - 2, -1, -1):
            i_row, i_col = divmod(i, n_col)
            # Out-of-grid neighbours contribute an "infinite" cost.
            down = dp[i + n_col] if i_row + 1 < n_row else sys.maxsize
            right = dp[i + 1] if i_col + 1 < n_col else sys.maxsize
            dp[i] = grid[i_row][i_col] + min(down, right)
        return dp[0]

    def minPathSum(self, grid):
        """2-D DP with a sentinel boundary row/column.

        :type grid: List[List[int]]
        :rtype: int
        """
        if not grid or not grid[0]:
            return 0  # consistent with _minPathSum on empty input
        n_row = len(grid)
        n_col = len(grid[0])
        # Extend the dp matrix with a pseudo boundary to eliminate edge cases;
        # the virtual cell below the destination is seeded with 0 so the
        # destination picks up exactly its own grid value.
        dp = [[sys.maxsize for _ in range(n_col + 1)] for _ in range(n_row + 1)]
        dp[n_row - 1][n_col] = 0
        for i_row in range(n_row - 1, -1, -1):
            for i_col in range(n_col - 1, -1, -1):
                min_path = min(dp[i_row + 1][i_col], dp[i_row][i_col + 1])
                dp[i_row][i_col] = grid[i_row][i_col] + min_path
        return dp[0][0]
def print_matrix(m):
    """Print a matrix, one row per output line."""
    for row in m:
        print(row)
def verify(case_name, test_input, test_target, test_func):
    """Run *test_func* on the unpacked *test_input*, echo the outcome, and
    assert that it equals *test_target* (unit-test helper).
    """
    result = test_func(*test_input)
    print(case_name, test_input, ' target:', test_target,
          ' output:', result)
    assert test_target == result
import sys
if __name__ == "__main__":
    solution = Solution()
    # (case name, args tuple, expected result) triples for minPathSum.
    cases = [
        ('test case 1:', ([[1, 3, 1], [1, 5, 1], [4, 2, 1]],), 7),
        ('test case 2:', ([[1, 3, 1]],), 5),
        ('test case 3:', ([[1]],), 1),
    ]
    for case_name, case_input, case_target in cases:
        verify(case_name, case_input, case_target, solution.minPathSum)
def _minPathSum(self, grid):
    """Flat-array (1-D) DP variant of minimum path sum.

    :type grid: List[List[int]]
    :rtype: int

    Bug fixed: the original took ``dp[i + 1]`` unconditionally as the
    "right neighbour", so at the last column of a row it read the first
    cell of the NEXT row (already computed), letting paths "wrap around"
    the right edge and produce answers that were too small.
    """
    import sys
    n_row = len(grid)
    if n_row == 0 or len(grid[0]) == 0:
        return 0
    n_col = len(grid[0])
    size = n_row * n_col
    dp = [0] * size
    dp[size - 1] = grid[n_row - 1][n_col - 1]  # destination cell
    for i in range(size - 2, -1, -1):
        i_row, i_col = divmod(i, n_col)
        # Out-of-grid neighbours contribute an "infinite" cost.
        down = dp[i + n_col] if i_row + 1 < n_row else sys.maxsize
        right = dp[i + 1] if i_col + 1 < n_col else sys.maxsize
        dp[i] = grid[i_row][i_col] + min(down, right)
    return dp[0]
def minPathSum(self, grid):
    """Return the minimal right/down path sum from top-left to bottom-right.

    :type grid: List[List[int]]
    :rtype: int
    """
    rows, cols = len(grid), len(grid[0])
    # Sentinel-padded DP table; the virtual cell below the target is 0 so
    # the target cell picks up exactly its own grid value.
    dp = [[sys.maxsize] * (cols + 1) for _ in range(rows + 1)]
    dp[rows - 1][cols] = 0
    for r in reversed(range(rows)):
        for c in reversed(range(cols)):
            dp[r][c] = grid[r][c] + min(dp[r + 1][c], dp[r][c + 1])
    return dp[0][0]
def print_matrix(m):
    """Print each row of the matrix on its own line."""
    for row in m:
        print(row)
def verify(case_name, test_input, test_target, test_func):
    """Unit-test helper: call *test_func* with the unpacked *test_input*,
    print the outcome, and assert that it equals *test_target*.
    """
    result = test_func(*test_input)
    print(case_name, test_input, ' target:', test_target,
          ' output:', result)
    assert test_target == result
import sys
if __name__ == "__main__":
    solution = Solution()
    # Each entry: (case name, args tuple, expected minimum path sum).
    cases = [
        ('test case 1:', ([[1, 3, 1], [1, 5, 1], [4, 2, 1]],), 7),
        ('test case 2:', ([[1, 3, 1]],), 5),
        ('test case 3:', ([[1]],), 1),
    ]
    for case_name, case_input, case_target in cases:
        verify(case_name, case_input, case_target, solution.minPathSum)
from ClassesDAO.ClienteDAO import ClienteDAO
from ClassesDAO.FuncionarioDAO import FuncionarioDAO
from ClassesDAO.LivroDAO import LivroDAO
from ClassesDAO.CategoriaDAO import CategoriaDAO
# Class that implements the application's screens; it is also the class that starts the application.
class AluguelPython(object):
    """Console UI for the Python rental system.

    Each ``menu_*`` method prints a screen, reads an option with ``input`` and
    dispatches to the next screen, always returning to its own menu afterwards.
    NOTE(review): navigation is implemented by mutual recursion, so a very long
    session keeps growing the call stack; a non-numeric answer to
    ``int(input(...))`` raises ValueError and aborts the program.
    """

    # Main menu of the application.
    def menu_principal(self):
        """Show the main menu and dispatch to the chosen sub-menu (0 exits)."""
        print("==========================================")
        print("Sistema de Alugueis Python")
        print("==========================================")
        print("Opção\tDescrição")
        print("------------------------------------------")
        print("0\t\tSair da Aplicação")
        print("1\t\tOperações de Clientes")
        print("2\t\tOperações de Funcionario")
        print("3\t\tOperações de Livro")
        print("4\t\tOperações de Categoria")
        print("------------------------------------------")
        opcao = int(input("Digite uma opção [0-4]: "))
        if opcao == 0:
            return
        if opcao == 1:
            self.menu_clientes()
            return
        if opcao == 2:
            self.menu_funcionarios()
            return
        if opcao == 3:
            self.menu_livros()
            return
        if opcao == 4:
            self.menu_categorias()
            return
        # Any other number: redisplay the main menu.
        self.menu_principal()

    # Menu that shows the client operations.
    def menu_clientes(self):
        """Show the client menu and dispatch to the chosen client action."""
        print("==========================================")
        print("Operações do cliente")
        print("==========================================")
        print("Opção\tDescrição")
        print("------------------------------------------")
        print("0\t\tVoltar ao Menu Principal")
        print("1\t\tListar Todos os Clientes Existentes")
        print("2\t\tListar um Cliente Existente")
        print("3\t\tInserir um Novo Cliente")
        print("4\t\tAtualizar um Cliente Existente")
        print("5\t\tRemover um Cliente Existente")
        print("------------------------------------------")
        opcao = int(input("Digite uma opção [0-5]: "))
        if opcao == 0:
            self.menu_principal()
            return
        if opcao == 1:
            self.menu_listar_todos_clientes()
            return
        if opcao == 2:
            self.menu_listar_um_cliente()
            return
        if opcao == 3:
            self.menu_inserir_um_cliente()
            return
        if opcao == 4:
            self.menu_atualizar_um_cliente()
            return
        if opcao == 5:
            self.menu_remover_um_cliente()
            return
        self.menu_clientes()

    # Screen that lists every registered client.
    def menu_listar_todos_clientes(self):
        """List all clients via ClienteDAO, then return to the client menu."""
        print("==========================================")
        print("Listar Todos os Clientes Existentes")
        print("==========================================")
        clienteDAO = ClienteDAO()
        clientes = clienteDAO.listas_todas()
        for c in clientes:
            print("*** Código: " + str(c.codigo) + " - Nome: " + c.nome + " - CPF: " + str(c.cpf) + " - Endereco: " + c.endereco + " ***")
        print("*** " + str(len(clientes)) + " clientes(s) encontrada(s) ***")
        self.menu_clientes()

    # Screen that lists one existing client by code.
    def menu_listar_um_cliente(self):
        """Look up a single client by its code and print it."""
        print("==========================================")
        print("Listar um Cliente Existente")
        print("==========================================")
        codigo = int(input("Digite o código do cliente: "))
        clienteDAO = ClienteDAO()
        cliente = clienteDAO.listar(codigo)
        if cliente is not None:
            print("*** Código: " + str(cliente.codigo) + " - Nome: " + cliente.nome + " - CPF: " + str(cliente.cpf) + " - Endereco: " + cliente.endereco + " ***")
        else:
            # NOTE(review): message says "funcionario" (employee) but this is the
            # client screen — looks like a copy/paste slip in the original text.
            print("*** Não foi possível localizar este funcionario ***")
        self.menu_clientes()

    # Screen that inserts a new client.
    def menu_inserir_um_cliente(self):
        """Prompt for the new client's fields and insert it via ClienteDAO."""
        print("==========================================")
        print("Inserir um Novo Cliente")
        print("==========================================")
        codigo = int(input("Digite o código do novo cliente: "))
        nome = input("Digite o nome do novo cliente: ")
        cpf = input("Digite o cpf do novo cliente: ")
        endereco = input("Digite o endereco do novo cliente: ")
        clienteDAO = ClienteDAO()
        sucesso = clienteDAO.inserir(codigo, nome, cpf, endereco)
        if sucesso == True:
            print("*** Cliente inserido com sucesso ***")
        else:
            print("*** Não foi possível inserir este cliente ***")
        self.menu_clientes()

    # Screen that updates an existing client's data.
    def menu_atualizar_um_cliente(self):
        """Prompt for a client's code and new fields, then update it."""
        print("==========================================")
        print("Atualizar um Cliente Existente")
        print("==========================================")
        codigo = int(input("Digite o código do cliente: "))
        nome = input("Digite o novo nome do cliente: ")
        cpf = input("Digite o novo cpf do cliente: ")
        endereco = input("Digite o novo endereco do cliente: ")
        clienteDAO = ClienteDAO()
        sucesso = clienteDAO.atualizar(codigo, nome, cpf, endereco)
        if sucesso == True:
            print("*** Cliente atualizado com sucesso ***")
        else:
            print("*** Não foi possível atualizar este cliente ***")
        self.menu_clientes()

    # Screen that removes a client.
    def menu_remover_um_cliente(self):
        """Prompt for a client's code and remove it via ClienteDAO."""
        print("==========================================")
        print("Remover um Cliente Existente")
        print("==========================================")
        codigo = int(input("Digite o código do cliente: "))
        clienteDAO = ClienteDAO()
        sucesso = clienteDAO.remover(codigo)
        if sucesso == True:
            print("*** Cliente removido com sucesso ***")
        else:
            print("*** Não foi possível remover este cliente ***")
        self.menu_clientes()

    # --- FUNCIONARIO (employee) operations ------------------------------------
    def menu_funcionarios(self):
        """Show the employee menu and dispatch to the chosen employee action."""
        print("==========================================")
        print("Operações do Funcionario")
        print("==========================================")
        print("Opção\tDescrição")
        print("------------------------------------------")
        print("0\t\tVoltar ao Menu Principal")
        print("1\t\tListar Todos os Funcionarios existentes")
        print("2\t\tInserir um Funcionarios")
        print("3\t\tAtualizar um Funcionarios")
        print("4\t\tRemover um Funcionarios")
        print("------------------------------------------")
        opcao = int(input("Digite uma opção [0-4]: "))
        if opcao == 0:
            self.menu_principal()
            return
        if opcao == 1:
            self.menu_listar_todos_funcionarios()
            return
        if opcao == 2:
            self.menu_inserir_um_funcionario()
            return
        if opcao == 3:
            self.menu_atualizar_um_funcionario()
            return
        if opcao == 4:
            self.menu_remover_um_funcionario()
            return
        self.menu_funcionarios()

    # Screen that lists every employee.
    def menu_listar_todos_funcionarios(self):
        """List all employees via FuncionarioDAO, then return to the menu."""
        print("==========================================")
        print("Listar Todos os Funcionarios Existentes")
        print("==========================================")
        funcionarioDAO = FuncionarioDAO()
        funcionarios = funcionarioDAO.listas_todas()
        for f in funcionarios:
            print("*** Código: " + str(f.codigo) + " - Nome: " + f.nome + " - CPF: " + str(f.cpf) + " - Endereco: " + f.endereco + " - Salario: " + str(f.salario) +"***")
        print("*** " + str(len(funcionarios)) + " pessoa(s) encontrada(s) ***")
        self.menu_funcionarios()

    # Screen that inserts a new employee.
    def menu_inserir_um_funcionario(self):
        """Prompt for the new employee's fields and insert it."""
        print("==========================================")
        print("Inserir um novo funcionario")
        print("==========================================")
        codigo = int(input("Digite o código do novo funcionario: "))
        nome = input("Digite o nome do novo funcionario: ")
        cpf = input("Digite o cpf do novo funcionario: ")
        endereco = input("Digite o endereco do novo funcionario: ")
        salario = input("Digite o salario do novo funcionario: ")
        funcionarioDAO = FuncionarioDAO()
        sucesso = funcionarioDAO.inserir(codigo, nome, cpf, endereco,salario)
        if sucesso == True:
            print("*** Funcionario inserido com sucesso ***")
        else:
            print("*** Não foi possível inserir esta pessoa ***")
        self.menu_funcionarios()

    # Screen that updates an existing employee's data.
    def menu_atualizar_um_funcionario(self):
        """Prompt for an employee's code and new fields, then update it."""
        print("==========================================")
        print("Atualizar um Funcionario existente")
        print("==========================================")
        codigo = int(input("Digite o código do Funcionario: "))
        nome = input("Digite o novo nome do Funcionario: ")
        cpf = input("Digite o novo cpf do Funcionario: ")
        endereco = input("Digite o novo endereco do Funcionario: ")
        salario = input("Digite o novo salario do Funcionario: ")
        funcionarioDAO = FuncionarioDAO()
        sucesso = funcionarioDAO.atualizar(codigo, nome, cpf, endereco,salario)
        if sucesso == True:
            print("*** Funcionario atualizado com sucesso ***")
        else:
            print("*** Não foi possível atualizar este funcionario ***")
        self.menu_funcionarios()

    # Screen that removes an existing employee.
    def menu_remover_um_funcionario(self):
        """Prompt for an employee's code and remove it."""
        print("==========================================")
        print("Remover um Funcionario Existente")
        print("==========================================")
        codigo = int(input("Digite o código da pessoa: "))
        funcionarioDAO = FuncionarioDAO()
        sucesso = funcionarioDAO.remover(codigo)
        if sucesso == True:
            print("*** Funcionario removido com sucesso ***")
        else:
            print("*** Não foi possível remover este funcionario ***")
        self.menu_funcionarios()

    # --- LIVRO (book) operations ----------------------------------------------
    def menu_livros(self):
        """Show the book menu and dispatch to the chosen book action."""
        print("==========================================")
        print("Operações de livro")
        print("==========================================")
        print("Opção\tDescrição")
        print("------------------------------------------")
        print("0\t\tVoltar ao Menu Principal")
        print("1\t\tListar Todos os Livros")
        print("2\t\tListar Livro Existente")
        print("3\t\tListar as Categorias de um Livro existente")
        print("4\t\tInserir um novo livro")
        print("5\t\tInserir um autor para um livro")
        print("6\t\tAtualizar um livro existente")
        print("7\t\tRemover um livro existente")
        print("------------------------------------------")
        opcao = int(input("Digite uma opção [0-7]: "))
        if opcao == 0:
            self.menu_principal()
            return
        if opcao == 1:
            self.menu_listar_todos_livros()
            return
        if opcao == 2:
            self.menu_listar_um_livro()
            return
        if opcao == 3:
            self.menu_listar_categorias_um_livro()
            return
        if opcao == 4:
            self.menu_inserir_um_livro()
            return
        if opcao == 5:
            self.menu_inserir_um_autor()
            return
        if opcao == 6:
            self.menu_atualizar_um_livro()
            return
        if opcao == 7:
            self.menu_remover_um_livro()
            return
        self.menu_livros()

    def menu_listar_todos_livros(self):
        """List every book via LivroDAO, then return to the book menu."""
        print("==========================================")
        print("Listar Todos os Livros Existentes")
        print("==========================================")
        livroDAO = LivroDAO()
        livros = livroDAO.listar_todos()
        for l in livros:
            print("*** Código: " + str(l.codigo) + " - Titulo: " + l.titulo + " - Ano: " + str(l.ano) + " - Edicao: " + str(l.edicao) + " - Editora: " + l.editora + " - Quantidade de paginas: " + str(l.quant_paginas) + "***")
        print("*** " + str(len(livros)) + " livro(s) encontrado(s) ***")
        self.menu_livros()

    def menu_listar_um_livro(self):
        """Look up one book by its code and print it."""
        print("==========================================")
        print("Listar um Livro Existente")
        print("==========================================")
        codigo = int(input("Digite o código do livro: "))
        livroDAO = LivroDAO()
        livro = livroDAO.listar_um_livro(codigo)
        if livro is not None:
            print("*** Código: " + str(livro.codigo) + " - Titulo: " + livro.titulo + " - Ano: " + str(livro.ano) + " - Edicao: " + str(livro.edicao) + " - Editora: " + livro.editora + " - Quantidade de paginas: " + str(livro.quant_paginas) + "***")
        else:
            print("*** Não foi possível localizar este livro ***")
        self.menu_livros()

    def menu_listar_categorias_um_livro(self):
        """Print a book's data together with all of its categories."""
        print("==========================================")
        print("Listar as Categorias de um Livro")
        print("==========================================")
        livroId = int(input('Entre o codigo do livro: '))
        print("==========================================")
        livroDAO = LivroDAO()
        livro = livroDAO.listar_categorias(livroId)
        if livro is not None:
            print("Dados do Livro de Codigo = %s" % livroId)
            print("Livro - Codigo: %s - Titulo: %s - Ano: %s - Edicao: %s - Editora: %s - Paginas: %s - Qtd. Categorias: %s" % (
            livro.codigo, livro.titulo, livro.ano, livro.edicao, livro.editora, livro.quant_paginas,
            len(livro.categorias)))
            print("Categorias do Livro: %s" % livro.titulo)
            for c in livro.categorias:
                print("Codigo: %s - Descricao: %s" % (c.codigo, c.descricao))
        else:
            print("*** Não foi possível localizar este livro ***")
        self.menu_livros()

    def menu_inserir_um_livro(self):
        """Prompt for the new book's fields and insert it via LivroDAO."""
        print("==========================================")
        print("Inserir um novo livro")
        print("==========================================")
        codigo = int(input("Digite o código do novo livro: "))
        titulo = input("Digite o titulo do novo livro: ")
        ano = int(input("Digite o ano do novo livro: "))
        edicao = int(input("Digite a edicao do novo livro: "))
        editora = input("Digite a editora do novo livro: ")
        quant_paginas = int(input("Digite a quantidade de paginas do novo livro: "))
        livroDAO = LivroDAO()
        sucesso = livroDAO.inserir(codigo, titulo, ano,edicao,editora,quant_paginas)
        if sucesso == True:
            print("*** Livro inserido com sucesso ***")
        else:
            print("*** Não foi possível inserir este livro ***")
        self.menu_livros()

    def menu_inserir_um_autor(self):
        """Attach an author (by name) to an existing book."""
        print("==========================================")
        print("Inserir um autor para um livro")
        print("==========================================")
        codigo = int(input("Digite o código do livro: "))
        autor = input("Digite o nome do autor: ")
        livroDAO = LivroDAO()
        sucesso = livroDAO.inserir_autor(codigo, autor)
        if sucesso == True:
            print("*** Autor inserido com sucesso para o livro ***")
        else:
            print("*** Não foi possível inserir o autor para este livro ***")
        self.menu_livros()

    def menu_atualizar_um_livro(self):
        """Prompt for a book's code and new fields, then update it."""
        print("==========================================")
        print("Atualizar um Livro existente")
        print("==========================================")
        codigo = int(input("Digite o código do Livro a atualizar: "))
        titulo = input("Digite o novo titulo do Livro: ")
        ano = int(input("Digite o novo ano do Livro: "))
        edicao = int(input("Digite o novo numero de edicao do Livro: "))
        editora = input("Digite a nova editora do Livro: ")
        quant_paginas = int(input("Digite a nova quantidade de paginas do Livro: "))
        livroDAO = LivroDAO()
        sucesso = livroDAO.atualizar(codigo, titulo,ano,edicao,editora,quant_paginas)
        if sucesso == True:
            print("*** Livro atualizado com sucesso ***")
        else:
            print("*** Não foi possível atualizar este livro ***")
        self.menu_livros()

    def menu_remover_um_livro(self):
        """Prompt for a book's code and remove it (fails while it is rented)."""
        print("==========================================")
        print("Remover um Livro Existente")
        print("==========================================")
        print("DICA: Você nao conseguirá remover um livro se este estiver em algum aluguel")
        codigo = int(input("Digite o código do livro a ser removido: "))
        livroDAO = LivroDAO()
        sucesso = livroDAO.remover(codigo)
        if sucesso == True:
            print("*** Livro removido com sucesso ***")
        else:
            print("*** Não foi possível remover este livro ***")
        self.menu_livros()

    # --- CATEGORIA (category) operations --------------------------------------
    def menu_categorias(self):
        """Show the category menu and dispatch to the chosen category action."""
        print("==========================================")
        print("Operações de categorias")
        print("==========================================")
        print("Opção\tDescrição")
        print("------------------------------------------")
        print("0\t\tVoltar ao Menu Principal")
        print("1\t\tListar uma categoria existente")
        print("2\t\tListar todos os livros de uma Categoria")
        print("------------------------------------------")
        opcao = int(input("Digite uma opção [0-2]: "))
        if opcao == 0:
            self.menu_principal()
            return
        if opcao == 1:
            self.menu_listar_uma_categoria()
            return
        if opcao == 2:
            self.menu_listar_livros_de_uma_categoria()
            return
        self.menu_categorias()

    def menu_listar_uma_categoria(self):
        """Look up one category by its code and print it."""
        print("==========================================")
        print("Listar uma Categoria Existente")
        print("==========================================")
        codigo = int(input("Digite o código da categoria: "))
        categoriaDAO = CategoriaDAO()
        categoria = categoriaDAO.listar_uma_categoria(codigo)
        if categoria is not None:
            print("*** Código: " + str(categoria.codigo) + " - Descricao: " + categoria.descricao + " ***")
        else:
            print("*** Não foi possível localizar esta categoria ***")
        self.menu_categorias()

    def menu_listar_livros_de_uma_categoria(self):
        """Print a category and every book that belongs to it."""
        print("==========================================")
        print("Listar todos os livros de uma Categoria")
        print("==========================================")
        codigo = int(input('Digite o codigo da categoria: '))
        print("==========================================")
        categoriaDAO = CategoriaDAO()
        categoria = categoriaDAO.listar_livros_uma_categoria(codigo)
        if categoria is not None:
            print("Categoria selecionada: Codigo: %s - Descricao: %s - Quantidade de Livros: %s" % (categoria.codigo, categoria.descricao, len(categoria.livros)))
            for l in categoria.livros:
                print("Codigo do livro: %s - Titulo: %s - Ano: %s - Edicao: %s - Editora: %s - Paginas: %s" % (l.codigo, l.titulo,l.ano,l.edicao,l.editora, l.quant_paginas))
        else:
            print("*** Não foi possível localizar esta categoria ***")
        self.menu_categorias()
# Entry point: start the application at the main menu.
if __name__ == "__main__":
    app = AluguelPython()
    app.menu_principal()
from ClassesDAO.FuncionarioDAO import FuncionarioDAO
from ClassesDAO.LivroDAO import LivroDAO
from ClassesDAO.CategoriaDAO import CategoriaDAO
# Classe que implementa as telas da aplicação e também é classe que inicia a aplicação
class AluguelPython(object):
# Main menu of the application.
def menu_principal(self):
    """Show the main menu and dispatch to the chosen sub-menu (0 exits)."""
    print("==========================================")
    print("Sistema de Alugueis Python")
    print("==========================================")
    print("Opção\tDescrição")
    print("------------------------------------------")
    print("0\t\tSair da Aplicação")
    print("1\t\tOperações de Clientes")
    print("2\t\tOperações de Funcionario")
    print("3\t\tOperações de Livro")
    print("4\t\tOperações de Categoria")
    print("------------------------------------------")
    opcao = int(input("Digite uma opção [0-4]: "))
    if opcao == 0:
        return
    if opcao == 1:
        self.menu_clientes()
        return
    if opcao == 2:
        self.menu_funcionarios()
        return
    if opcao == 3:
        self.menu_livros()
        return
    if opcao == 4:
        self.menu_categorias()
        return
    # Any other number: redisplay the main menu (recursive re-prompt).
    self.menu_principal()
# Menu that shows the client operations.
def menu_clientes(self):
    """Show the client menu and dispatch to the chosen client action."""
    print("==========================================")
    print("Operações do cliente")
    print("==========================================")
    print("Opção\tDescrição")
    print("------------------------------------------")
    print("0\t\tVoltar ao Menu Principal")
    print("1\t\tListar Todos os Clientes Existentes")
    print("2\t\tListar um Cliente Existente")
    print("3\t\tInserir um Novo Cliente")
    print("4\t\tAtualizar um Cliente Existente")
    print("5\t\tRemover um Cliente Existente")
    print("------------------------------------------")
    opcao = int(input("Digite uma opção [0-5]: "))
    if opcao == 0:
        self.menu_principal()
        return
    if opcao == 1:
        self.menu_listar_todos_clientes()
        return
    if opcao == 2:
        self.menu_listar_um_cliente()
        return
    if opcao == 3:
        self.menu_inserir_um_cliente()
        return
    if opcao == 4:
        self.menu_atualizar_um_cliente()
        return
    if opcao == 5:
        self.menu_remover_um_cliente()
        return
    self.menu_clientes()
# Screen that lists every registered client.
def menu_listar_todos_clientes(self):
    """List all clients via ClienteDAO, then return to the client menu."""
    print("==========================================")
    print("Listar Todos os Clientes Existentes")
    print("==========================================")
    clienteDAO = ClienteDAO()
    clientes = clienteDAO.listas_todas()
    for c in clientes:
        print("*** Código: " + str(c.codigo) + " - Nome: " + c.nome + " - CPF: " + str(c.cpf) + " - Endereco: " + c.endereco + " ***")
    print("*** " + str(len(clientes)) + " clientes(s) encontrada(s) ***")
    self.menu_clientes()
# Screen that lists one existing client by code.
def menu_listar_um_cliente(self):
    """Look up a single client by its code and print it."""
    print("==========================================")
    print("Listar um Cliente Existente")
    print("==========================================")
    codigo = int(input("Digite o código do cliente: "))
    clienteDAO = ClienteDAO()
    cliente = clienteDAO.listar(codigo)
    if cliente is not None:
        print("*** Código: " + str(cliente.codigo) + " - Nome: " + cliente.nome + " - CPF: " + str(cliente.cpf) + " - Endereco: " + cliente.endereco + " ***")
    else:
        # NOTE(review): message says "funcionario" (employee) on the client
        # screen — likely a copy/paste slip in the original text.
        print("*** Não foi possível localizar este funcionario ***")
    self.menu_clientes()
# Screen that inserts a new client.
def menu_inserir_um_cliente(self):
    """Prompt for the new client's fields and insert it via ClienteDAO."""
    print("==========================================")
    print("Inserir um Novo Cliente")
    print("==========================================")
    codigo = int(input("Digite o código do novo cliente: "))
    nome = input("Digite o nome do novo cliente: ")
    cpf = input("Digite o cpf do novo cliente: ")
    endereco = input("Digite o endereco do novo cliente: ")
    clienteDAO = ClienteDAO()
    sucesso = clienteDAO.inserir(codigo, nome, cpf, endereco)
    if sucesso == True:
        print("*** Cliente inserido com sucesso ***")
    else:
        print("*** Não foi possível inserir este cliente ***")
    self.menu_clientes()
# Screen that updates an existing client's data.
def menu_atualizar_um_cliente(self):
    """Prompt for a client's code and new fields, then update it."""
    print("==========================================")
    print("Atualizar um Cliente Existente")
    print("==========================================")
    codigo = int(input("Digite o código do cliente: "))
    nome = input("Digite o novo nome do cliente: ")
    cpf = input("Digite o novo cpf do cliente: ")
    endereco = input("Digite o novo endereco do cliente: ")
    clienteDAO = ClienteDAO()
    sucesso = clienteDAO.atualizar(codigo, nome, cpf, endereco)
    if sucesso == True:
        print("*** Cliente atualizado com sucesso ***")
    else:
        print("*** Não foi possível atualizar este cliente ***")
    self.menu_clientes()
# Screen that removes a client.
def menu_remover_um_cliente(self):
    """Prompt for a client's code and remove it via ClienteDAO."""
    print("==========================================")
    print("Remover um Cliente Existente")
    print("==========================================")
    codigo = int(input("Digite o código do cliente: "))
    clienteDAO = ClienteDAO()
    sucesso = clienteDAO.remover(codigo)
    if sucesso == True:
        print("*** Cliente removido com sucesso ***")
    else:
        print("*** Não foi possível remover este cliente ***")
    self.menu_clientes()
# --- FUNCIONARIO (employee) operations ----------------------------------------
def menu_funcionarios(self):
    """Show the employee menu and dispatch to the chosen employee action."""
    print("==========================================")
    print("Operações do Funcionario")
    print("==========================================")
    print("Opção\tDescrição")
    print("------------------------------------------")
    print("0\t\tVoltar ao Menu Principal")
    print("1\t\tListar Todos os Funcionarios existentes")
    print("2\t\tInserir um Funcionarios")
    print("3\t\tAtualizar um Funcionarios")
    print("4\t\tRemover um Funcionarios")
    print("------------------------------------------")
    opcao = int(input("Digite uma opção [0-4]: "))
    if opcao == 0:
        self.menu_principal()
        return
    if opcao == 1:
        self.menu_listar_todos_funcionarios()
        return
    if opcao == 2:
        self.menu_inserir_um_funcionario()
        return
    if opcao == 3:
        self.menu_atualizar_um_funcionario()
        return
    if opcao == 4:
        self.menu_remover_um_funcionario()
        return
    self.menu_funcionarios()
# Screen that lists every employee.
def menu_listar_todos_funcionarios(self):
    """List all employees via FuncionarioDAO, then return to the menu."""
    print("==========================================")
    print("Listar Todos os Funcionarios Existentes")
    print("==========================================")
    funcionarioDAO = FuncionarioDAO()
    funcionarios = funcionarioDAO.listas_todas()
    for f in funcionarios:
        print("*** Código: " + str(f.codigo) + " - Nome: " + f.nome + " - CPF: " + str(f.cpf) + " - Endereco: " + f.endereco + " - Salario: " + str(f.salario) +"***")
    print("*** " + str(len(funcionarios)) + " pessoa(s) encontrada(s) ***")
    self.menu_funcionarios()
# Screen that inserts a new employee.
def menu_inserir_um_funcionario(self):
    """Prompt for the new employee's fields and insert it."""
    print("==========================================")
    print("Inserir um novo funcionario")
    print("==========================================")
    codigo = int(input("Digite o código do novo funcionario: "))
    nome = input("Digite o nome do novo funcionario: ")
    cpf = input("Digite o cpf do novo funcionario: ")
    endereco = input("Digite o endereco do novo funcionario: ")
    salario = input("Digite o salario do novo funcionario: ")
    funcionarioDAO = FuncionarioDAO()
    sucesso = funcionarioDAO.inserir(codigo, nome, cpf, endereco,salario)
    if sucesso == True:
        print("*** Funcionario inserido com sucesso ***")
    else:
        print("*** Não foi possível inserir esta pessoa ***")
    self.menu_funcionarios()
# Screen that updates an existing employee's data.
def menu_atualizar_um_funcionario(self):
    """Prompt for an employee's code and new fields, then update it."""
    print("==========================================")
    print("Atualizar um Funcionario existente")
    print("==========================================")
    codigo = int(input("Digite o código do Funcionario: "))
    nome = input("Digite o novo nome do Funcionario: ")
    cpf = input("Digite o novo cpf do Funcionario: ")
    endereco = input("Digite o novo endereco do Funcionario: ")
    salario = input("Digite o novo salario do Funcionario: ")
    funcionarioDAO = FuncionarioDAO()
    sucesso = funcionarioDAO.atualizar(codigo, nome, cpf, endereco,salario)
    if sucesso == True:
        print("*** Funcionario atualizado com sucesso ***")
    else:
        print("*** Não foi possível atualizar este funcionario ***")
    self.menu_funcionarios()
# Screen that removes an existing employee.
def menu_remover_um_funcionario(self):
    """Prompt for an employee's code and remove it."""
    print("==========================================")
    print("Remover um Funcionario Existente")
    print("==========================================")
    codigo = int(input("Digite o código da pessoa: "))
    funcionarioDAO = FuncionarioDAO()
    sucesso = funcionarioDAO.remover(codigo)
    if sucesso == True:
        print("*** Funcionario removido com sucesso ***")
    else:
        print("*** Não foi possível remover este funcionario ***")
    self.menu_funcionarios()
# --- LIVRO (book) operations --------------------------------------------------
def menu_livros(self):
    """Show the book menu and dispatch to the chosen book action."""
    print("==========================================")
    print("Operações de livro")
    print("==========================================")
    print("Opção\tDescrição")
    print("------------------------------------------")
    print("0\t\tVoltar ao Menu Principal")
    print("1\t\tListar Todos os Livros")
    print("2\t\tListar Livro Existente")
    print("3\t\tListar as Categorias de um Livro existente")
    print("4\t\tInserir um novo livro")
    print("5\t\tInserir um autor para um livro")
    print("6\t\tAtualizar um livro existente")
    print("7\t\tRemover um livro existente")
    print("------------------------------------------")
    opcao = int(input("Digite uma opção [0-7]: "))
    if opcao == 0:
        self.menu_principal()
        return
    if opcao == 1:
        self.menu_listar_todos_livros()
        return
    if opcao == 2:
        self.menu_listar_um_livro()
        return
    if opcao == 3:
        self.menu_listar_categorias_um_livro()
        return
    if opcao == 4:
        self.menu_inserir_um_livro()
        return
    if opcao == 5:
        self.menu_inserir_um_autor()
        return
    if opcao == 6:
        self.menu_atualizar_um_livro()
        return
    if opcao == 7:
        self.menu_remover_um_livro()
        return
    self.menu_livros()
def menu_listar_todos_livros(self):
    """List every book via LivroDAO, then return to the book menu."""
    print("==========================================")
    print("Listar Todos os Livros Existentes")
    print("==========================================")
    livroDAO = LivroDAO()
    livros = livroDAO.listar_todos()
    for l in livros:
        print("*** Código: " + str(l.codigo) + " - Titulo: " + l.titulo + " - Ano: " + str(l.ano) + " - Edicao: " + str(l.edicao) + " - Editora: " + l.editora + " - Quantidade de paginas: " + str(l.quant_paginas) + "***")
    print("*** " + str(len(livros)) + " livro(s) encontrado(s) ***")
    self.menu_livros()
def menu_listar_um_livro(self):
    """Look up one book by its code and print it."""
    print("==========================================")
    print("Listar um Livro Existente")
    print("==========================================")
    codigo = int(input("Digite o código do livro: "))
    livroDAO = LivroDAO()
    livro = livroDAO.listar_um_livro(codigo)
    if livro is not None:
        print("*** Código: " + str(livro.codigo) + " - Titulo: " + livro.titulo + " - Ano: " + str(livro.ano) + " - Edicao: " + str(livro.edicao) + " - Editora: " + livro.editora + " - Quantidade de paginas: " + str(livro.quant_paginas) + "***")
    else:
        print("*** Não foi possível localizar este livro ***")
    self.menu_livros()
def menu_listar_categorias_um_livro(self):
    """Print a book's data together with all of its categories."""
    print("==========================================")
    print("Listar as Categorias de um Livro")
    print("==========================================")
    livroId = int(input('Entre o codigo do livro: '))
    print("==========================================")
    livroDAO = LivroDAO()
    livro = livroDAO.listar_categorias(livroId)
    if livro is not None:
        print("Dados do Livro de Codigo = %s" % livroId)
        print("Livro - Codigo: %s - Titulo: %s - Ano: %s - Edicao: %s - Editora: %s - Paginas: %s - Qtd. Categorias: %s" % (
        livro.codigo, livro.titulo, livro.ano, livro.edicao, livro.editora, livro.quant_paginas,
        len(livro.categorias)))
        print("Categorias do Livro: %s" % livro.titulo)
        for c in livro.categorias:
            print("Codigo: %s - Descricao: %s" % (c.codigo, c.descricao))
    else:
        print("*** Não foi possível localizar este livro ***")
    self.menu_livros()
def menu_inserir_um_livro(self):
print("==========================================")
print("Inserir um novo livro")
print("==========================================")
codigo = int(input("Digite o código do novo livro: "))
titulo = input("Digite o titulo do novo livro: ")
ano = int(input("Digite o ano do novo livro: "))
edicao = int(input("Digite a edicao do novo livro: "))
editora = input("Digite a editora do novo livro: ")
quant_paginas = int(input("Digite a quantidade de paginas do novo livro: "))
livroDAO = LivroDAO()
sucesso = livroDAO.inserir(codigo, titulo, ano,edicao,editora,quant_paginas)
if sucesso == True:
print("*** Livro inserido com sucesso ***")
else:
print("*** Não foi possível inserir este livro ***")
self.menu_livros()
def menu_inserir_um_autor(self):
print("==========================================")
print("Inserir um autor para um livro")
print("==========================================")
codigo = int(input("Digite o código do livro: "))
autor = input("Digite o nome do autor: ")
livroDAO = LivroDAO()
sucesso = livroDAO.inserir_autor(codigo, autor)
if sucesso == True:
print("*** Autor inserido com sucesso para o livro ***")
else:
print("*** Não foi possível inserir o autor para este livro ***")
self.menu_livros()
def menu_atualizar_um_livro(self):
print("==========================================")
print("Atualizar um Livro existente")
print("==========================================")
codigo = int(input("Digite o código do Livro a atualizar: "))
titulo = input("Digite o novo titulo do Livro: ")
ano = int(input("Digite o novo ano do Livro: "))
edicao = int(input("Digite o novo numero de edicao do Livro: "))
editora = input("Digite a nova editora do Livro: ")
quant_paginas = int(input("Digite a nova quantidade de paginas do Livro: "))
livroDAO = LivroDAO()
sucesso = livroDAO.atualizar(codigo, titulo,ano,edicao,editora,quant_paginas)
if sucesso == True:
print("*** Livro atualizado com sucesso ***")
else:
print("*** Não foi possível atualizar este livro ***")
self.menu_livros()
def menu_remover_um_livro(self):
print("==========================================")
print("Remover um Livro Existente")
print("==========================================")
print("DICA: Você nao conseguirá remover um livro se este estiver em algum aluguel")
codigo = int(input("Digite o código do livro a ser removido: "))
livroDAO = LivroDAO()
sucesso = livroDAO.remover(codigo)
if sucesso == True:
print("*** Livro removido com sucesso ***")
else:
print("*** Não foi possível remover este livro ***")
self.menu_livros()
# CATEGORIA -------------------------------------------------------------------------------------
def menu_categorias(self):
print("==========================================")
print("Operações de categorias")
print("==========================================")
print("Opção\tDescrição")
print("------------------------------------------")
print("0\t\tVoltar ao Menu Principal")
print("1\t\tListar uma categoria existente")
print("2\t\tListar todos os livros de uma Categoria")
print("------------------------------------------")
opcao = int(input("Digite uma opção [0-2]: "))
if opcao == 0:
self.menu_principal()
return
if opcao == 1:
self.menu_listar_uma_categoria()
return
if opcao == 2:
self.menu_listar_livros_de_uma_categoria()
return
self.menu_categorias()
def menu_listar_uma_categoria(self):
print("==========================================")
print("Listar uma Categoria Existente")
print("==========================================")
codigo = int(input("Digite o código da categoria: "))
categoriaDAO = CategoriaDAO()
categoria = categoriaDAO.listar_uma_categoria(codigo)
if categoria is not None:
print("*** Código: " + str(categoria.codigo) + " - Descricao: " + categoria.descricao + " ***")
else:
print("*** Não foi possível localizar esta categoria ***")
self.menu_categorias()
def menu_listar_livros_de_uma_categoria(self):
print("==========================================")
print("Listar todos os livros de uma Categoria")
print("==========================================")
codigo = int(input('Digite o codigo da categoria: '))
print("==========================================")
categoriaDAO = CategoriaDAO()
categoria = categoriaDAO.listar_livros_uma_categoria(codigo)
if categoria is not None:
print("Categoria selecionada: Codigo: %s - Descricao: %s - Quantidade de Livros: %s" % (categoria.codigo, categoria.descricao, len(categoria.livros)))
for l in categoria.livros:
print("Codigo do livro: %s - Titulo: %s - Ano: %s - Edicao: %s - Editora: %s - Paginas: %s" % (l.codigo, l.titulo,l.ano,l.edicao,l.editora, l.quant_paginas))
else:
print("*** Não foi possível localizar esta categoria ***")
self.menu_categorias()
# Código principal que inicializa a aplicação
if __name__ == "__main__":
aluguel_python = AluguelPython()
aluguel_python.menu_principal() | 0.294621 | 0.2174 |
from django.contrib.auth import authenticate, login
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, redirect
from django.contrib.auth.models import Group
from applications.access.forms import ClientRegistrationForm, AdminRegistrationForm
from applications.core.models import Clients, Account
from django.db.models import Max
from helpers.logger import LoggerManager
def signup(request):
"""
Function to sing up user that are not admins
:param request: This param contain all the information associated to the request
:param type request: Request
:return: The URL to render
:rtype: str
"""
try:
log = LoggerManager('info', 'singup-info', session=request.session)
if request.method == 'POST':
form = ClientRegistrationForm(request.POST)
if form.is_valid():
form.save()
max_id = Account.objects.all().aggregate(Max('id'))['id__max']
user = Account.objects.filter(id=max_id)
user.update(is_staff=False)
web_group, created = Group.objects.get_or_create(name='web_group')
web_group.user_set.add(user.get().id)
log.write_info(form.data)
return redirect('client_list')
else:
form = ClientRegistrationForm()
return render(request, 'registration/signup.html', {
'form': form
})
except Exception as ex:
log = LoggerManager('exception', 'singup-exception', session=request.session)
log.write_exception(ex)
@login_required(login_url='/accounts/login/')
def signup_user(request):
"""
Function to sing up user that are not admins
:param request: This param contain all the information associated to the request
:param type request: Request
:return: The URL to render
:rtype: str
"""
try:
log = LoggerManager('info', 'singup_manager-info', session=request.session)
if request.method == 'POST':
form = ClientRegistrationForm(request.POST)
if form.is_valid():
form.save()
max_id = Account.objects.all().aggregate(Max('id'))['id__max']
user = Account.objects.filter(id=max_id)
web_group, created = Group.objects.get_or_create(name=request.user.email)
web_group.user_set.add(request.user.id)
web_group.user_set.add(user.get().id)
log.write_info(form.data)
return redirect('client_list')
else:
form = ClientRegistrationForm()
return render(request, 'registration/signup.html', {
'form': form
})
except Exception as ex:
log = LoggerManager('exception', 'singup_manager-exception', session=request.session)
log.write_exception(ex)
@login_required(login_url='/accounts/login/')
def signup_manager(request):
"""
Function to sing up admins users
:param request: This param contain all the information associated to the request
:param type request: Request
:return: The URL to render
:rtype: str
"""
try:
log = LoggerManager('info', 'singup_manager-info', session=request.session)
if request.method == 'POST':
form = AdminRegistrationForm(request.POST)
if form.is_valid():
form.save()
max_id = Account.objects.all().aggregate(Max('id'))['id__max']
user = Account.objects.filter(id=max_id)
user.update(is_staff=True)
return redirect('client_list')
else:
form = AdminRegistrationForm()
return render(request, 'registration/signup.html', {
'form': form
})
except Exception as ex:
log = LoggerManager('exception', 'singup_manager-exception', session=request.session)
log.write_exception(ex) | src/applications/access/views.py | from django.contrib.auth import authenticate, login
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, redirect
from django.contrib.auth.models import Group
from applications.access.forms import ClientRegistrationForm, AdminRegistrationForm
from applications.core.models import Clients, Account
from django.db.models import Max
from helpers.logger import LoggerManager
def signup(request):
"""
Function to sing up user that are not admins
:param request: This param contain all the information associated to the request
:param type request: Request
:return: The URL to render
:rtype: str
"""
try:
log = LoggerManager('info', 'singup-info', session=request.session)
if request.method == 'POST':
form = ClientRegistrationForm(request.POST)
if form.is_valid():
form.save()
max_id = Account.objects.all().aggregate(Max('id'))['id__max']
user = Account.objects.filter(id=max_id)
user.update(is_staff=False)
web_group, created = Group.objects.get_or_create(name='web_group')
web_group.user_set.add(user.get().id)
log.write_info(form.data)
return redirect('client_list')
else:
form = ClientRegistrationForm()
return render(request, 'registration/signup.html', {
'form': form
})
except Exception as ex:
log = LoggerManager('exception', 'singup-exception', session=request.session)
log.write_exception(ex)
@login_required(login_url='/accounts/login/')
def signup_user(request):
"""
Function to sing up user that are not admins
:param request: This param contain all the information associated to the request
:param type request: Request
:return: The URL to render
:rtype: str
"""
try:
log = LoggerManager('info', 'singup_manager-info', session=request.session)
if request.method == 'POST':
form = ClientRegistrationForm(request.POST)
if form.is_valid():
form.save()
max_id = Account.objects.all().aggregate(Max('id'))['id__max']
user = Account.objects.filter(id=max_id)
web_group, created = Group.objects.get_or_create(name=request.user.email)
web_group.user_set.add(request.user.id)
web_group.user_set.add(user.get().id)
log.write_info(form.data)
return redirect('client_list')
else:
form = ClientRegistrationForm()
return render(request, 'registration/signup.html', {
'form': form
})
except Exception as ex:
log = LoggerManager('exception', 'singup_manager-exception', session=request.session)
log.write_exception(ex)
@login_required(login_url='/accounts/login/')
def signup_manager(request):
"""
Function to sing up admins users
:param request: This param contain all the information associated to the request
:param type request: Request
:return: The URL to render
:rtype: str
"""
try:
log = LoggerManager('info', 'singup_manager-info', session=request.session)
if request.method == 'POST':
form = AdminRegistrationForm(request.POST)
if form.is_valid():
form.save()
max_id = Account.objects.all().aggregate(Max('id'))['id__max']
user = Account.objects.filter(id=max_id)
user.update(is_staff=True)
return redirect('client_list')
else:
form = AdminRegistrationForm()
return render(request, 'registration/signup.html', {
'form': form
})
except Exception as ex:
log = LoggerManager('exception', 'singup_manager-exception', session=request.session)
log.write_exception(ex) | 0.497559 | 0.077134 |
import json
import dataflows as DF
import time
from common import all_data, city_translations, upload_file
from city_images import upload_static_image
def ranker():
def func(rows):
for i, r in enumerate(rows):
r['rank'] = i + 1
yield r
return func
def sort_limit_scores():
def func(row):
row['scores'] = sorted(row.get('scores', []), key=lambda r: r['date'])[-30:]
return func
if __name__ == '__main__':
r, _, _ = DF.Flow(
DF.load(all_data(), name='cities', headers=1,
override_fields=dict(area_id=dict(type='string')),
cast_strategy=DF.load.CAST_WITH_SCHEMA),
DF.filter_rows(lambda r: r['is_city']),
DF.add_field('score_date', 'object', lambda r: dict(
date=r['date'].isoformat(), sr=float(r['symptoms_ratio_weighted'] or 0), nr=int(r['num_reports_weighted']))
),
DF.concatenate(dict(
id=[], city_name=[], score_date=[]
), target=dict(name='ranking')),
DF.join_with_self('ranking', '{city_name}', dict(
id=None, city_name=None, scores=dict(name='score_date', aggregate='array')
)),
sort_limit_scores(),
DF.filter_rows(lambda r: r['scores'][-1]['nr'] >= 200),
DF.add_field('sortkey', 'integer', lambda r: int(r['scores'][-1]['sr'] * 1000000) + r['scores'][-1]['nr']),
DF.sort_rows('{sortkey}', reverse=True),
DF.delete_fields(['sortkey']),
DF.add_field('rank', 'integer', 0),
DF.add_field('translations', 'object', lambda r: city_translations[r['city_name']]),
DF.add_field('image', 'object', lambda r: upload_static_image(r['id'], width=280*2, height=160*2)),
ranker(),
).results()
rankings = r[0]
r, _, _ = DF.Flow(
DF.load(all_data(), name='cities', headers=1,
override_fields=dict(area_id=dict(type='string')),
cast_strategy=DF.load.CAST_WITH_SCHEMA),
DF.filter_rows(lambda r: r['is_city']),
DF.filter_rows(lambda r: r['num_reports_weighted'] >= 200),
DF.add_field('ws', 'number', lambda r: r['symptoms_ratio_weighted'] * r['num_reports_weighted']),
DF.concatenate(dict(
date=[], num_reports_weighted=[], ws=[]
), target=dict(name='ranking')),
DF.join_with_self('ranking', '{date}', dict(
date=None, nr=dict(name='num_reports_weighted', aggregate='sum'), ws=dict(name='ws', aggregate='sum')
)),
DF.add_field('sr', 'number', lambda r: r['ws']/r['nr']),
DF.delete_fields(['ws']),
DF.sort_rows('{date}'),
).results()
national = dict(
id='NATIONAL', rank=0, scores=[
dict(nr=rr['nr'], sr=float(rr['sr']), date=rr['date'].isoformat())
for rr in r[0]
][-30:]
)
rankings.insert(0, national)
upload_file(json.dumps(rankings).encode('utf8'), 'data/city_rankings.json') | tools/city_ranking.py | import json
import dataflows as DF
import time
from common import all_data, city_translations, upload_file
from city_images import upload_static_image
def ranker():
def func(rows):
for i, r in enumerate(rows):
r['rank'] = i + 1
yield r
return func
def sort_limit_scores():
def func(row):
row['scores'] = sorted(row.get('scores', []), key=lambda r: r['date'])[-30:]
return func
if __name__ == '__main__':
r, _, _ = DF.Flow(
DF.load(all_data(), name='cities', headers=1,
override_fields=dict(area_id=dict(type='string')),
cast_strategy=DF.load.CAST_WITH_SCHEMA),
DF.filter_rows(lambda r: r['is_city']),
DF.add_field('score_date', 'object', lambda r: dict(
date=r['date'].isoformat(), sr=float(r['symptoms_ratio_weighted'] or 0), nr=int(r['num_reports_weighted']))
),
DF.concatenate(dict(
id=[], city_name=[], score_date=[]
), target=dict(name='ranking')),
DF.join_with_self('ranking', '{city_name}', dict(
id=None, city_name=None, scores=dict(name='score_date', aggregate='array')
)),
sort_limit_scores(),
DF.filter_rows(lambda r: r['scores'][-1]['nr'] >= 200),
DF.add_field('sortkey', 'integer', lambda r: int(r['scores'][-1]['sr'] * 1000000) + r['scores'][-1]['nr']),
DF.sort_rows('{sortkey}', reverse=True),
DF.delete_fields(['sortkey']),
DF.add_field('rank', 'integer', 0),
DF.add_field('translations', 'object', lambda r: city_translations[r['city_name']]),
DF.add_field('image', 'object', lambda r: upload_static_image(r['id'], width=280*2, height=160*2)),
ranker(),
).results()
rankings = r[0]
r, _, _ = DF.Flow(
DF.load(all_data(), name='cities', headers=1,
override_fields=dict(area_id=dict(type='string')),
cast_strategy=DF.load.CAST_WITH_SCHEMA),
DF.filter_rows(lambda r: r['is_city']),
DF.filter_rows(lambda r: r['num_reports_weighted'] >= 200),
DF.add_field('ws', 'number', lambda r: r['symptoms_ratio_weighted'] * r['num_reports_weighted']),
DF.concatenate(dict(
date=[], num_reports_weighted=[], ws=[]
), target=dict(name='ranking')),
DF.join_with_self('ranking', '{date}', dict(
date=None, nr=dict(name='num_reports_weighted', aggregate='sum'), ws=dict(name='ws', aggregate='sum')
)),
DF.add_field('sr', 'number', lambda r: r['ws']/r['nr']),
DF.delete_fields(['ws']),
DF.sort_rows('{date}'),
).results()
national = dict(
id='NATIONAL', rank=0, scores=[
dict(nr=rr['nr'], sr=float(rr['sr']), date=rr['date'].isoformat())
for rr in r[0]
][-30:]
)
rankings.insert(0, national)
upload_file(json.dumps(rankings).encode('utf8'), 'data/city_rankings.json') | 0.439266 | 0.13852 |
import csv
import getopt
import math
import sys
from time import localtime, strftime, time
# Global variables.
# The value can be updated by command line options.
__data_type = None
__input_file_path = None
__output_file_path = None
def process_inventory_list():
print("-" * 100)
time_str = strftime("%Y-%m-%d %H:%M:%S", localtime(time()))
print("Start time =", time_str)
headers = []
records = []
try:
with open(__input_file_path, "r") as file:
# Read file as dict.
reader = csv.DictReader(file)
# Read header line.
headers = reader.fieldnames
# Read records into a list of dict.
records = [line for line in reader]
print("Records =", len(records))
keyFloorAreaFlatModel = "Floor Area (sqm) /Flat Model"
keyResalePrice = "Resale Price"
keyResaleRegistrationDate = "Resale Registration Date"
keyFlatModel = "Flat Model"
keyFloorArea = "Floor Area (sqm)"
keyFloorAreaLower = "Floor Area Lower (sqm)"
keyUnitPrice = "Unit Price"
keyResaleYear = "Resale Year"
if __data_type == 0:
headers.append(keyFlatModel)
headers.append(keyFloorArea)
headers.append(keyFloorAreaLower)
headers.append(keyUnitPrice)
headers.append(keyResaleYear)
for record in records:
temp = record[keyFloorAreaFlatModel].split("/")
record[keyFlatModel] = temp[1]
record[keyFloorArea] = temp[0]
record[keyFloorAreaLower] = math.floor(float(record[keyFloorArea]) / 10) * 10
record[keyUnitPrice] = math.floor(int(record[keyResalePrice]) / float(record[keyFloorArea]))
record[keyResaleYear] = record[keyResaleRegistrationDate][-4:]
print("Process inventory list: ok.")
except Exception as e:
print("Process inventory list: Exception = {0}".format(e))
time_str = strftime("%Y-%m-%d %H:%M:%S", localtime(time()))
print("Stop time =", time_str)
print("-" * 100)
# If given __output_file_path, output to file; otherwise, output to
# screen.
if __output_file_path:
try:
# Open output file.
with open(__output_file_path, "wt", encoding = "utf-8") as output_file:
print('output_file =', output_file)
# Output file as CSV format.
cout = csv.DictWriter(
output_file, fieldnames = headers, lineterminator = "\n")
# Write header line.
cout.writeheader()
# Write record lines.
cout.writerows(records)
print("Output process results: ok")
except Exception as e:
print("Output process results: Exception = {0}".format(e))
else:
# Output screen as JSON format.
print("headers =", headers)
print("records =")
for record in records:
print(record)
print("-" * 100)
def usage():
print('''
Preprocess HDB merged data.
Usage:
-h
-d <DataType> -i <FilePath> [-o <FilePath>]
Options:
-h : Show help.
-d <DataType> : Raw data type. Compulsory, Value [0: Transaction].
-i <FilePath> : Source data file path (CSV). Compulsory.
-o <FilePath> : Result output file path (CSV). Optional, output to screen by default.
''')
def main(argv):
'''
Pass input arguments from command line to method.
@param argv: A list of arguments
'''
global __data_type
global __input_file_path
global __output_file_path
print("argv =", argv)
__show_usage = False
__exit_code = 0
__error_message = None
# If no any option.
if not argv:
__show_usage = True
# Parse command line.
if not __show_usage:
try:
opts, args = getopt.getopt(argv, "hd:i:o:")
print("opts =", opts)
print("args =", args)
except Exception as e:
# There would be getopt.GetoptError.
print("Parse command line: Exception = {0}".format(e))
__show_usage, __exit_code, __error_message = True, -1, "Wrong command line option."
# Check and parse each option.
if not __show_usage:
try:
for opt, arg in opts:
if opt == "-h":
__show_usage, __exit_code = True, 0
elif opt == "-d":
__data_type = int(arg)
elif opt == "-i":
__input_file_path = arg
elif opt == "-o":
__output_file_path = arg
else:
__show_usage, __exit_code, __error_message = True, -\
2, "Unknown command line option."
except Exception as e:
print("Parse command options: Exception = {0}".format(e))
__show_usage, __exit_code, __error_message = True, -\
3, "Wrong value for command line option."
print("show_usage =", __show_usage)
print("data_type =", __data_type)
print("input_file_path =", __input_file_path)
print("output_file_path =", __output_file_path)
# Check options are valid.
if not __show_usage:
if (__data_type is None) or (__input_file_path is None):
__show_usage, __exit_code, __error_message = True, -\
4, "Missing compulsory command line option."
elif not (__data_type in [0]):
__show_usage, __exit_code, __error_message = True, -5, "Wrong value for -d."
if not __show_usage:
process_inventory_list()
else:
print("__exit_code =", __exit_code)
if __error_message:
print("__error_message =", __error_message)
print("")
usage()
sys.exit(__exit_code)
if __name__ == '__main__':
main(sys.argv[1:]) | Python_Test/PyDataMiningSample/com/djs/learn/hdb/PreprocessData.py | import csv
import getopt
import math
import sys
from time import localtime, strftime, time
# Global variables.
# The value can be updated by command line options.
__data_type = None
__input_file_path = None
__output_file_path = None
def process_inventory_list():
print("-" * 100)
time_str = strftime("%Y-%m-%d %H:%M:%S", localtime(time()))
print("Start time =", time_str)
headers = []
records = []
try:
with open(__input_file_path, "r") as file:
# Read file as dict.
reader = csv.DictReader(file)
# Read header line.
headers = reader.fieldnames
# Read records into a list of dict.
records = [line for line in reader]
print("Records =", len(records))
keyFloorAreaFlatModel = "Floor Area (sqm) /Flat Model"
keyResalePrice = "Resale Price"
keyResaleRegistrationDate = "Resale Registration Date"
keyFlatModel = "Flat Model"
keyFloorArea = "Floor Area (sqm)"
keyFloorAreaLower = "Floor Area Lower (sqm)"
keyUnitPrice = "Unit Price"
keyResaleYear = "Resale Year"
if __data_type == 0:
headers.append(keyFlatModel)
headers.append(keyFloorArea)
headers.append(keyFloorAreaLower)
headers.append(keyUnitPrice)
headers.append(keyResaleYear)
for record in records:
temp = record[keyFloorAreaFlatModel].split("/")
record[keyFlatModel] = temp[1]
record[keyFloorArea] = temp[0]
record[keyFloorAreaLower] = math.floor(float(record[keyFloorArea]) / 10) * 10
record[keyUnitPrice] = math.floor(int(record[keyResalePrice]) / float(record[keyFloorArea]))
record[keyResaleYear] = record[keyResaleRegistrationDate][-4:]
print("Process inventory list: ok.")
except Exception as e:
print("Process inventory list: Exception = {0}".format(e))
time_str = strftime("%Y-%m-%d %H:%M:%S", localtime(time()))
print("Stop time =", time_str)
print("-" * 100)
# If given __output_file_path, output to file; otherwise, output to
# screen.
if __output_file_path:
try:
# Open output file.
with open(__output_file_path, "wt", encoding = "utf-8") as output_file:
print('output_file =', output_file)
# Output file as CSV format.
cout = csv.DictWriter(
output_file, fieldnames = headers, lineterminator = "\n")
# Write header line.
cout.writeheader()
# Write record lines.
cout.writerows(records)
print("Output process results: ok")
except Exception as e:
print("Output process results: Exception = {0}".format(e))
else:
# Output screen as JSON format.
print("headers =", headers)
print("records =")
for record in records:
print(record)
print("-" * 100)
def usage():
print('''
Preprocess HDB merged data.
Usage:
-h
-d <DataType> -i <FilePath> [-o <FilePath>]
Options:
-h : Show help.
-d <DataType> : Raw data type. Compulsory, Value [0: Transaction].
-i <FilePath> : Source data file path (CSV). Compulsory.
-o <FilePath> : Result output file path (CSV). Optional, output to screen by default.
''')
def main(argv):
'''
Pass input arguments from command line to method.
@param argv: A list of arguments
'''
global __data_type
global __input_file_path
global __output_file_path
print("argv =", argv)
__show_usage = False
__exit_code = 0
__error_message = None
# If no any option.
if not argv:
__show_usage = True
# Parse command line.
if not __show_usage:
try:
opts, args = getopt.getopt(argv, "hd:i:o:")
print("opts =", opts)
print("args =", args)
except Exception as e:
# There would be getopt.GetoptError.
print("Parse command line: Exception = {0}".format(e))
__show_usage, __exit_code, __error_message = True, -1, "Wrong command line option."
# Check and parse each option.
if not __show_usage:
try:
for opt, arg in opts:
if opt == "-h":
__show_usage, __exit_code = True, 0
elif opt == "-d":
__data_type = int(arg)
elif opt == "-i":
__input_file_path = arg
elif opt == "-o":
__output_file_path = arg
else:
__show_usage, __exit_code, __error_message = True, -\
2, "Unknown command line option."
except Exception as e:
print("Parse command options: Exception = {0}".format(e))
__show_usage, __exit_code, __error_message = True, -\
3, "Wrong value for command line option."
print("show_usage =", __show_usage)
print("data_type =", __data_type)
print("input_file_path =", __input_file_path)
print("output_file_path =", __output_file_path)
# Check options are valid.
if not __show_usage:
if (__data_type is None) or (__input_file_path is None):
__show_usage, __exit_code, __error_message = True, -\
4, "Missing compulsory command line option."
elif not (__data_type in [0]):
__show_usage, __exit_code, __error_message = True, -5, "Wrong value for -d."
if not __show_usage:
process_inventory_list()
else:
print("__exit_code =", __exit_code)
if __error_message:
print("__error_message =", __error_message)
print("")
usage()
sys.exit(__exit_code)
if __name__ == '__main__':
main(sys.argv[1:]) | 0.19853 | 0.093058 |
import pygame as pg
from settings import *
from tilemap import collide_hit_rect
vec = pg.math.Vector2
def collide_with_walls(sprite, group, dir):
    """Resolve collisions between *sprite* and the wall *group* along one axis.

    ``dir`` is ``'x'`` or ``'y'``. The sprite is snapped flush against the
    first overlapping wall, its velocity on that axis is zeroed, and its
    hit_rect is re-centred on the corrected position.
    """
    if dir == 'x':
        blockers = pg.sprite.spritecollide(sprite, group, False, collide_hit_rect)
        if blockers:
            wall_rect = blockers[0].rect
            half_w = sprite.hit_rect.width / 2
            if sprite.vel.x > 0:
                # Moving right: snap to the wall's left edge.
                sprite.pos.x = wall_rect.left - half_w
            if sprite.vel.x < 0:
                # Moving left: snap to the wall's right edge.
                sprite.pos.x = wall_rect.right + half_w
            sprite.vel.x = 0
            sprite.hit_rect.centerx = sprite.pos.x
    if dir == 'y':
        blockers = pg.sprite.spritecollide(sprite, group, False, collide_hit_rect)
        if blockers:
            wall_rect = blockers[0].rect
            half_h = sprite.hit_rect.height / 2
            if sprite.vel.y > 0:
                # Moving down: snap to the wall's top edge.
                sprite.pos.y = wall_rect.top - half_h
            if sprite.vel.y < 0:
                # Moving up: snap to the wall's bottom edge.
                sprite.pos.y = wall_rect.bottom + half_h
            sprite.vel.y = 0
            sprite.hit_rect.centery = sprite.pos.y
class Spritesheet:
    """Utility for slicing individual frames out of a single sprite-sheet image."""

    def __init__(self, filename):
        # Load the whole sheet once; frames are blitted out of it on demand.
        self.spritesheet = pg.image.load(filename).convert()

    def get_image(self, x, y, width, height):
        """Return a new Surface copied from the sheet region (x, y, width, height)."""
        frame = pg.Surface((width, height))
        frame.blit(self.spritesheet, (0, 0), (x, y, width, height))
        return frame
class Player(pg.sprite.Sprite):
    """The user-controlled character: WASD/arrow movement with a two-frame animation."""

    def __init__(self, game, x, y):
        """Set up animation state, the initial image, position, and collision rect."""
        self.groups = game.all_sprites
        pg.sprite.Sprite.__init__(self, self.groups)
        self.game = game
        self.walking = False
        self.current_frame = 0
        self.last_update = 0
        self.load_images()
        self.direction = 0  # 0 = facing right, 1 = facing left
        self.image = self.game.player_img.get_image(0, 0, 180, 150)
        self.rect = self.image.get_rect()
        self.rect.center = (x, y)
        # copy() so this instance owns its collision rect instead of mutating
        # the shared settings Rect (Mob already does this with MOB_HIT_RECT).
        self.hit_rect = PLAYER_HIT_RECT.copy()
        self.hit_rect.center = self.rect.center
        self.vel = vec(0, 0)
        self.pos = vec(x, y)
        self.rot = 0

    def load_images(self):
        """Slice the right-facing frames from the sheet and mirror them for the left set."""
        self.standing_frames_r = [
            self.game.player_img.get_image(0, 0, 180, 150),
            self.game.player_img.get_image(180, 0, 180, 150),
        ]
        # One pass is enough: the original applied set_colorkey to the same
        # frames twice.  The flipped copies are built from the keyed frames.
        self.standing_frames_l = []
        for frame in self.standing_frames_r:
            frame.set_colorkey((0, 255, 0))  # green background -> transparent
            self.standing_frames_l.append(pg.transform.flip(frame, True, False))

    def get_keys(self):
        """Translate currently-held keys into this frame's velocity vector."""
        self.vel = vec(0, 0)
        keys = pg.key.get_pressed()
        if keys[pg.K_LEFT] or keys[pg.K_a]:
            self.vel.x = -PLAYER_SPEED
        if keys[pg.K_RIGHT] or keys[pg.K_d]:
            self.vel.x = PLAYER_SPEED
        if keys[pg.K_UP] or keys[pg.K_w]:
            self.vel.y = -PLAYER_SPEED
        if keys[pg.K_DOWN] or keys[pg.K_s]:
            self.vel.y = PLAYER_SPEED
        if self.vel.x != 0 and self.vel.y != 0:
            # ~1/sqrt(2): keep diagonal speed equal to axial speed.
            self.vel *= 0.7071

    def update(self):
        """Advance the animation, apply input, move, and resolve wall collisions."""
        self.animate()
        self.get_keys()
        # Refresh the rect for the current animation frame's size; its position
        # is fixed up after collision resolution below.
        self.rect = self.image.get_rect()
        self.pos += self.vel * self.game.dt
        # Move and collide one axis at a time so the sprite slides along walls.
        self.hit_rect.centerx = self.pos.x
        collide_with_walls(self, self.game.walls, 'x')
        self.hit_rect.centery = self.pos.y
        collide_with_walls(self, self.game.walls, 'y')
        self.rect.center = self.hit_rect.center

    def animate(self):
        """Cycle the walk frames every 200 ms, facing the direction of travel.

        The original duplicated the walking and idle branches verbatim; the
        only real difference was that walking also updates the stored facing,
        so the two branches are merged here.
        """
        now = pg.time.get_ticks()
        self.walking = self.vel.x != 0
        if self.walking:
            # Remember facing so the sprite keeps it while standing still.
            self.direction = 0 if self.vel.x > 0 else 1
        if now - self.last_update > 200:
            self.last_update = now
            self.current_frame = (self.current_frame + 1) % len(self.standing_frames_r)
            frames = self.standing_frames_r if self.direction == 0 else self.standing_frames_l
            self.image = frames[self.current_frame]
class Wall(pg.sprite.Sprite):
    """A solid, visible wall tile placed on the tile grid."""

    def __init__(self, game, x, y):
        self.groups = game.all_sprites, game.walls
        pg.sprite.Sprite.__init__(self, self.groups)
        self.game = game
        self.image = game.wall_img
        self.rect = self.image.get_rect()
        # Keep the grid coordinates and place the rect in pixel space.
        self.x = x
        self.y = y
        self.rect.topleft = (x * TILESIZE, y * TILESIZE)
class Obstacle(pg.sprite.Sprite):
    """An invisible collision rectangle (no image) in pixel coordinates.

    Unlike Wall, it is only added to the collision group, not the draw group.
    """

    def __init__(self, game, x, y, w, h):
        self.groups = game.walls
        pg.sprite.Sprite.__init__(self, self.groups)
        self.game = game
        # pg.Rect(x, y, w, h) already positions the rect; the original
        # re-assigned rect.x / rect.y redundantly afterwards.
        self.rect = pg.Rect(x, y, w, h)
        self.x = x
        self.y = y
class Grass(pg.sprite.Sprite):
    """A decorative grass tile on the map grid."""

    def __init__(self, game, x, y):
        self.groups = game.all_sprites, game.grass
        pg.sprite.Sprite.__init__(self, self.groups)
        self.game = game
        self.image = game.grass_img
        self.rect = self.image.get_rect()
        # Convert tile-grid coordinates into pixel coordinates.
        self.x = x
        self.y = y
        self.rect.topleft = (x * TILESIZE, y * TILESIZE)
class Slab(pg.sprite.Sprite):
    """A decorative slab tile on the map grid."""

    def __init__(self, game, x, y):
        self.groups = game.all_sprites, game.slab
        pg.sprite.Sprite.__init__(self, self.groups)
        self.game = game
        self.image = game.slab_img
        self.rect = self.image.get_rect()
        # Convert tile-grid coordinates into pixel coordinates.
        self.x = x
        self.y = y
        self.rect.topleft = (x * TILESIZE, y * TILESIZE)
class Mob(pg.sprite.Sprite):
    """An enemy that steers toward the player and collides with walls.

    Note: the final statement of update() was corrupted by extraction junk in
    the original text and is restored here.
    """

    def __init__(self, game, x, y):
        self.groups = game.all_sprites, game.mobs
        pg.sprite.Sprite.__init__(self, self.groups)
        self.game = game
        self.image = game.mob_img
        self.rect = self.image.get_rect()
        self.rect.center = (x, y)
        # copy() so every mob owns its own collision rect.
        self.hit_rect = MOB_HIT_RECT.copy()
        self.hit_rect.center = self.rect.center
        self.pos = vec(x, y)
        self.vel = vec(0, 0)
        self.acc = vec(0, 0)
        self.rect.center = self.pos
        self.rot = 0

    def update(self):
        """Accelerate toward the player, integrate motion, resolve wall hits."""
        # Angle (degrees) from this mob to the player.
        self.rot = (self.game.player.pos - self.pos).angle_to(vec(1, 0))
        self.rect = self.image.get_rect()
        self.rect.center = self.pos
        self.acc = vec(MOB_SPEED, 0).rotate(-self.rot)
        self.acc += self.vel * -1  # simple friction proportional to velocity
        self.vel += self.acc * self.game.dt
        # Equation of motion: p += v*dt + 0.5*a*dt^2
        self.pos += self.vel * self.game.dt + 0.5 * self.acc * self.game.dt ** 2
        # Move and collide one axis at a time so the mob slides along walls.
        self.hit_rect.centerx = self.pos.x
        collide_with_walls(self, self.game.walls, 'x')
        self.hit_rect.centery = self.pos.y
        collide_with_walls(self, self.game.walls, 'y')
        self.rect.center = self.hit_rect.center
from settings import *
from tilemap import collide_hit_rect
vec = pg.math.Vector2
def collide_with_walls(sprite, group, dir):
    """Axis-separated collision response for *sprite* against the wall *group*.

    ``dir`` selects the axis ('x' or 'y'); on a hit the sprite is pushed flush
    against the first overlapping wall and its velocity on that axis is zeroed.
    """
    hits = pg.sprite.spritecollide(sprite, group, False, collide_hit_rect)
    if not hits:
        return
    wall_rect = hits[0].rect
    if dir == 'x':
        if sprite.vel.x > 0:
            # Moving right: rest against the wall's left edge.
            sprite.pos.x = wall_rect.left - sprite.hit_rect.width / 2
        elif sprite.vel.x < 0:
            # Moving left: rest against the wall's right edge.
            sprite.pos.x = wall_rect.right + sprite.hit_rect.width / 2
        sprite.vel.x = 0
        sprite.hit_rect.centerx = sprite.pos.x
    elif dir == 'y':
        if sprite.vel.y > 0:
            # Moving down: rest against the wall's top edge.
            sprite.pos.y = wall_rect.top - sprite.hit_rect.height / 2
        elif sprite.vel.y < 0:
            # Moving up: rest against the wall's bottom edge.
            sprite.pos.y = wall_rect.bottom + sprite.hit_rect.height / 2
        sprite.vel.y = 0
        sprite.hit_rect.centery = sprite.pos.y
class Spritesheet:
def __init__(self, filename) :
self.spritesheet = pg.image.load(filename).convert()
def get_image(self, x, y, width, height) :
image = pg.Surface((width, height))
image.blit(self.spritesheet, (0, 0), (x, y, width, height))
return image
class Player(pg.sprite.Sprite):
def __init__(self, game, x, y):
self.groups = game.all_sprites
pg.sprite.Sprite.__init__(self, self.groups)
self.game = game
self.walking = False
self.current_frame = 0
self.last_update = 0
self.load_images()
self.direction = 0
self.image = self.game.player_img.get_image(0, 0, 180, 150)
#self.image.set_colorkey((0,255,0))
self.rect = self.image.get_rect()
self.rect.center = (x, y)
self.hit_rect = PLAYER_HIT_RECT
self.hit_rect.center = self.rect.center
self.vel = vec(0, 0)
self.pos = vec(x, y)
self.rot = 0
def load_images(self) :
self.standing_frames_r = [self.game.player_img.get_image(0, 0, 180, 150), self.game.player_img.get_image(180, 0, 180, 150)]
for frame in self.standing_frames_r :
frame.set_colorkey((0, 255, 0))
self.standing_frames_l = []
for frame in self.standing_frames_r :
frame.set_colorkey((0, 255, 0))
self.standing_frames_l.append(pg.transform.flip(frame, True, False))
def get_keys(self):
self.vel = vec(0, 0)
keys = pg.key.get_pressed()
if keys[pg.K_LEFT] or keys[pg.K_a]:
self.vel.x = -PLAYER_SPEED
if keys[pg.K_RIGHT] or keys[pg.K_d]:
self.vel.x = PLAYER_SPEED
if keys[pg.K_UP] or keys[pg.K_w]:
self.vel.y = -PLAYER_SPEED
if keys[pg.K_DOWN] or keys[pg.K_s]:
self.vel.y = PLAYER_SPEED
if self.vel.x != 0 and self.vel.y != 0:
self.vel *= 0.7071
def update(self):
self.animate()
self.get_keys()
self.rect = self.image.get_rect()
self.rect.center = self.pos
self.pos += self.vel * self.game.dt
self.hit_rect.centerx = self.pos.x
collide_with_walls(self, self.game.walls, 'x')
self.hit_rect.centery = self.pos.y
collide_with_walls(self, self.game.walls, 'y')
self.rect.center = self.hit_rect.center
def animate(self) :
now = pg.time.get_ticks()
if self.vel.x != 0 :
self.walking = True
else :
self.walking = False
if self.walking :
if now - self.last_update > 200 :
self.last_update = now
self.current_frame = (self.current_frame + 1) % len(self.standing_frames_r)
if self.vel.x > 0 :
self.image = self.standing_frames_r[self.current_frame]
self.direction = 0
else :
self.image = self.standing_frames_l[self.current_frame]
self.direction = 1
if not self.walking :
if now - self.last_update > 200 :
self.last_update = now
self.current_frame = (self.current_frame + 1) % len(self.standing_frames_r)
if self.direction == 0 :
self.image = self.standing_frames_r[self.current_frame]
else :
self.image = self.standing_frames_l[self.current_frame]
class Wall(pg.sprite.Sprite):
def __init__(self, game, x, y):
self.groups = game.all_sprites, game.walls
pg.sprite.Sprite.__init__(self, self.groups)
self.game = game
self.image = game.wall_img
self.rect = self.image.get_rect()
self.x = x
self.y = y
self.rect.x = x * TILESIZE
self.rect.y = y * TILESIZE
class Obstacle(pg.sprite.Sprite):
def __init__(self, game, x, y, w, h):
self.groups = game.walls
pg.sprite.Sprite.__init__(self, self.groups)
self.game = game
self.rect = pg.Rect(x, y, w, h)
self.x = x
self.y = y
self.rect.x = x
self.rect.y = y
class Grass(pg.sprite.Sprite) :
def __init__(self, game, x, y) :
self.groups = game.all_sprites, game.grass
pg.sprite.Sprite.__init__(self, self.groups)
self.game = game
self.image = game.grass_img
self.rect = self.image.get_rect()
self.x = x
self.y = y
self.rect.x = x * TILESIZE
self.rect.y = y * TILESIZE
class Slab(pg.sprite.Sprite) :
def __init__(self, game, x, y) :
self.groups = game.all_sprites, game.slab
pg.sprite.Sprite.__init__(self, self.groups)
self.game = game
self.image = game.slab_img
self.rect = self.image.get_rect()
self.x = x
self.y = y
self.rect.x = x * TILESIZE
self.rect.y = y * TILESIZE
class Mob(pg.sprite.Sprite) :
def __init__(self, game, x, y) :
self.groups = game.all_sprites, game.mobs
pg.sprite.Sprite.__init__(self, self.groups)
self.game = game
self.image = game.mob_img
self.rect = self.image.get_rect()
self.rect.center = (x, y)
self.hit_rect = MOB_HIT_RECT.copy()
self.hit_rect.center = self.rect.center
self.pos = vec(x, y)
self.vel = vec(0, 0)
self.acc = vec(0, 0)
self.rect.center = self.pos
self.rot = 0
def update(self) :
self.rot = (self.game.player.pos - self.pos).angle_to(vec(1, 0))
#self.image = pg.transform.rotate(self.game.mob_img, self.rot)
self.rect = self.image.get_rect()
self.rect.center = self.pos
self.acc = vec(MOB_SPEED, 0).rotate(-self.rot)
self.acc += self.vel * -1
self.vel += self.acc * self.game.dt
self.pos += self.vel * self.game.dt + 0.5 * self.acc * self.game.dt ** 2
self.hit_rect.centerx = self.pos.x
collide_with_walls(self, self.game.walls, 'x')
self.hit_rect.centery = self.pos.y
collide_with_walls(self, self.game.walls, 'y')
self.rect.center = self.hit_rect.center | 0.333395 | 0.312055 |
import shared_gui_delegate_on_robot
import time
import rosebot
import ev3dev.ev3 as ev3
#Sprint 2 Functions
def increasing_rate_of_beep(rate_of_beep,rate_of_beep_increase,robot):
""":type robot: rosebot.RoseBot"""
robot.drive_system.go(20,20)
while True:
distance = robot.sensor_system.ir_proximity_sensor.get_distance_in_inches()
# # time.sleep(abs(int(og_rate_of_beep) - (int(rate_of_beep_increase)/int(distance))))
robot.sound_system.beeper.beep().wait()
if int(distance) <= 20:
for k in range(20):
if int(distance) == k:
delay = (k * int(rate_of_beep_increase) + int(rate_of_beep))*(1/100)
time.sleep(delay)
else:
time.sleep(20)
if distance <= 1:
break
robot.drive_system.stop()
robot.arm_and_claw.raise_arm()
def spin_to_find_object(direction,speed,rate_of_beep,rate_of_beep_increase,robot):
""":type robot: rosebot.RoseBot"""
pixy = ev3.Sensor(driver_name="pixy-lego")
pixy.mode = "SIG1"
if direction == "CCW":
robot.drive_system.spin_counterclockwise_until_sees_object(int(speed),pixy.value(3)*pixy.value(4))
if direction == "CW":
robot.drive_system.spin_clockwise_until_sees_object(int(speed),pixy.value(3)*pixy.value(4))
increasing_rate_of_beep(rate_of_beep,rate_of_beep_increase,robot)
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# Sprint 3 Functions
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
def spin_to_find_package(speed,robot):
""":type robot: rosebot.RoseBot"""
pixy = ev3.Sensor(driver_name="pixy-lego")
pixy.mode = "SIG1"
robot.drive_system.spin_clockwise_until_sees_object(20, pixy.value(3) * pixy.value(4))
robot.drive_system.stop()
robot.drive_system.go(speed, speed)
while True:
if robot.sensor_system.ir_proximity_sensor.get_distance_in_inches() <=0.5:
break
robot.drive_system.stop()
robot.arm_and_claw.raise_arm()
def find_road(robot):
""":type robot: rosebot.RoseBot"""
robot.drive_system.go(-30,30)
while True:
if robot.sensor_system.color_sensor.get_color() == 1:
break
time.sleep(0.01)
robot.drive_system.stop()
def find_house(color,robot):
""":type robot: rosebot.RoseBot"""
robot.drive_system.go(50,50)
while True:
#If the color sensor detects the color corresponding to the house
if robot.sensor_system.color_sensor.get_color() == int(color):
# If the color is green, the robot turns left
if int(color) == 3:
robot.drive_system.stop()
robot.drive_system.go(0, 50)
break
# If the color is yellow, the robot turns right
if int(color) == 4:
robot.drive_system.stop()
robot.drive_system.go(50,0)
break
# If the color is red, the robot turns left
if int(color) == 5:
robot.drive_system.stop()
robot.drive_system.go(0, 50)
break
# If the color is blue,the robot turns right
if int(color) == 2:
robot.drive_system.stop()
robot.drive_system.go(50,0)
break
#Allows for a 90 degree turn
time.sleep(2)
robot.drive_system.stop()
def deliver_package(greeting,goodbye,robot):
""":type robot: rosebot.RoseBot"""
robot.drive_system.go(50, 50)
time.sleep(2.3)
robot.drive_system.stop()
robot.sound_system.speech_maker(greeting).wait
robot.arm_and_claw.lower_arm()
robot.sound_system.speech_maker(goodbye).wait
robot.drive_system.go(-50,-50)
def full_delivery(color,greeting,goodbye,robot):
find_road(robot)
find_house(color,robot)
deliver_package(greeting,goodbye,robot)
def theft(robot):
""":type robot: rosebot.RoseBot"""
robot.drive_system.go(50,50)
while True:
if robot.sensor_system.ir_proximity_sensor.get_distance_in_inches() <=0.5:
break
robot.drive_system.stop()
robot.arm_and_claw.raise_arm()
robot.drive_system.go(-50,50)
time.sleep(2)
robot.drive_system.stop()
robot.drive_system.go(-50,50)
#Allows for a turn
time.sleep(2.3)
def getaway(laugh,robot):
robot.drive_system.go(100, 100)
robot.sound_system.speech_maker(laugh).wait
time.sleep(5)
robot.drive_system.stop()
def steal_package(color,laugh,robot):
find_house(color,robot)
theft(robot)
getaway(laugh,robot) | src/m1_extra.py | import shared_gui_delegate_on_robot
import time
import rosebot
import ev3dev.ev3 as ev3
#Sprint 2 Functions
def increasing_rate_of_beep(rate_of_beep,rate_of_beep_increase,robot):
""":type robot: rosebot.RoseBot"""
robot.drive_system.go(20,20)
while True:
distance = robot.sensor_system.ir_proximity_sensor.get_distance_in_inches()
# # time.sleep(abs(int(og_rate_of_beep) - (int(rate_of_beep_increase)/int(distance))))
robot.sound_system.beeper.beep().wait()
if int(distance) <= 20:
for k in range(20):
if int(distance) == k:
delay = (k * int(rate_of_beep_increase) + int(rate_of_beep))*(1/100)
time.sleep(delay)
else:
time.sleep(20)
if distance <= 1:
break
robot.drive_system.stop()
robot.arm_and_claw.raise_arm()
def spin_to_find_object(direction,speed,rate_of_beep,rate_of_beep_increase,robot):
""":type robot: rosebot.RoseBot"""
pixy = ev3.Sensor(driver_name="pixy-lego")
pixy.mode = "SIG1"
if direction == "CCW":
robot.drive_system.spin_counterclockwise_until_sees_object(int(speed),pixy.value(3)*pixy.value(4))
if direction == "CW":
robot.drive_system.spin_clockwise_until_sees_object(int(speed),pixy.value(3)*pixy.value(4))
increasing_rate_of_beep(rate_of_beep,rate_of_beep_increase,robot)
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# Sprint 3 Functions
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
def spin_to_find_package(speed,robot):
""":type robot: rosebot.RoseBot"""
pixy = ev3.Sensor(driver_name="pixy-lego")
pixy.mode = "SIG1"
robot.drive_system.spin_clockwise_until_sees_object(20, pixy.value(3) * pixy.value(4))
robot.drive_system.stop()
robot.drive_system.go(speed, speed)
while True:
if robot.sensor_system.ir_proximity_sensor.get_distance_in_inches() <=0.5:
break
robot.drive_system.stop()
robot.arm_and_claw.raise_arm()
def find_road(robot):
""":type robot: rosebot.RoseBot"""
robot.drive_system.go(-30,30)
while True:
if robot.sensor_system.color_sensor.get_color() == 1:
break
time.sleep(0.01)
robot.drive_system.stop()
def find_house(color,robot):
""":type robot: rosebot.RoseBot"""
robot.drive_system.go(50,50)
while True:
#If the color sensor detects the color corresponding to the house
if robot.sensor_system.color_sensor.get_color() == int(color):
# If the color is green, the robot turns left
if int(color) == 3:
robot.drive_system.stop()
robot.drive_system.go(0, 50)
break
# If the color is yellow, the robot turns right
if int(color) == 4:
robot.drive_system.stop()
robot.drive_system.go(50,0)
break
# If the color is red, the robot turns left
if int(color) == 5:
robot.drive_system.stop()
robot.drive_system.go(0, 50)
break
# If the color is blue,the robot turns right
if int(color) == 2:
robot.drive_system.stop()
robot.drive_system.go(50,0)
break
#Allows for a 90 degree turn
time.sleep(2)
robot.drive_system.stop()
def deliver_package(greeting,goodbye,robot):
""":type robot: rosebot.RoseBot"""
robot.drive_system.go(50, 50)
time.sleep(2.3)
robot.drive_system.stop()
robot.sound_system.speech_maker(greeting).wait
robot.arm_and_claw.lower_arm()
robot.sound_system.speech_maker(goodbye).wait
robot.drive_system.go(-50,-50)
def full_delivery(color,greeting,goodbye,robot):
find_road(robot)
find_house(color,robot)
deliver_package(greeting,goodbye,robot)
def theft(robot):
""":type robot: rosebot.RoseBot"""
robot.drive_system.go(50,50)
while True:
if robot.sensor_system.ir_proximity_sensor.get_distance_in_inches() <=0.5:
break
robot.drive_system.stop()
robot.arm_and_claw.raise_arm()
robot.drive_system.go(-50,50)
time.sleep(2)
robot.drive_system.stop()
robot.drive_system.go(-50,50)
#Allows for a turn
time.sleep(2.3)
def getaway(laugh,robot):
robot.drive_system.go(100, 100)
robot.sound_system.speech_maker(laugh).wait
time.sleep(5)
robot.drive_system.stop()
def steal_package(color,laugh,robot):
find_house(color,robot)
theft(robot)
getaway(laugh,robot) | 0.23092 | 0.266766 |
from django.contrib.gis import admin
from django.forms import ModelForm
from vfwheron import models
# The Admin.py is used to create fields in the django admin web page
# Register your models here.
class EntriesAdminForm(ModelForm):
class Meta:
model = models.Entries
fields = ['title', 'abstract', 'external_id', 'location', 'geom', 'version', 'latest_version', 'comment',
'license', 'variable', 'datasource', 'embargo', 'embargo_end', 'publication', 'lastupdate',
'is_partial', 'uuid', 'citation']
class EntriesAdmin(admin.OSMGeoAdmin):
form = EntriesAdminForm
list_display = ['id', 'title', 'abstract']
def variable_fname(self, obj):
return obj.variable.variable_name
variable_fname.admin_order_field = 'title'
variable_fname.short_description = 'Titel'
def creator_last_name(self, obj):
return obj.creator.last_name
creator_last_name.short_description = 'creator Name'
admin.site.register(models.Entries, EntriesAdmin)
# admin.site.register(models.Entries, admin.GeoModelAdmin)
class VariablesAdminForm(ModelForm):
class Meta:
model = models.Variables
fields = ['name', 'keyword', 'unit', 'symbol']
class VariablesAdmin(admin.ModelAdmin):
form = VariablesAdminForm
list_display = ['name', 'unit']
admin.site.register(models.Variables, VariablesAdmin)
class LicensesAdminForm(ModelForm):
class Meta:
model = models.Licenses
fields = ['short_title', 'title', 'summary', 'full_text', 'link', 'by_attribution', 'share_alike',
'commercial_use']
class LicensesAdmin(admin.ModelAdmin):
form = LicensesAdminForm
list_display = ['short_title', 'title']
admin.site.register(models.Licenses, LicensesAdmin)
class PersonsAdminForm(ModelForm):
class Meta:
model = models.Persons
fields = ['first_name', 'last_name', 'affiliation', 'organisation_name', 'attribution']
class PersonsAdmin(admin.ModelAdmin):
form = PersonsAdminForm
search_fields = ['first_name', 'last_name']
list_display = ['first_name', 'last_name', 'affiliation']
list_filter = ['last_name']
admin.site.register(models.Persons, PersonsAdmin) | vfwheron/admin.py | from django.contrib.gis import admin
from django.forms import ModelForm
from vfwheron import models
# The Admin.py is used to create fields in the django admin web page
# Register your models here.
class EntriesAdminForm(ModelForm):
class Meta:
model = models.Entries
fields = ['title', 'abstract', 'external_id', 'location', 'geom', 'version', 'latest_version', 'comment',
'license', 'variable', 'datasource', 'embargo', 'embargo_end', 'publication', 'lastupdate',
'is_partial', 'uuid', 'citation']
class EntriesAdmin(admin.OSMGeoAdmin):
form = EntriesAdminForm
list_display = ['id', 'title', 'abstract']
def variable_fname(self, obj):
return obj.variable.variable_name
variable_fname.admin_order_field = 'title'
variable_fname.short_description = 'Titel'
def creator_last_name(self, obj):
return obj.creator.last_name
creator_last_name.short_description = 'creator Name'
admin.site.register(models.Entries, EntriesAdmin)
# admin.site.register(models.Entries, admin.GeoModelAdmin)
class VariablesAdminForm(ModelForm):
class Meta:
model = models.Variables
fields = ['name', 'keyword', 'unit', 'symbol']
class VariablesAdmin(admin.ModelAdmin):
form = VariablesAdminForm
list_display = ['name', 'unit']
admin.site.register(models.Variables, VariablesAdmin)
class LicensesAdminForm(ModelForm):
class Meta:
model = models.Licenses
fields = ['short_title', 'title', 'summary', 'full_text', 'link', 'by_attribution', 'share_alike',
'commercial_use']
class LicensesAdmin(admin.ModelAdmin):
form = LicensesAdminForm
list_display = ['short_title', 'title']
admin.site.register(models.Licenses, LicensesAdmin)
class PersonsAdminForm(ModelForm):
class Meta:
model = models.Persons
fields = ['first_name', 'last_name', 'affiliation', 'organisation_name', 'attribution']
class PersonsAdmin(admin.ModelAdmin):
form = PersonsAdminForm
search_fields = ['first_name', 'last_name']
list_display = ['first_name', 'last_name', 'affiliation']
list_filter = ['last_name']
admin.site.register(models.Persons, PersonsAdmin) | 0.346873 | 0.135318 |
import pandas as pd
import plotly.graph_objects as go
from plotly.subplots import make_subplots

# Mapbox access token, read from a local file kept out of version control.
# Use a context manager so the handle is closed promptly — the original
# `open(...).read()` left the file handle open for the process lifetime.
with open('mapbox_access_token') as _token_file:
    mapbox_access_token = _token_file.read()

# Daily cumulative COVID-19 figures for Portugal (DSSG-PT community dataset).
portugal_url = 'https://raw.githubusercontent.com/dssg-pt/covid19pt-data/master/data.csv'
portugal_df = pd.read_csv(portugal_url)
#organize Portugal Data
date = portugal_df.data
date_formated=[]
for x in date:
a=x.replace('/','-')
parts = a.split('-')
mys = parts[2] + '-' + parts[1] + '-' + parts[0]
date_formated.append(mys)
date=date_formated
confirmados = portugal_df.confirmados
recuperados = portugal_df.recuperados
obitos = portugal_df.obitos
activos = confirmados-obitos-recuperados
internados = portugal_df.internados
internados_uci = portugal_df.internados_uci
population_pt = 10196707
number_beds = 35429
number_ventilators = 1392
#calculate Percentage Grow
def percentageGrow(Current, Preview):
    """Return the growth of *Current* relative to *Preview*, in percent.

    E.g. ``percentageGrow(110, 100) == 10.0``.  Also works element-wise
    on pandas Series (used below with ``.shift(1)`` for day-over-day
    growth); when *Preview* is 0 the pandas division yields inf/NaN.
    """
    return (Current - Preview) / Preview * 100
#Map Portugal
scale=[portugal_df.confirmados_arsnorte.iloc[-1],
portugal_df.confirmados_arscentro.iloc[-1],
portugal_df.confirmados_arslvt.iloc[-1],
portugal_df.confirmados_arsalentejo.iloc[-1],
portugal_df.confirmados_arsalgarve.iloc[-1],
portugal_df.confirmados_acores.iloc[-1],
portugal_df.confirmados_madeira.iloc[-1],
portugal_df.confirmados_estrangeiro.iloc[-1]]
size=[]
for x in enumerate(scale):
if x[1]<200:
size.append(10)
if x[1]>200 and x[1]<400:
size.append(11)
if x[1]>400 and x[1]<600:
size.append(12)
if x[1]>600 and x[1]<800:
size.append(13)
if x[1]>600 and x[1]<800:
size.append(13)
if x[1]>800 and x[1]<1000:
size.append(14)
if x[1]>1000 and x[1]<1200:
size.append(15)
if x[1]>1200 and x[1]<1400:
size.append(16)
if x[1]>1400 and x[1]<1600:
size.append(17)
if x[1]>1600 and x[1]<1800:
size.append(18)
if x[1]>1800 and x[1]<2000:
size.append(19)
# Map of Portugal: one scatter marker per health region / island group,
# sized by `size` and carrying the latest confirmed counts.
# NOTE(review): lat/lon and hovertext list 7 locations, while `size` and
# `text` carry 8 values (the 8th is confirmados_estrangeiro, cases abroad)
# — the extra value has no coordinate; confirm whether an "estrangeiro"
# marker was intended or the 8th entry should be dropped.
fig_map = go.Figure(go.Scattermapbox(
        lat=['41.1567','40.2033','38.7059',
             '38.5586','37.189','37.794594',
             '32.3716'],
        lon=['-8.6239','-8.4103','-9.1443',
             '-7.9084','-8.4412','-25.506134',
             '-16.2749'],
        hovertext=['Norte '+ str(portugal_df.confirmados_arsnorte.iloc[-1])+' Casos',
                   'Centro'+ str(portugal_df.confirmados_arscentro.iloc[-1])+' Casos',
                   'Lisboa e V.Tejo'+ str(portugal_df.confirmados_arslvt.iloc[-1])+' Casos',
                   'Alentejo'+ str(portugal_df.confirmados_arsalentejo.iloc[-1])+' Casos',
                   'Algarve'+ str(portugal_df.confirmados_arsalgarve.iloc[-1])+' Casos',
                   'Açores'+ str(portugal_df.confirmados_acores.iloc[-1])+' Casos',
                   'Madeira'+ str(portugal_df.confirmados_madeira.iloc[-1])+' Casos',],
        mode='markers',
        marker=go.scattermapbox.Marker(
            size=size,
            color='red'
        ),
        text=[portugal_df.confirmados_arsnorte.iloc[-1],
              portugal_df.confirmados_arscentro.iloc[-1],
              portugal_df.confirmados_arslvt.iloc[-1],
              portugal_df.confirmados_arsalentejo.iloc[-1],
              portugal_df.confirmados_arsalgarve.iloc[-1],
              portugal_df.confirmados_acores.iloc[-1],
              portugal_df.confirmados_madeira.iloc[-1],
              portugal_df.confirmados_estrangeiro.iloc[-1]],
        ))
# Apply a shared Mapbox layout (custom style, centred on Lisbon, zoomed
# out to show the whole country).  The list currently holds a single map;
# the loop keeps the styling reusable if more maps are added.
maps=[fig_map]
for elements in maps:
    elements.update_layout(
        autosize=True,
        mapbox=dict(
            accesstoken=mapbox_access_token,
            style = 'mapbox://styles/fredericopimpao/ck84zqv9l0esk1jqm4xuxmldn',
            bearing=0,
            center=dict(
                # Lisbon coordinates.
                lat=38.7059,
                lon=-9.1443,
            ),
            pitch=0,
            zoom=4,
        ),
    )
############################################################################ Graphs
# Four time-series figures: national cases, cases per region, national
# day-over-day growth %, and growth % per region.  The three parallel
# lists below (names, colours, data series) are indexed in lockstep with
# `line_graphs` by the trace-building loop at the bottom.
fig_cases_pt = go.Figure()
fig_cases_zones = go.Figure()
fig_percentage_timeline_grow_pt = go.Figure()
fig_percentage_timeline_grow_zones = go.Figure()
line_graphs = [fig_cases_pt,
               fig_cases_zones,
               fig_percentage_timeline_grow_pt,
               fig_percentage_timeline_grow_zones
               ]
# Line Graph
# Trace labels, one inner list per figure above.
line_graphs_name = [['confirmados','casos activos','obitos','recuperados'],
                    ['Norte',
                     'Centro',
                     'Lisboa e V.Tejo',
                     'Alentejo',
                     'Algarve',
                     'Açores',
                     'Madeira',
                     'estrangeiro'],
                    ['Crescimento'],
                    ['Norte',
                     'Centro',
                     'Lisboa e V.Tejo',
                     'Alentejo',
                     'Algarve',
                     'Açores',
                     'Madeira',
                     'estrangeiro'],
                    ]
# Trace colours, aligned one-to-one with line_graphs_name.
line_graphs_color=[['grey',
                    'black',
                    'red','#7FFFD4'],
                   ['grey',
                    'black',
                    '#7FFFD4',
                    '#72e5be',
                    '#65cca9',
                    'red',
                    '#cc0000',
                    '#990000',],
                   ['grey'],
                   ['grey',
                    'black',
                    '#7FFFD4',
                    '#72e5be',
                    '#65cca9',
                    'red',
                    '#cc0000',
                    '#990000',],
                   ]
# The series to plot; growth figures use percentageGrow against the
# previous day's value (shift(1)).
line_graphs_data=[[confirmados,
                   activos,
                   obitos,
                   recuperados],
                  [portugal_df.confirmados_arsnorte,
                   portugal_df.confirmados_arscentro,
                   portugal_df.confirmados_arslvt,
                   portugal_df.confirmados_arsalentejo,
                   portugal_df.confirmados_arsalgarve,
                   portugal_df.confirmados_acores,
                   portugal_df.confirmados_madeira,
                   portugal_df.confirmados_estrangeiro],
                  [percentageGrow(confirmados,confirmados.shift(1))],
                  [percentageGrow(portugal_df.confirmados_arsnorte, portugal_df.confirmados_arsnorte.shift(1)),
                   percentageGrow(portugal_df.confirmados_arscentro, portugal_df.confirmados_arscentro.shift(1)),
                   percentageGrow(portugal_df.confirmados_arslvt, portugal_df.confirmados_arslvt.shift(1)),
                   percentageGrow(portugal_df.confirmados_arsalentejo, portugal_df.confirmados_arsalentejo.shift(1)),
                   percentageGrow(portugal_df.confirmados_arsalgarve, portugal_df.confirmados_arsalgarve.shift(1)),
                   percentageGrow(portugal_df.confirmados_acores, portugal_df.confirmados_acores.shift(1)),
                   percentageGrow(portugal_df.confirmados_madeira, portugal_df.confirmados_madeira.shift(1)),
                   percentageGrow(portugal_df.confirmados_estrangeiro, portugal_df.confirmados_estrangeiro.shift(1))],
                  ]
# Add one lines+markers trace per (figure, series) pair.
for fig_index, fig in enumerate(line_graphs):
    for index, val in enumerate(line_graphs_data[fig_index]):
        fig.add_trace(go.Scatter(
            x=date,
            y=val,
            name= line_graphs_name[fig_index][index],
            mode='lines+markers',
            line=dict(
                color=line_graphs_color[fig_index][index],
                width=1)
        ))
#Percentage Graphs
# Inputs for four donut charts (built below): outcome split, ICU beds vs
# ventilators, cases per region, and hospitalised vs total bed capacity.
# circle_graph_data[i] pairs with circle_graph_name[i].
circle_graph_data=[[activos.iloc[-1], recuperados.iloc[-1], obitos.iloc[-1]],
                   [internados_uci.iloc[-1], number_ventilators],
                   [portugal_df.confirmados_arsnorte.iloc[-1],
                    portugal_df.confirmados_arscentro.iloc[-1],
                    portugal_df.confirmados_arslvt.iloc[-1],
                    portugal_df.confirmados_arsalentejo.iloc[-1],
                    portugal_df.confirmados_arsalgarve.iloc[-1],
                    portugal_df.confirmados_acores.iloc[-1],
                    portugal_df.confirmados_madeira.iloc[-1],
                    portugal_df.confirmados_estrangeiro.iloc[-1]],
                   [internados.iloc[-1],number_beds]
                   ]
circle_graph_name=[['Activos','Recuperados','Obitos'],
                   ['Internados UCI','Ventiladores'],
                   ['Norte',
                    'Centro',
                    'Lisboa e V.Tejo',
                    'Alentejo',
                    'Algarve',
                    'Açores',
                    'Madeira',
                    'Estrangeiro'],
                   ['Internados', 'numero de camas']
                   ]
# NOTE(review): circle_graph_color is never referenced again in this file
# — the pie colours are set explicitly in the styling loop further down;
# consider removing it.
circle_graph_color=[['grey', 'black', '#7FFFD4',]]
# Build one thin-ring donut chart (hole=.95) per dataset defined above.
# The original code first created four empty Figure placeholders and then
# rebound each list slot to a brand-new Figure, leaving the individually
# named fig_percentage_* variables pointing at the discarded empties; it
# also re-declared `line_graphs` with an identical list.  Here the charts
# are built directly and the names are bound to the real figures.
circle_graph = [
    go.Figure(data=[go.Pie(
        labels=labels,
        values=values,
        textinfo='label+percent',
        hole=.95,
        insidetextorientation='radial',
    )])
    for labels, values in zip(circle_graph_name, circle_graph_data)
]
(fig_percentage_dead_rec_act,
 fig_percentage_uci,
 fig_percentage_zones,
 fig_percentage_beds) = circle_graph
#Style
# Shared styling for the four line charts: Portuguese axis labels/titles
# indexed in lockstep with `line_graphs`.
line_y_axis_name=['numero de casos','numero de casos','percentagem','percentagem']
line_title=['numero de casos em portugal','numero de casos em portugal por zona','percentagem de crescimento em portugal','percentagem de crescimento em portugal por zona']
for x, elements in enumerate(line_graphs):
    elements.update_layout(
        title={
            'text': line_title[x],
            'y':0.9,
            'x':0.5,
            'xanchor': 'center',
            'yanchor': 'top'},
        xaxis_title="",
        yaxis_title=line_y_axis_name[x],
        plot_bgcolor="#FFFFFF",
        font=dict(
            family="Courier New, monospace",
            size=14,
            color="#7f7f7f"
        )
    )
    elements.update_yaxes(showgrid=True, gridwidth=1, gridcolor='LightPink')
# Shared styling for the four donut charts; the same 8-colour palette is
# applied to every pie (extra colours are ignored for pies with fewer
# slices).
circle_title=['percentagem de infectados/ obitos/ recuperados','percentagem de ventiladores livres','percentagem de casos/zona ','percentagem de camas livres']
for x, elements in enumerate(circle_graph):
    elements.update_layout(
        showlegend=True,
        legend=dict(
            bgcolor='rgba(0, 0, 0, 0)',
            x=1,
            y=1),
        title={
            'text': circle_title[x],
            'y':.95,
            'x':0.1,
            },
        plot_bgcolor="#FFFFFF",
        font=dict(
            family="Courier New, monospace",
            size=14,
            color="#7f7f7f"
        )
    )
    elements.update_traces(hoverinfo='label+percent', textinfo='value', textfont_size=14,
                           marker=dict(colors=['black','#7FFFD4','#72e5be','#65cca9','LightGrey','red','#cc0000','#990000'], line=dict(color='#000000', width=.2)))
#pip install dash
# Dash application backed by an explicit Flask server (so the app can be
# served by a WSGI host), with Bootstrap 4 for the grid layout.
import dash
import flask
import dash_html_components as html   # NOTE(review): deprecated — newer Dash ships this as dash.html
import dash_core_components as dcc    # NOTE(review): deprecated — newer Dash ships this as dash.dcc
server = flask.Flask('app')
app = dash.Dash('app', server=server,
                external_stylesheets=['https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/css/bootstrap.min.css'])
app.title = 'Covid19PT'
# Children of the page's root Div, appended in display order below.
data = []
# Page header followed by the four headline numbers (active, confirmed,
# deaths, recovered), each taken from the last row of its series.
data.append(html.H1(
    'COVID-19 Portugal',
    className='col-sm-12',
    style={
        'color':'black',
        'margin-left': '7vh',
        'fontSize': '14',
        'font-family':'Courier New, monospace',
    }
))
data.append(html.H5(
    'numero de casos activos ' + str(activos.iloc[-1]),
    className='col-sm-12',
    style={
        'color':'black',
        'margin-left': '7vh',
        'fontSize': '14',
        'font-family':'Courier New, monospace',
    }
))
data.append(html.H5(
    'numero de casos confirmados ' + str(confirmados.iloc[-1]),
    className='col-sm-12',
    style={
        'color':'black',
        'margin-left': '7vh',
        'fontSize': '14',
        'font-family':'Courier New, monospace',
    }
))
data.append(html.H5(
    'numero de obitos ' + str(obitos.iloc[-1]),
    className='col-sm-12',
    style={
        'color':'black',
        'margin-left': '7vh',
        'fontSize': '14',
        'font-family':'Courier New, monospace',
    }
))
data.append(html.H5(
    'numero de recuperados ' + str(recuperados.iloc[-1]),
    className='col-sm-12',
    style={
        'color':'black',
        'margin-left': '7vh',
        'fontSize': '14',
        'font-family':'Courier New, monospace',
    }
))
# PayPal donation link.  The original href contained "¤cy_code" — an
# HTML-entity mangling of "&currency_code" ("&curren" decoded to "¤") —
# which broke the query string; restored here.  "<EMAIL>" is an
# anonymisation placeholder and is left as-is.
data.append(html.A(
    'doações',
    href='https://www.paypal.com/cgi-bin/webscr?cmd=_donations&business=<EMAIL>&lc=US&item_name=Apoio+ao+desenvolvimento+de+grafismos+relacionados+com+covid19&no_note=0&cn=&currency_code=USD&bn=PP-DonationsBF:btn_donateCC_LG.gif:NonHosted',
    target="_blank",
    className='col-sm-12',
    style={
        'color':'red',
        'margin-left': '7vh',
        'fontSize': '3em',
        'font-family':'Courier New, monospace',
    }
))
# The charts, laid out on the Bootstrap grid: the map and the two
# per-zone charts full-width, then the national/percentage charts and
# the four donuts paired side by side.
data.append(dcc.Graph(id='example7',figure=fig_map, className='col-sm-12', style={'width': '50vh', 'height': '50vh'}))
data.append(dcc.Graph(id='example2',figure=fig_cases_zones, className='col-sm-12'))
data.append(dcc.Graph(id='example4',figure=fig_percentage_timeline_grow_zones, className='col-sm-12'))
data.append(dcc.Graph(id='example1',figure=fig_cases_pt, className='col-sm-6'))
data.append(dcc.Graph(id='example3',figure=fig_percentage_timeline_grow_pt, className='col-sm-6'))
data.append(dcc.Graph(id='example80',figure=circle_graph[2], className='col-sm-6'))
data.append(dcc.Graph(id='example81',figure=circle_graph[3], className='col-sm-6'))
data.append(dcc.Graph(id='example79',figure=circle_graph[0], className='col-sm-6'))
data.append(dcc.Graph(id='example5',figure=circle_graph[1], className='col-sm-6'))
# Footer: project link, liability disclaimer (Portuguese), and the final
# root-Div assembly of everything appended to `data`.
data.append(html.A(
    '+ informação sobre o projecto',
    href='https://github.com/fredericopimpao/Covid19PT',
    target="_blank",
    className='col-sm-12',
    style={
        'color':'red',
        'margin-left': '7vh',
        'fontSize': '3em',
        'font-family':'Courier New, monospace',
    }
))
data.append(html.H5(
    'os autores não terão responsabilidade para o licenciado ou outras pessoas por danos diretos, indiretos, especiais, incidentais, conseqüenciais, exemplares ou punitivos de qualquer personagem, incluindo, sem limitação de serviços, procuramentos, procuramentos dados ou lucros ou interrupção de negócios, causados e qualquer teoria de contrato, garantia, atribuição (incluindo negligência), responsabilidade do produto ou de outra forma, decorrendo de qualquer forma de uso do software, mesmo se avisado da possibilidade de tais danos.',
    className='col-sm-10',
    style={
        'color':'grey',
        'margin-left': '7vh',
        'fontSize': '1em',
        'font-family':'Courier New, monospace',
    }
))
app.layout = html.Div(data,className='row')
if __name__ == '__main__':
    # Run the Dash development server.  The original line was garbled with
    # fused dataset-separator text ("| covid19PortugalPT.py | import
    # pandas as pd"); restored to the plain call.
    app.run_server(debug=False)
# NOTE(review): everything from here to the end of this chunk duplicates
# the top of the file verbatim (it looks like residue from a file join or
# truncated concatenation) — confirm and consider deleting the whole
# duplicated section.  Re-running it re-downloads the CSV and rebinds the
# same module-level names.
import plotly.graph_objects as go
from plotly.subplots import make_subplots
#mapbox_token
# NOTE(review): file handle left open — prefer a `with` block.
mapbox_access_token = open('mapbox_access_token').read()
#get portugal data
portugal_url='https://raw.githubusercontent.com/dssg-pt/covid19pt-data/master/data.csv'
portugal_df=pd.read_csv(portugal_url)
#organize Portugal Data
# Reformat DD-MM-YYYY (or DD/MM/YYYY) dates into YYYY-MM-DD strings.
date = portugal_df.data
date_formated=[]
for x in date:
    a=x.replace('/','-')
    parts = a.split('-')
    mys = parts[2] + '-' + parts[1] + '-' + parts[0]
    date_formated.append(mys)
date=date_formated
# Cumulative national series and fixed reference figures.
confirmados = portugal_df.confirmados
recuperados = portugal_df.recuperados
obitos = portugal_df.obitos
activos = confirmados-obitos-recuperados
internados = portugal_df.internados
internados_uci = portugal_df.internados_uci
population_pt = 10196707
number_beds = 35429
number_ventilators = 1392
#calculate Percentage Grow
def percentageGrow(Current, Preview):
    """Percentage change of *Current* with respect to *Preview*.

    Example: ``percentageGrow(110, 100)`` is ``10.0``.  Applied
    element-wise to pandas Series elsewhere in this script.
    """
    return 100 * (Current - Preview) / Preview
# Map of Portugal: latest confirmed-case count per health region (last row
# of each series); used for marker sizing and hover labels.
scale=[portugal_df.confirmados_arsnorte.iloc[-1],
       portugal_df.confirmados_arscentro.iloc[-1],
       portugal_df.confirmados_arslvt.iloc[-1],
       portugal_df.confirmados_arsalentejo.iloc[-1],
       portugal_df.confirmados_arsalgarve.iloc[-1],
       portugal_df.confirmados_acores.iloc[-1],
       portugal_df.confirmados_madeira.iloc[-1],
       portugal_df.confirmados_estrangeiro.iloc[-1]]
def _marker_size(case_count):
    """Map a regional case count to a Scattermapbox marker size.

    One extra size unit per 200 confirmed cases, starting at 10 and capped
    at 19 — the same bucketing as before (<200 -> 10, 200-400 -> 11, ...).
    """
    return min(10 + int(case_count) // 200, 19)

# Bug fix: the original if-chain duplicated the 600-800 branch (two appends
# for those counts), appended nothing at exact multiples of 200, and
# appended nothing for counts >= 2000 — all of which desynchronised `size`
# from `scale`.  The mapping below yields exactly one size per region.
size = [_marker_size(cases) for cases in scale]
# Scattermapbox with one marker per health region, sized by case count.
# NOTE(review): lat/lon/hovertext carry 7 entries but size/text carry 8
# ('estrangeiro' has no coordinates) -- confirm the extra entry is intended.
fig_map = go.Figure(go.Scattermapbox(
    # Approximate coordinates of each ARS health region plus the islands.
    lat=['41.1567','40.2033','38.7059',
         '38.5586','37.189','37.794594',
         '32.3716'],
    lon=['-8.6239','-8.4103','-9.1443',
         '-7.9084','-8.4412','-25.506134',
         '-16.2749'],
    hovertext=['Norte '+ str(portugal_df.confirmados_arsnorte.iloc[-1])+' Casos',
               'Centro'+ str(portugal_df.confirmados_arscentro.iloc[-1])+' Casos',
               'Lisboa e V.Tejo'+ str(portugal_df.confirmados_arslvt.iloc[-1])+' Casos',
               'Alentejo'+ str(portugal_df.confirmados_arsalentejo.iloc[-1])+' Casos',
               'Algarve'+ str(portugal_df.confirmados_arsalgarve.iloc[-1])+' Casos',
               'Açores'+ str(portugal_df.confirmados_acores.iloc[-1])+' Casos',
               'Madeira'+ str(portugal_df.confirmados_madeira.iloc[-1])+' Casos',],
    mode='markers',
    marker=go.scattermapbox.Marker(
        size=size,
        color='red'
    ),
    text=[portugal_df.confirmados_arsnorte.iloc[-1],
          portugal_df.confirmados_arscentro.iloc[-1],
          portugal_df.confirmados_arslvt.iloc[-1],
          portugal_df.confirmados_arsalentejo.iloc[-1],
          portugal_df.confirmados_arsalgarve.iloc[-1],
          portugal_df.confirmados_acores.iloc[-1],
          portugal_df.confirmados_madeira.iloc[-1],
          portugal_df.confirmados_estrangeiro.iloc[-1]],
))
# Shared layout (token, custom style, centred on Lisbon, country-level zoom).
maps=[fig_map]
for elements in maps:
    elements.update_layout(
        autosize=True,
        mapbox=dict(
            accesstoken=mapbox_access_token,
            style = 'mapbox://styles/fredericopimpao/ck84zqv9l0esk1jqm4xuxmldn',
            bearing=0,
            center=dict(
                lat=38.7059,
                lon=-9.1443,
            ),
            pitch=0,
            zoom=4,
        ),
    )
############################################################################ Graphs
# Four time-series figures: national counts, per-region counts, national
# day-over-day growth rate, per-region growth rate.
fig_cases_pt = go.Figure()
fig_cases_zones = go.Figure()
fig_percentage_timeline_grow_pt = go.Figure()
fig_percentage_timeline_grow_zones = go.Figure()
line_graphs = [fig_cases_pt,
               fig_cases_zones,
               fig_percentage_timeline_grow_pt,
               fig_percentage_timeline_grow_zones
               ]
# Parallel tables, indexed like `line_graphs`: trace names, trace colors
# and the series plotted by each figure (one inner list per figure).
line_graphs_name = [['confirmados','casos activos','obitos','recuperados'],
                    ['Norte',
                     'Centro',
                     'Lisboa e V.Tejo',
                     'Alentejo',
                     'Algarve',
                     'Açores',
                     'Madeira',
                     'estrangeiro'],
                    ['Crescimento'],
                    ['Norte',
                     'Centro',
                     'Lisboa e V.Tejo',
                     'Alentejo',
                     'Algarve',
                     'Açores',
                     'Madeira',
                     'estrangeiro'],
                    ]
line_graphs_color=[['grey',
                    'black',
                    'red','#7FFFD4'],
                   ['grey',
                    'black',
                    '#7FFFD4',
                    '#72e5be',
                    '#65cca9',
                    'red',
                    '#cc0000',
                    '#990000',],
                   ['grey'],
                   ['grey',
                    'black',
                    '#7FFFD4',
                    '#72e5be',
                    '#65cca9',
                    'red',
                    '#cc0000',
                    '#990000',],
                   ]
# Growth rates compare each day against the previous row via shift(1).
line_graphs_data=[[confirmados,
                   activos,
                   obitos,
                   recuperados],
                  [portugal_df.confirmados_arsnorte,
                   portugal_df.confirmados_arscentro,
                   portugal_df.confirmados_arslvt,
                   portugal_df.confirmados_arsalentejo,
                   portugal_df.confirmados_arsalgarve,
                   portugal_df.confirmados_acores,
                   portugal_df.confirmados_madeira,
                   portugal_df.confirmados_estrangeiro],
                  [percentageGrow(confirmados,confirmados.shift(1))],
                  [percentageGrow(portugal_df.confirmados_arsnorte, portugal_df.confirmados_arsnorte.shift(1)),
                   percentageGrow(portugal_df.confirmados_arscentro, portugal_df.confirmados_arscentro.shift(1)),
                   percentageGrow(portugal_df.confirmados_arslvt, portugal_df.confirmados_arslvt.shift(1)),
                   percentageGrow(portugal_df.confirmados_arsalentejo, portugal_df.confirmados_arsalentejo.shift(1)),
                   percentageGrow(portugal_df.confirmados_arsalgarve, portugal_df.confirmados_arsalgarve.shift(1)),
                   percentageGrow(portugal_df.confirmados_acores, portugal_df.confirmados_acores.shift(1)),
                   percentageGrow(portugal_df.confirmados_madeira, portugal_df.confirmados_madeira.shift(1)),
                   percentageGrow(portugal_df.confirmados_estrangeiro, portugal_df.confirmados_estrangeiro.shift(1))],
                  ]
# Add one line trace per series to each of the four figures, pairing
# figure <-> names <-> colors <-> data by index.
for fig_index, fig in enumerate(line_graphs):
    for index, val in enumerate(line_graphs_data[fig_index]):
        fig.add_trace(go.Scatter(
            x=date,
            y=val,
            name= line_graphs_name[fig_index][index],
            mode='lines+markers',
            line=dict(
                color=line_graphs_color[fig_index][index],
                width=1)
        ))
#Percentage Graphs
# Data/labels for the four donut charts: outcome split, ICU patients vs
# ventilators, cases per region, hospitalised vs total beds.
circle_graph_data=[[activos.iloc[-1], recuperados.iloc[-1], obitos.iloc[-1]],
                   [internados_uci.iloc[-1], number_ventilators],
                   [portugal_df.confirmados_arsnorte.iloc[-1],
                    portugal_df.confirmados_arscentro.iloc[-1],
                    portugal_df.confirmados_arslvt.iloc[-1],
                    portugal_df.confirmados_arsalentejo.iloc[-1],
                    portugal_df.confirmados_arsalgarve.iloc[-1],
                    portugal_df.confirmados_acores.iloc[-1],
                    portugal_df.confirmados_madeira.iloc[-1],
                    portugal_df.confirmados_estrangeiro.iloc[-1]],
                   [internados.iloc[-1],number_beds]
                   ]
circle_graph_name=[['Activos','Recuperados','Obitos'],
                   ['Internados UCI','Ventiladores'],
                   ['Norte',
                    'Centro',
                    'Lisboa e V.Tejo',
                    'Alentejo',
                    'Algarve',
                    'Açores',
                    'Madeira',
                    'Estrangeiro'],
                   ['Internados', 'numero de camas']
                   ]
# NOTE(review): circle_graph_color is never referenced afterwards -- the pie
# slice colors are set later via update_traces; presumably dead code.
circle_graph_color=[['grey', 'black', '#7FFFD4',]]
fig_percentage_dead_rec_act = go.Figure()
fig_percentage_uci = go.Figure()
fig_percentage_zones = go.Figure()
fig_percentage_beds = go.Figure()
circle_graph =[fig_percentage_dead_rec_act,
               fig_percentage_uci,
               fig_percentage_zones,
               fig_percentage_beds,
               ]
# Replace each placeholder with a fresh donut (hole=.95).  Note the
# fig_percentage_* variables keep pointing at the original empty figures;
# only the list entries are rebound.
for index, fig in enumerate(circle_graph):
    circle_graph[index] = go.Figure(data=[go.Pie(
        labels=circle_graph_name[index],
        values=circle_graph_data[index],
        textinfo='label+percent',
        hole = .95,
        insidetextorientation='radial',
    )])
# NOTE(review): identical re-definition of line_graphs from above --
# redundant but harmless.
line_graphs = [fig_cases_pt,
               fig_cases_zones,
               fig_percentage_timeline_grow_pt,
               fig_percentage_timeline_grow_zones
               ]
#Style
# Apply a shared look to the four line charts: centred title, white
# background, Courier font, pink horizontal grid lines.
line_y_axis_name=['numero de casos','numero de casos','percentagem','percentagem']
line_title=['numero de casos em portugal','numero de casos em portugal por zona','percentagem de crescimento em portugal','percentagem de crescimento em portugal por zona']
for x, elements in enumerate(line_graphs):
    elements.update_layout(
        title={
            'text': line_title[x],
            'y':0.9,
            'x':0.5,
            'xanchor': 'center',
            'yanchor': 'top'},
        xaxis_title="",
        yaxis_title=line_y_axis_name[x],
        plot_bgcolor="#FFFFFF",
        font=dict(
            family="Courier New, monospace",
            size=14,
            color="#7f7f7f"
        )
    )
    elements.update_yaxes(showgrid=True, gridwidth=1, gridcolor='LightPink')
# Shared look for the four donut charts: transparent legend in the top
# right, raw values inside slices, consistent slice palette.
circle_title=['percentagem de infectados/ obitos/ recuperados','percentagem de ventiladores livres','percentagem de casos/zona ','percentagem de camas livres']
for x, elements in enumerate(circle_graph):
    elements.update_layout(
        showlegend=True,
        legend=dict(
            bgcolor='rgba(0, 0, 0, 0)',
            x=1,
            y=1),
        title={
            'text': circle_title[x],
            'y':.95,
            'x':0.1,
        },
        plot_bgcolor="#FFFFFF",
        font=dict(
            family="Courier New, monospace",
            size=14,
            color="#7f7f7f"
        )
    )
    elements.update_traces(hoverinfo='label+percent', textinfo='value', textfont_size=14,
                           marker=dict(colors=['black','#7FFFD4','#72e5be','#65cca9','LightGrey','red','#cc0000','#990000'], line=dict(color='#000000', width=.2)))
#pip install dash
import dash
import flask
import dash_html_components as html
import dash_core_components as dcc

server = flask.Flask('app')
app = dash.Dash('app', server=server,
                external_stylesheets=['https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/css/bootstrap.min.css'])
app.title = 'Covid19PT'


def _text_style(color, font_size):
    """Return the shared inline style used by the page's textual elements.

    The original repeated this dict literally for every component; only the
    color and font size ever differed.
    """
    return {
        'color': color,
        'margin-left': '7vh',
        'fontSize': font_size,
        'font-family': 'Courier New, monospace',
    }


data = []

# Page header.
data.append(html.H1(
    'COVID-19 Portugal',
    className='col-sm-12',
    style=_text_style('black', '14')))

# Headline counters: latest value of each national series.
for label, series in (('numero de casos activos ', activos),
                      ('numero de casos confirmados ', confirmados),
                      ('numero de obitos ', obitos),
                      ('numero de recuperados ', recuperados)):
    data.append(html.H5(
        label + str(series.iloc[-1]),
        className='col-sm-12',
        style=_text_style('black', '14')))

# Donation link.  URL kept verbatim; NOTE(review): the '¤cy_code' fragment
# looks like a mangled '&currency_code' -- confirm against the intended
# PayPal URL.
data.append(html.A(
    'doações',
    href='https://www.paypal.com/cgi-bin/webscr?cmd=_donations&business=<EMAIL>&lc=US&item_name=Apoio+ao+desenvolvimento+de+grafismos+relacionados+com+covid19&no_note=0&cn=¤cy_code=USD&bn=PP-DonationsBF:btn_donateCC_LG.gif:NonHosted',
    target="_blank",
    className='col-sm-12',
    style=_text_style('red', '3em')))

# Graphs: region map, per-region series, per-region growth, national series,
# national growth, then the four donut charts.
data.append(dcc.Graph(id='example7', figure=fig_map, className='col-sm-12', style={'width': '50vh', 'height': '50vh'}))
data.append(dcc.Graph(id='example2', figure=fig_cases_zones, className='col-sm-12'))
data.append(dcc.Graph(id='example4', figure=fig_percentage_timeline_grow_zones, className='col-sm-12'))
data.append(dcc.Graph(id='example1', figure=fig_cases_pt, className='col-sm-6'))
data.append(dcc.Graph(id='example3', figure=fig_percentage_timeline_grow_pt, className='col-sm-6'))
data.append(dcc.Graph(id='example80', figure=circle_graph[2], className='col-sm-6'))
data.append(dcc.Graph(id='example81', figure=circle_graph[3], className='col-sm-6'))
data.append(dcc.Graph(id='example79', figure=circle_graph[0], className='col-sm-6'))
data.append(dcc.Graph(id='example5', figure=circle_graph[1], className='col-sm-6'))

# Project link and liability disclaimer.
data.append(html.A(
    '+ informação sobre o projecto',
    href='https://github.com/fredericopimpao/Covid19PT',
    target="_blank",
    className='col-sm-12',
    style=_text_style('red', '3em')))
data.append(html.H5(
    'os autores não terão responsabilidade para o licenciado ou outras pessoas por danos diretos, indiretos, especiais, incidentais, conseqüenciais, exemplares ou punitivos de qualquer personagem, incluindo, sem limitação de serviços, procuramentos, procuramentos dados ou lucros ou interrupção de negócios, causados e qualquer teoria de contrato, garantia, atribuição (incluindo negligência), responsabilidade do produto ou de outra forma, decorrendo de qualquer forma de uso do software, mesmo se avisado da possibilidade de tais danos.',
    className='col-sm-10',
    style=_text_style('grey', '1em')))

# Assemble the page: all components inside a single Bootstrap row.
app.layout = html.Div(data, className='row')
if __name__ == '__main__':
app.run_server(debug=False) | 0.270673 | 0.154727 |
import argparse
from collections import defaultdict
from os.path import splitext, dirname
from g2p_en import G2p
import pandas as pd
import time
from utils import *
def get_parser():
    """Return the CLI parser for LibriPhrase dataset generation."""
    p = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # Input/output locations.
    p.add_argument('--libripath', type=str,
                   default='/home/hkshin/server_hdd/Database/LibriSpeech_clean_wav/',
                   help='Folder for LibriSpeech ASR corpus with wav files')
    p.add_argument('--newpath', type=str,
                   default='/home/hkshin/server_hdd/Database/LibriPhrase_diffspk_all/',
                   help='Folder to save generated LibriPhrase wav files')
    p.add_argument('--wordalign', type=str,
                   default='./data/librispeech_other_train_500h_all_utt.csv',
                   help='word alignment file (csv format)')
    p.add_argument('--output', type=str,
                   default='./data/libriphrase_diffspk_all.csv',
                   help='output filename (csv format)')
    # Pair-generation parameters.
    p.add_argument('--numpair', type=int, default=3)
    p.add_argument('--maxspk', type=int, default=1166,
                   help='the maximum number of speakers (default: 1166)')
    p.add_argument('--maxword', type=int, default=4,
                   help='the maximum number of words (default: 4)')
    p.add_argument('--mode', type=str, default='diffspk_all',
                   choices=['samespk_easy', 'diffspk_easy', 'diffspk_hard', 'diffspk_all'])
    return p
def main(args):
    """Generate LibriPhrase anchor/positive/negative pairs and export wavs.

    Reads the word-alignment CSV, builds anchor phrases per speaker, pairs
    them with positive and negative examples for each phrase length (1 to
    ``args.maxword`` words), exports the segmented wav files, and saves one
    metadata CSV per word class.
    """
    # Bug fix: time.clock() was removed in Python 3.8; perf_counter() is
    # the documented replacement for measuring elapsed wall time.
    start_time = time.perf_counter()
    rootpath = args.libripath
    rootpath_new = args.newpath
    word_alignment = args.wordalign
    output_filename = args.output
    # set parameters
    spk_k = args.maxspk
    max_num_words = args.maxword
    num_anchor = args.numpair
    num_pos = args.numpair
    num_neg = args.numpair
    mode = args.mode
    print('----------------------------------------------------')
    print('   Step 1 : Extract short phrase from LibriSpeech')
    print('----------------------------------------------------')
    df = extract_short_phrase_from_csv(word_alignment)
    print('----------------------------------------------------')
    print('   Step 2 : Make speaker dictionary from LibriSpeech')
    print('----------------------------------------------------')
    spk_dic = make_k_spk_dict(df, spk_k)
    g2p = G2p()
    print('----------------------------------------------------')
    print('   Step 3 : Extract anchor candidates')
    print('----------------------------------------------------')
    anchor_word_dic, anchor_lst = extract_anchor(df, spk_dic, num_anchor, num_pos)
    print('----------------------------------------------------')
    print('   Step 4 : Extract positive and negative samples')
    print('----------------------------------------------------')
    for i in range(1, max_num_words + 1):
        # Restrict the frame to phrases of exactly i words.
        print('Step 4-1 : Extract df word class {}'.format(i))
        df_word_class = extract_df_word_class(df, i)
        word = [row['text'] for idx, row in df_word_class.iterrows()]
        word_lst = list(set(word))
        # Index rows by (speaker id, phrase text) for fast pairing.
        df_dic_key = defaultdict(list)
        for idx, row in df_word_class.iterrows():
            df_dic_key[(dirname(row['audio_filename']).split('/')[-2], row['text'])].append(row)
        # Positive pairs: same phrase, different utterance.
        print('Step 4-2 : Start making positive')
        df_result_pos = make_positive(anchor_word_dic, df_dic_key, num_anchor, num_pos, mode, word_class=i)
        # Negative pairs; 'hard' negatives are phonetically close (G2P).
        print('Step 4-3 : Start making negative')
        total_word_dic = extract_total_word(df, word_lst, g2p)
        if mode in ['diffspk_hard', 'diffspk_all']:
            hard_neg_dic = make_hard_negative(anchor_word_dic, total_word_dic, num_neg, word_class=i)
        else:
            hard_neg_dic = None
        df_result_neg = make_negative(hard_neg_dic, df_dic_key, df_result_pos, num_neg, mode, word_class=i)
        print('# of positive case rows = ', len(df_result_pos))
        print('# of negative case rows = ', len(df_result_neg))
        # Merge, order deterministically, export wavs, and save metadata.
        total_df = pd.concat([df_result_pos, df_result_neg], ignore_index=True)
        total_df = total_df.sort_values(by=['anchor_spk', 'anchor_text', 'target', 'type', 'comparison_spk'], ascending=[True, True, True, True, True])
        total_df = total_df.reset_index(drop=True)
        print('Step 4-4 : Start exporting wav files')
        total_df = save_wav(total_df, rootpath, rootpath_new, str(i) + 'word')
        total_df = total_df.sort_values(by=['anchor_spk', 'anchor_text', 'target', 'type', 'comparison_spk'], ascending=[True, True, True, True, True])
        total_df = total_df.reset_index(drop=True)
        print('Step 4-5 : Save csv file')
        total_df.to_csv(splitext(output_filename)[0] + '_' + str(i) + 'word' + splitext(output_filename)[1], index=False)
        print('* Finish {} word class'.format(i))
        # Cumulative elapsed time after each word class.
        print('* Time check: ', (time.perf_counter() - start_time) / (60 * 60), 'hours')
if __name__ == '__main__':
args = get_parser().parse_args()
main(args) | libriphrase.py | import argparse
from collections import defaultdict
from os.path import splitext, dirname
from g2p_en import G2p
import pandas as pd
import time
from utils import *
def get_parser():
    """Assemble the command-line interface for LibriPhrase generation."""
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # Path-like string options share the same add_argument shape.
    string_options = (
        ('--libripath', '/home/hkshin/server_hdd/Database/LibriSpeech_clean_wav/',
         'Folder for LibriSpeech ASR corpus with wav files'),
        ('--newpath', '/home/hkshin/server_hdd/Database/LibriPhrase_diffspk_all/',
         'Folder to save generated LibriPhrase wav files'),
        ('--wordalign', './data/librispeech_other_train_500h_all_utt.csv',
         'word alignment file (csv format)'),
        ('--output', './data/libriphrase_diffspk_all.csv',
         'output filename (csv format)'),
    )
    for flag, default, help_text in string_options:
        parser.add_argument(flag, type=str, default=default, help=help_text)
    # Numeric/categorical generation parameters.
    parser.add_argument('--numpair', type=int, default=3)
    parser.add_argument('--maxspk', type=int, default=1166,
                        help='the maximum number of speakers (default: 1166)')
    parser.add_argument('--maxword', type=int, default=4,
                        help='the maximum number of words (default: 4)')
    parser.add_argument('--mode', type=str, default='diffspk_all',
                        choices=['samespk_easy', 'diffspk_easy', 'diffspk_hard', 'diffspk_all'])
    return parser
def main(args):
    """Generate LibriPhrase anchor/positive/negative pairs and export wavs.

    Reads the word-alignment CSV, builds anchor phrases per speaker, pairs
    them with positive and negative examples for each phrase length (1 to
    ``args.maxword`` words), exports the segmented wav files, and saves one
    metadata CSV per word class.
    """
    # Bug fix: time.clock() was removed in Python 3.8; perf_counter() is
    # the documented replacement for measuring elapsed wall time.
    start_time = time.perf_counter()
    rootpath = args.libripath
    rootpath_new = args.newpath
    word_alignment = args.wordalign
    output_filename = args.output
    # set parameters
    spk_k = args.maxspk
    max_num_words = args.maxword
    num_anchor = args.numpair
    num_pos = args.numpair
    num_neg = args.numpair
    mode = args.mode
    print('----------------------------------------------------')
    print('   Step 1 : Extract short phrase from LibriSpeech')
    print('----------------------------------------------------')
    df = extract_short_phrase_from_csv(word_alignment)
    print('----------------------------------------------------')
    print('   Step 2 : Make speaker dictionary from LibriSpeech')
    print('----------------------------------------------------')
    spk_dic = make_k_spk_dict(df, spk_k)
    g2p = G2p()
    print('----------------------------------------------------')
    print('   Step 3 : Extract anchor candidates')
    print('----------------------------------------------------')
    anchor_word_dic, anchor_lst = extract_anchor(df, spk_dic, num_anchor, num_pos)
    print('----------------------------------------------------')
    print('   Step 4 : Extract positive and negative samples')
    print('----------------------------------------------------')
    for i in range(1, max_num_words + 1):
        # Restrict the frame to phrases of exactly i words.
        print('Step 4-1 : Extract df word class {}'.format(i))
        df_word_class = extract_df_word_class(df, i)
        word = [row['text'] for idx, row in df_word_class.iterrows()]
        word_lst = list(set(word))
        # Index rows by (speaker id, phrase text) for fast pairing.
        df_dic_key = defaultdict(list)
        for idx, row in df_word_class.iterrows():
            df_dic_key[(dirname(row['audio_filename']).split('/')[-2], row['text'])].append(row)
        # Positive pairs: same phrase, different utterance.
        print('Step 4-2 : Start making positive')
        df_result_pos = make_positive(anchor_word_dic, df_dic_key, num_anchor, num_pos, mode, word_class=i)
        # Negative pairs; 'hard' negatives are phonetically close (G2P).
        print('Step 4-3 : Start making negative')
        total_word_dic = extract_total_word(df, word_lst, g2p)
        if mode in ['diffspk_hard', 'diffspk_all']:
            hard_neg_dic = make_hard_negative(anchor_word_dic, total_word_dic, num_neg, word_class=i)
        else:
            hard_neg_dic = None
        df_result_neg = make_negative(hard_neg_dic, df_dic_key, df_result_pos, num_neg, mode, word_class=i)
        print('# of positive case rows = ', len(df_result_pos))
        print('# of negative case rows = ', len(df_result_neg))
        # Merge, order deterministically, export wavs, and save metadata.
        total_df = pd.concat([df_result_pos, df_result_neg], ignore_index=True)
        total_df = total_df.sort_values(by=['anchor_spk', 'anchor_text', 'target', 'type', 'comparison_spk'], ascending=[True, True, True, True, True])
        total_df = total_df.reset_index(drop=True)
        print('Step 4-4 : Start exporting wav files')
        total_df = save_wav(total_df, rootpath, rootpath_new, str(i) + 'word')
        total_df = total_df.sort_values(by=['anchor_spk', 'anchor_text', 'target', 'type', 'comparison_spk'], ascending=[True, True, True, True, True])
        total_df = total_df.reset_index(drop=True)
        print('Step 4-5 : Save csv file')
        total_df.to_csv(splitext(output_filename)[0] + '_' + str(i) + 'word' + splitext(output_filename)[1], index=False)
        print('* Finish {} word class'.format(i))
        # Cumulative elapsed time after each word class.
        print('* Time check: ', (time.perf_counter() - start_time) / (60 * 60), 'hours')
if __name__ == '__main__':
args = get_parser().parse_args()
main(args) | 0.164953 | 0.116111 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import functools
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.nn.init as init
import torch.optim as optim
import common.images
import common.metrics
import models
def update_argparser(parser):
    """Register PANET hyper-parameter flags and dataset-specific defaults."""
    models.update_argparser(parser)
    # Peek at the options parsed so far to branch on the dataset choice.
    args, _ = parser.parse_known_args()
    # Network hyper-parameters (flag, type, default, help), added in the
    # same order as before.
    options = (
        ('--n_resblocks', int, 32, 'Number of residual blocks in networks.'),
        ('--n_feats', int, 256, 'Channel width.'),
        ('--res_scale', float, 0.1, 'rescale residual connections'),
        ('--rgb_range', int, 1, 'rgb value range'),
        ('--n_colors', int, 3, 'rgb channel'),
    )
    for flag, flag_type, default, help_text in options:
        parser.add_argument(flag, help=help_text, default=default, type=flag_type)
    if args.dataset.startswith('div2k'):
        # Training schedule tuned for DIV2K.
        parser.set_defaults(
            train_epochs=32,
            learning_rate_milestones=(8, 16, 24),
            learning_rate_decay=0.5,
            save_checkpoints_epochs=1,
            lr_patch_size=48,
            train_temporal_size=1,
            eval_temporal_size=1,
        )
    else:
        raise NotImplementedError('Needs to tune hyper parameters for new dataset.')
def get_model_spec(params):
    """Construct the PANET model plus its training/eval components.

    Returns (model, loss_fn, optimizer, lr_scheduler, metrics).
    """
    model = MODEL(params)
    print('# of parameters: ', sum([p.numel() for p in model.parameters()]))
    optimizer = optim.Adam(model.parameters(), params.learning_rate)
    lr_scheduler = optim.lr_scheduler.MultiStepLR(optimizer,
                                                  params.learning_rate_milestones,
                                                  params.learning_rate_decay)
    # L1 loss is the training objective; it is also reported as a metric.
    loss_fn = torch.nn.L1Loss()
    metrics = {
        'loss':
            loss_fn,
        # PSNR shaves a (scale + 6)-pixel border unless no upscaling is done;
        # the Y-channel variant shaves only `scale` pixels.
        'PSNR':
            functools.partial(
                common.metrics.psnr,
                shave=0 if params.scale == 1 else params.scale + 6),
        'PSNR_Y':
            functools.partial(
                common.metrics.psnr_y,
                shave=0 if params.scale == 1 else params.scale),
    }
    return model, loss_fn, optimizer, lr_scheduler, metrics
def make_model(args, parent=False):
    """Instantiate the PANET network, optionally with dilated convolutions.

    Bug fix: the original returned ``PANET(args)``, but no ``PANET`` name
    exists in this module -- the network class defined here is ``MODEL`` --
    so every call raised NameError.  Presumably the class was renamed;
    confirm against the upstream PANET repository.
    """
    if args.dilation:
        # Local import keeps the dilated-conv dependency optional.
        from model import dilated
        return MODEL(args, dilated.dilated_conv)
    return MODEL(args)
def default_conv(in_channels, out_channels, kernel_size, stride=1, bias=True):
    """2-D convolution with 'same'-style padding for odd kernel sizes."""
    same_padding = kernel_size // 2
    return nn.Conv2d(in_channels,
                     out_channels,
                     kernel_size,
                     padding=same_padding,
                     stride=stride,
                     bias=bias)
class MODEL(nn.Module):
    """EDSR-style super-resolution network with pyramid attention (PANET).

    Pipeline: mean subtraction -> head conv -> residual body (with one
    PyramidAttention module inserted half-way) -> upsampling tail -> mean
    re-addition.
    """
    def __init__(self, args, conv= default_conv):
        super(MODEL, self).__init__()
        n_resblock = args.n_resblocks
        n_feats = args.n_feats
        kernel_size = 3
        scale = args.scale
        act = nn.ReLU(True)
        # DIV2K channel means; std of 1.0 leaves values unscaled.
        rgb_mean = (0.4488, 0.4371, 0.4040)
        rgb_std = (1.0, 1.0, 1.0)
        self.sub_mean = MeanShift(args.rgb_range, rgb_mean, rgb_std)
        self.msa = PyramidAttention(channel=n_feats, reduction=8,res_scale=args.res_scale);
        # define head module
        m_head = [conv(args.n_colors, n_feats, kernel_size)]
        # define body module: first half of the residual blocks, then the
        # attention module, then the second half and a closing conv.
        m_body = [
            ResBlock(
                conv, n_feats, kernel_size, act=act, res_scale=args.res_scale
            ) for _ in range(n_resblock//2)
        ]
        m_body.append(self.msa)
        for _ in range(n_resblock//2):
            m_body.append(ResBlock(
                conv, n_feats, kernel_size, act=act, res_scale=args.res_scale
            ))
        m_body.append(conv(n_feats, n_feats, kernel_size))
        # define tail module: upsample to the target scale, project to RGB.
        m_tail = [
            Upsampler(conv, scale, n_feats, act=False),
            nn.Conv2d(
                n_feats, args.n_colors, kernel_size,
                padding=(kernel_size//2)
            )
        ]
        # sign=1 adds the dataset mean back after reconstruction.
        self.add_mean = MeanShift(args.rgb_range, rgb_mean, rgb_std, 1)
        self.head = nn.Sequential(*m_head)
        self.body = nn.Sequential(*m_body)
        self.tail = nn.Sequential(*m_tail)

    def forward(self, x):
        """Super-resolve a batch of RGB images."""
        x = self.sub_mean(x)
        x = self.head(x)
        res = self.body(x)
        # Global residual connection around the whole body.
        res += x
        x = self.tail(res)
        x = self.add_mean(x)
        return x

    def load_state_dict(self, state_dict, strict=True):
        """Load weights, tolerating mismatches in the 'tail' submodule.

        The tail depends on the target scale, so checkpoints trained at a
        different scale may legitimately differ there; any mismatch or
        unexpected key outside 'tail' still raises.
        """
        own_state = self.state_dict()
        for name, param in state_dict.items():
            if name in own_state:
                if isinstance(param, nn.Parameter):
                    param = param.data
                try:
                    own_state[name].copy_(param)
                except Exception:
                    if name.find('tail') == -1:
                        raise RuntimeError('While copying the parameter named {}, '
                                           'whose dimensions in the model are {} and '
                                           'whose dimensions in the checkpoint are {}.'
                                           .format(name, own_state[name].size(), param.size()))
            elif strict:
                if name.find('tail') == -1:
                    raise KeyError('unexpected key "{}" in state_dict'
                                   .format(name))
class PyramidAttention(nn.Module):
    """Cross-scale (pyramid) non-local attention over feature maps.

    Builds a pyramid of progressively downscaled copies of the input
    (factors 1.0, 0.9, ... for the given ``level``), embeds them with small
    conv blocks, and lets every position of the full-resolution query attend
    (softmax over matmul similarity) to all positions of every level.

    NOTE(review): ksize, stride, softmax_scale, average and the escape_NaN
    buffer are stored but never used in forward -- presumably leftovers from
    a patch-matching variant; confirm before relying on those arguments.
    """
    def __init__(self, level=5, res_scale=1, channel=64, reduction=2, ksize=3, stride=1, softmax_scale=10, average=True, conv=default_conv):
        super(PyramidAttention, self).__init__()
        self.ksize = ksize
        self.stride = stride
        self.res_scale = res_scale
        self.softmax_scale = softmax_scale
        # Downscale factors for the feature pyramid: 1.0, 0.9, 0.8, ...
        self.scale = [1-i/10 for i in range(level)]
        self.reduction = reduction
        self.average = average
        escape_NaN = torch.FloatTensor([1e-4])
        self.register_buffer('escape_NaN', escape_NaN)
        # theta: query embedding at reduced channel width.
        self.conv_match_L_base = BasicBlock(conv,channel,channel//reduction, 3, bn=False, act=None)
        # g: key embedding applied to every pyramid level.
        self.conv_match = BasicBlock(conv,channel, channel//reduction, 3, bn=False, act=None)
        # f: value transform kept at full channel width for reconstruction.
        self.conv_assembly = BasicBlock(conv,channel, channel,1,bn=False, act=None)

    def forward(self, input):
        """Return input + res_scale * attention(input)."""
        res = input
        N,C,H,W = input.shape
        #theta
        match_base = self.conv_match_L_base(input)
        # raw_w collects value features (for reconstruction), w collects the
        # matching embeddings -- one entry per pyramid level.
        raw_w = []
        w = []
        #build feature pyramid
        for i in range(len(self.scale)):
            ref = input
            if self.scale[i]!=1:
                ref = F.interpolate(input, scale_factor=self.scale[i], mode='bicubic')
            #feature transformation function f
            base = self.conv_assembly(ref)
            base = torch.reshape(base,[N,C,-1])
            raw_w.append(base)
            #feature transformation function g
            ref_i = self.conv_match(ref)
            ref_i = torch.reshape(ref_i,[N,C//self.reduction,-1])
            w.append(ref_i)
        # Concatenate all pyramid positions into one key/value set.
        match_pyramid = torch.cat(w,dim=-1)
        match_raw = torch.cat(raw_w,dim=-1).permute(0,2,1)
        match_base = torch.reshape(match_base,[N,C//self.reduction,-1]).permute(0,2,1)
        # Attention weights: softmax over query/key similarity.
        score = F.softmax(torch.matmul(match_base,match_pyramid),dim=-1)
        y = torch.matmul(score,match_raw)
        y = torch.reshape(y,[N,C,H,W])
        # Scaled residual connection.
        y = y*self.res_scale+res # back to the mini-batch
        return y
class MeanShift(nn.Conv2d):
    """Frozen 1x1 convolution that subtracts (sign=-1) or adds (sign=1)
    the per-channel dataset mean, optionally dividing by the std."""

    def __init__(self, rgb_range,
                 rgb_mean=(0.4488, 0.4371, 0.4040), rgb_std=(1.0, 1.0, 1.0),
                 sign=-1):
        super(MeanShift, self).__init__(3, 3, kernel_size=1)
        std = torch.Tensor(rgb_std)
        mean = torch.Tensor(rgb_mean)
        # Identity kernel scaled by 1/std: each channel passes through
        # (optionally standardised) and is then shifted by -/+ mean.
        identity = torch.eye(3).view(3, 3, 1, 1)
        self.weight.data = identity / std.view(3, 1, 1, 1)
        self.bias.data = sign * rgb_range * mean / std
        # Fixed preprocessing step -- never trained.
        for param in self.parameters():
            param.requires_grad = False
class BasicBlock(nn.Sequential):
    """conv [+ BatchNorm] [+ activation] packaged as an nn.Sequential.

    Pass ``act=None`` to omit the activation (as PyramidAttention does).

    NOTE(review): the default ``act=nn.PReLU()`` is a single module created
    at import time, so every BasicBlock built with the default shares the
    same PReLU parameters -- confirm this is intended.  Also, ``stride`` is
    accepted but never forwarded to ``conv``.
    """
    def __init__(
            self, conv, in_channels, out_channels, kernel_size, stride=1, bias=True,
            bn=False, act=nn.PReLU()):
        m = [conv(in_channels, out_channels, kernel_size, bias=bias)]
        if bn:
            m.append(nn.BatchNorm2d(out_channels))
        if act is not None:
            m.append(act)
        super(BasicBlock, self).__init__(*m)
class ResBlock(nn.Module):
    """conv -> act -> conv residual block with a scaled skip connection."""

    def __init__(self, conv, n_feats, kernel_size,
                 bias=True, bn=False, act=nn.PReLU(), res_scale=1):
        super(ResBlock, self).__init__()
        layers = [conv(n_feats, n_feats, kernel_size, bias=bias)]
        if bn:
            layers.append(nn.BatchNorm2d(n_feats))
        # Activation only after the first convolution.
        layers.append(act)
        layers.append(conv(n_feats, n_feats, kernel_size, bias=bias))
        if bn:
            layers.append(nn.BatchNorm2d(n_feats))
        self.body = nn.Sequential(*layers)
        self.res_scale = res_scale

    def forward(self, x):
        """Return x + res_scale * body(x)."""
        residual = self.body(x) * self.res_scale
        return residual + x
class Upsampler(nn.Sequential):
def __init__(self, conv, scale, n_feats, bn=False, act=False, bias=True):
m = []
if (scale & (scale - 1)) == 0: # Is scale = 2^n?
for _ in range(int(math.log(scale, 2))):
m.append(conv(n_feats, 4 * n_feats, 3, bias))
m.append(nn.PixelShuffle(2))
if bn:
m.append(nn.BatchNorm2d(n_feats))
if act == 'relu':
m.append(nn.ReLU(True))
elif act == 'prelu':
m.append(nn.PReLU(n_feats))
elif scale == 3:
m.append(conv(n_feats, 9 * n_feats, 3, bias))
m.append(nn.PixelShuffle(3))
if bn:
m.append(nn.BatchNorm2d(n_feats))
if act == 'relu':
m.append(nn.ReLU(True))
elif act == 'prelu':
m.append(nn.PReLU(n_feats))
else:
raise NotImplementedError
super(Upsampler, self).__init__(*m) | models/panet.py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import functools
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.nn.init as init
import torch.optim as optim
import common.images
import common.metrics
import models
def update_argparser(parser):
    """Register PANET hyper-parameters and dataset-specific training defaults.

    Extends the base parser from ``models`` and, based on the already-parsed
    ``--dataset`` value, installs the DIV2K training schedule.
    """
    models.update_argparser(parser)
    # Peek at the options parsed so far to branch on the dataset choice.
    args, _ = parser.parse_known_args()
    parser.add_argument(
        '--n_resblocks',
        help='Number of residual blocks in networks.',
        default=32,
        type=int)
    parser.add_argument(
        '--n_feats',
        help='Channel width.',
        default=256,
        type=int)
    parser.add_argument(
        '--res_scale',
        help='rescale residual connections',
        default=0.1,
        type=float)
    parser.add_argument(
        '--rgb_range',
        help='rgb value range',
        default=1,
        type=int)
    parser.add_argument(
        '--n_colors',
        help='rgb channel',
        default=3,
        type=int)
    if args.dataset.startswith('div2k'):
        # Training schedule tuned for DIV2K: halve the LR at epochs 8/16/24.
        parser.set_defaults(
            train_epochs=32,
            learning_rate_milestones=(8,16,24),
            learning_rate_decay=0.5,
            save_checkpoints_epochs=1,
            lr_patch_size=48,
            train_temporal_size=1,
            eval_temporal_size=1,
        )
    else:
        raise NotImplementedError('Needs to tune hyper parameters for new dataset.')
def get_model_spec(params):
    """Construct the PANET model plus its training/eval components.

    Returns (model, loss_fn, optimizer, lr_scheduler, metrics).
    """
    model = MODEL(params)
    print('# of parameters: ', sum([p.numel() for p in model.parameters()]))
    optimizer = optim.Adam(model.parameters(), params.learning_rate)
    lr_scheduler = optim.lr_scheduler.MultiStepLR(optimizer,
                                                  params.learning_rate_milestones,
                                                  params.learning_rate_decay)
    # L1 loss is the training objective; it is also reported as a metric.
    loss_fn = torch.nn.L1Loss()
    metrics = {
        'loss':
            loss_fn,
        # PSNR shaves a (scale + 6)-pixel border unless no upscaling is done;
        # the Y-channel variant shaves only `scale` pixels.
        'PSNR':
            functools.partial(
                common.metrics.psnr,
                shave=0 if params.scale == 1 else params.scale + 6),
        'PSNR_Y':
            functools.partial(
                common.metrics.psnr_y,
                shave=0 if params.scale == 1 else params.scale),
    }
    return model, loss_fn, optimizer, lr_scheduler, metrics
def make_model(args, parent=False):
    """Instantiate the PANET network, optionally with dilated convolutions.

    Bug fix: the original returned ``PANET(args)``, but no ``PANET`` name
    exists in this module -- the network class defined here is ``MODEL`` --
    so every call raised NameError.  Presumably the class was renamed;
    confirm against the upstream PANET repository.
    """
    if args.dilation:
        # Local import keeps the dilated-conv dependency optional.
        from model import dilated
        return MODEL(args, dilated.dilated_conv)
    return MODEL(args)
def default_conv(in_channels, out_channels, kernel_size, stride=1, bias=True):
    """Build a 'same'-padded 2-D convolution (for odd kernel sizes)."""
    return nn.Conv2d(
        in_channels=in_channels,
        out_channels=out_channels,
        kernel_size=kernel_size,
        padding=kernel_size // 2,
        stride=stride,
        bias=bias,
    )
class MODEL(nn.Module):
    """EDSR-style super-resolution network with pyramid attention (PANET).

    Pipeline: mean subtraction -> head conv -> residual body (with one
    PyramidAttention module inserted half-way) -> upsampling tail -> mean
    re-addition.
    """
    def __init__(self, args, conv= default_conv):
        super(MODEL, self).__init__()
        n_resblock = args.n_resblocks
        n_feats = args.n_feats
        kernel_size = 3
        scale = args.scale
        act = nn.ReLU(True)
        # DIV2K channel means; std of 1.0 leaves values unscaled.
        rgb_mean = (0.4488, 0.4371, 0.4040)
        rgb_std = (1.0, 1.0, 1.0)
        self.sub_mean = MeanShift(args.rgb_range, rgb_mean, rgb_std)
        self.msa = PyramidAttention(channel=n_feats, reduction=8,res_scale=args.res_scale);
        # define head module
        m_head = [conv(args.n_colors, n_feats, kernel_size)]
        # define body module: first half of the residual blocks, then the
        # attention module, then the second half and a closing conv.
        m_body = [
            ResBlock(
                conv, n_feats, kernel_size, act=act, res_scale=args.res_scale
            ) for _ in range(n_resblock//2)
        ]
        m_body.append(self.msa)
        for _ in range(n_resblock//2):
            m_body.append(ResBlock(
                conv, n_feats, kernel_size, act=act, res_scale=args.res_scale
            ))
        m_body.append(conv(n_feats, n_feats, kernel_size))
        # define tail module: upsample to the target scale, project to RGB.
        m_tail = [
            Upsampler(conv, scale, n_feats, act=False),
            nn.Conv2d(
                n_feats, args.n_colors, kernel_size,
                padding=(kernel_size//2)
            )
        ]
        # sign=1 adds the dataset mean back after reconstruction.
        self.add_mean = MeanShift(args.rgb_range, rgb_mean, rgb_std, 1)
        self.head = nn.Sequential(*m_head)
        self.body = nn.Sequential(*m_body)
        self.tail = nn.Sequential(*m_tail)

    def forward(self, x):
        """Super-resolve a batch of RGB images."""
        x = self.sub_mean(x)
        x = self.head(x)
        res = self.body(x)
        # Global residual connection around the whole body.
        res += x
        x = self.tail(res)
        x = self.add_mean(x)
        return x

    def load_state_dict(self, state_dict, strict=True):
        """Load weights, tolerating mismatches in the 'tail' submodule.

        The tail depends on the target scale, so checkpoints trained at a
        different scale may legitimately differ there; any mismatch or
        unexpected key outside 'tail' still raises.
        """
        own_state = self.state_dict()
        for name, param in state_dict.items():
            if name in own_state:
                if isinstance(param, nn.Parameter):
                    param = param.data
                try:
                    own_state[name].copy_(param)
                except Exception:
                    if name.find('tail') == -1:
                        raise RuntimeError('While copying the parameter named {}, '
                                           'whose dimensions in the model are {} and '
                                           'whose dimensions in the checkpoint are {}.'
                                           .format(name, own_state[name].size(), param.size()))
            elif strict:
                if name.find('tail') == -1:
                    raise KeyError('unexpected key "{}" in state_dict'
                                   .format(name))
class PyramidAttention(nn.Module):
    """Cross-scale (pyramid) non-local attention over feature maps.

    Builds a pyramid of progressively downscaled copies of the input
    (factors 1.0, 0.9, ... for the given ``level``), embeds them with small
    conv blocks, and lets every position of the full-resolution query attend
    (softmax over matmul similarity) to all positions of every level.

    NOTE(review): ksize, stride, softmax_scale, average and the escape_NaN
    buffer are stored but never used in forward -- presumably leftovers from
    a patch-matching variant; confirm before relying on those arguments.
    """
    def __init__(self, level=5, res_scale=1, channel=64, reduction=2, ksize=3, stride=1, softmax_scale=10, average=True, conv=default_conv):
        super(PyramidAttention, self).__init__()
        self.ksize = ksize
        self.stride = stride
        self.res_scale = res_scale
        self.softmax_scale = softmax_scale
        # Downscale factors for the feature pyramid: 1.0, 0.9, 0.8, ...
        self.scale = [1-i/10 for i in range(level)]
        self.reduction = reduction
        self.average = average
        escape_NaN = torch.FloatTensor([1e-4])
        self.register_buffer('escape_NaN', escape_NaN)
        # theta: query embedding at reduced channel width.
        self.conv_match_L_base = BasicBlock(conv,channel,channel//reduction, 3, bn=False, act=None)
        # g: key embedding applied to every pyramid level.
        self.conv_match = BasicBlock(conv,channel, channel//reduction, 3, bn=False, act=None)
        # f: value transform kept at full channel width for reconstruction.
        self.conv_assembly = BasicBlock(conv,channel, channel,1,bn=False, act=None)

    def forward(self, input):
        """Return input + res_scale * attention(input)."""
        res = input
        N,C,H,W = input.shape
        #theta
        match_base = self.conv_match_L_base(input)
        # raw_w collects value features (for reconstruction), w collects the
        # matching embeddings -- one entry per pyramid level.
        raw_w = []
        w = []
        #build feature pyramid
        for i in range(len(self.scale)):
            ref = input
            if self.scale[i]!=1:
                ref = F.interpolate(input, scale_factor=self.scale[i], mode='bicubic')
            #feature transformation function f
            base = self.conv_assembly(ref)
            base = torch.reshape(base,[N,C,-1])
            raw_w.append(base)
            #feature transformation function g
            ref_i = self.conv_match(ref)
            ref_i = torch.reshape(ref_i,[N,C//self.reduction,-1])
            w.append(ref_i)
        # Concatenate all pyramid positions into one key/value set.
        match_pyramid = torch.cat(w,dim=-1)
        match_raw = torch.cat(raw_w,dim=-1).permute(0,2,1)
        match_base = torch.reshape(match_base,[N,C//self.reduction,-1]).permute(0,2,1)
        # Attention weights: softmax over query/key similarity.
        score = F.softmax(torch.matmul(match_base,match_pyramid),dim=-1)
        y = torch.matmul(score,match_raw)
        y = torch.reshape(y,[N,C,H,W])
        # Scaled residual connection.
        y = y*self.res_scale+res # back to the mini-batch
        return y
class MeanShift(nn.Conv2d):
    """Frozen 1x1 convolution that shifts an image by the dataset RGB mean.

    With sign=-1 the range-scaled mean is subtracted (normalisation); with
    sign=+1 it is added back. Each channel is also divided by its std.
    All parameters are frozen so this layer never trains.
    """

    def __init__(
            self, rgb_range,
            rgb_mean=(0.4488, 0.4371, 0.4040), rgb_std=(1.0, 1.0, 1.0), sign=-1):
        super(MeanShift, self).__init__(3, 3, kernel_size=1)
        stds = torch.Tensor(rgb_std)
        means = torch.Tensor(rgb_mean)
        # Per-channel identity scaled by 1/std.
        eye = torch.eye(3).view(3, 3, 1, 1)
        self.weight.data = eye / stds.view(3, 1, 1, 1)
        # Constant channel offset: sign * range * mean / std.
        self.bias.data = sign * rgb_range * means / stds
        # Freeze every parameter of this layer.
        self.requires_grad_(False)
class BasicBlock(nn.Sequential):
    """conv -> [BatchNorm] -> [activation] building block.

    Args:
        conv: factory called as conv(in_channels, out_channels, kernel_size, bias=...).
        act: activation module to append, or None for no activation.
             Defaults to a fresh nn.PReLU per instance.
        stride: accepted for interface compatibility but not forwarded to conv
                (unchanged from the original behaviour).
    """

    def __init__(
            self, conv, in_channels, out_channels, kernel_size, stride=1, bias=True,
            bn=False, act="default"):
        # BUGFIX: the previous default `act=nn.PReLU()` was evaluated once at
        # class-definition time, so every BasicBlock built with the default
        # shared a single PReLU module (and its learnable parameter). Use a
        # sentinel and create a fresh module per instance instead.
        if isinstance(act, str) and act == "default":
            act = nn.PReLU()
        m = [conv(in_channels, out_channels, kernel_size, bias=bias)]
        if bn:
            m.append(nn.BatchNorm2d(out_channels))
        if act is not None:
            m.append(act)
        super(BasicBlock, self).__init__(*m)
class ResBlock(nn.Module):
    """Residual block: y = x + res_scale * body(x), body = conv-act-conv.

    Args:
        conv: factory called as conv(n_feats, n_feats, kernel_size, bias=...).
        act: activation between the two convolutions; defaults to a fresh
             nn.PReLU per instance.
        res_scale: scalar applied to the residual branch before the skip add.
    """

    def __init__(
            self, conv, n_feats, kernel_size,
            bias=True, bn=False, act="default", res_scale=1):
        super(ResBlock, self).__init__()
        # BUGFIX: the previous default `act=nn.PReLU()` was evaluated once at
        # class-definition time, so every ResBlock using the default shared a
        # single PReLU module (and its learnable parameter). Create a fresh
        # activation per instance instead.
        if isinstance(act, str) and act == "default":
            act = nn.PReLU()
        m = []
        for i in range(2):
            m.append(conv(n_feats, n_feats, kernel_size, bias=bias))
            if bn:
                m.append(nn.BatchNorm2d(n_feats))
            if i == 0:
                m.append(act)
        self.body = nn.Sequential(*m)
        self.res_scale = res_scale

    def forward(self, x):
        """Return x + res_scale * body(x)."""
        res = self.body(x).mul(self.res_scale)
        res += x
        return res
class Upsampler(nn.Sequential):
    """Sub-pixel upsampler built from conv + PixelShuffle stages.

    Supports scale factors that are powers of two (log2(scale) stages of x2)
    or exactly 3 (a single x3 stage). `act` may be False (no activation),
    'relu' or 'prelu'. Any other scale raises NotImplementedError.
    """

    def __init__(self, conv, scale, n_feats, bn=False, act=False, bias=True):
        m = []
        if (scale & (scale - 1)) == 0: # Is scale = 2^n?
            for _ in range(int(math.log(scale, 2))):
                # NOTE(review): `bias` is passed positionally as conv's 4th
                # argument; with this file's default_conv that slot is `stride`,
                # not `bias` — this probably should be `bias=bias`. Confirm.
                m.append(conv(n_feats, 4 * n_feats, 3, bias))
                # PixelShuffle(2) turns 4*n_feats channels into n_feats at 2x size.
                m.append(nn.PixelShuffle(2))
                if bn:
                    m.append(nn.BatchNorm2d(n_feats))
                if act == 'relu':
                    m.append(nn.ReLU(True))
                elif act == 'prelu':
                    m.append(nn.PReLU(n_feats))
        elif scale == 3:
            # NOTE(review): same positional-`bias` concern as above.
            m.append(conv(n_feats, 9 * n_feats, 3, bias))
            m.append(nn.PixelShuffle(3))
            if bn:
                m.append(nn.BatchNorm2d(n_feats))
            if act == 'relu':
                m.append(nn.ReLU(True))
            elif act == 'prelu':
                m.append(nn.PReLU(n_feats))
        else:
            raise NotImplementedError
super(Upsampler, self).__init__(*m) | 0.866937 | 0.123049 |
from django.contrib.auth.models import User
# local Django
from app.models import Notification
from app.models import Task
from app.models import Host
class Notification_Entity():
def insert_one(self, notification):
"""Insert Notification"""
notification = Notification(
highlight=notification["highlight"],
notification=notification["notification"],
url=notification["url"],
type=notification["type"],
delivered=notification["delivered"],
user=User.objects.get(pk=notification["user_id"]),
task=Task.objects.get(pk=notification["task_id"]) if notification["task_id"] is not None else notification["task_id"],
host=Host.objects.get(pk=notification["host_id"]) if notification["host_id"] is not None else notification["host_id"]
)
notification.save()
return False if notification.pk is None else notification
def insert_many(self, notifications):
"""Insert Many Notifications"""
status = True
for notification in notifications:
status &= True if self.insert_one(notification) is not False else False
return status
def get_one_by_id(self, id):
"""Get Notification By ID"""
try:
notification = Notification.objects.get(pk=id)
return False if notification.pk is None else notification
except Exception:
return False
def get_one_by_task_id(self, task_id):
"""Get Notification By Task ID"""
try:
notification = Notification.objects.get(task=task_id)
return False if notification.pk is None else notification
except Exception:
return False
def get_many_by_user(self, user_id, order_by, asc, count=5):
"""Get Many Notifications By User ID"""
notifications = Notification.objects.filter(user=user_id).order_by(order_by if asc else "-%s" % order_by)[:count]
return notifications
def update_one_by_id(self, id, new_data):
"""Update Notification By ID"""
notification = self.get_one_by_id(id)
if notification is not False:
if "highlight" in new_data:
notification.highlight = new_data["highlight"]
if "notification" in new_data:
notification.notification = new_data["notification"]
if "url" in new_data:
notification.url = new_data["url"]
if "type" in new_data:
notification.type = new_data["type"]
if "delivered" in new_data:
notification.delivered = new_data["delivered"]
if "user_id" in new_data:
notification.user = User.objects.get(pk=new_data["user_id"])
if "task_id" in new_data:
notification.task = Task.objects.get(pk=notification["task_id"]) if notification["task_id"] is not None else notification["task_id"]
if "host_id" in new_data:
notification.host = Host.objects.get(pk=notification["host_id"]) if notification["host_id"] is not None else notification["host_id"]
notification.save()
return True
return False
def update_one_by_task_id(self, task_id, new_data):
notification = self.get_one_by_task_id(task_id)
if notification is not False:
if "highlight" in new_data:
notification.highlight = new_data["highlight"]
if "notification" in new_data:
notification.notification = new_data["notification"]
if "url" in new_data:
notification.url = new_data["url"]
if "type" in new_data:
notification.type = new_data["type"]
if "delivered" in new_data:
notification.delivered = new_data["delivered"]
if "created_at" in new_data:
notification.created_at = new_data["created_at"]
if "updated_at" in new_data:
notification.updated_at = new_data["updated_at"]
if "user_id" in new_data:
notification.user = User.objects.get(pk=new_data["user_id"])
if "task_id" in new_data:
notification.task = Task.objects.get(pk=notification["task_id"]) if notification["task_id"] is not None else notification["task_id"]
if "host_id" in new_data:
notification.host = Host.objects.get(pk=notification["host_id"]) if notification["host_id"] is not None else notification["host_id"]
notification.save()
return True
return False
def get_one_by_id_and_user(self, id, user_id):
try:
notification = Notification.objects.get(pk=id, user=user_id)
return False if notification.pk is None else notification
except Exception:
return False
    def delete_one_by_id(self, id):
        """Delete a notification by primary key; True iff a row was removed."""
        notification = self.get_one_by_id(id)
        if notification is not False:
            # Model.delete() returns (total_rows_deleted, per-model counts).
            count, deleted = notification.delete()
            return True if count > 0 else False
return False | app/modules/entity/notification_entity.py | from django.contrib.auth.models import User
# local Django
from app.models import Notification
from app.models import Task
from app.models import Host
class Notification_Entity():
def insert_one(self, notification):
"""Insert Notification"""
notification = Notification(
highlight=notification["highlight"],
notification=notification["notification"],
url=notification["url"],
type=notification["type"],
delivered=notification["delivered"],
user=User.objects.get(pk=notification["user_id"]),
task=Task.objects.get(pk=notification["task_id"]) if notification["task_id"] is not None else notification["task_id"],
host=Host.objects.get(pk=notification["host_id"]) if notification["host_id"] is not None else notification["host_id"]
)
notification.save()
return False if notification.pk is None else notification
def insert_many(self, notifications):
"""Insert Many Notifications"""
status = True
for notification in notifications:
status &= True if self.insert_one(notification) is not False else False
return status
def get_one_by_id(self, id):
"""Get Notification By ID"""
try:
notification = Notification.objects.get(pk=id)
return False if notification.pk is None else notification
except Exception:
return False
def get_one_by_task_id(self, task_id):
"""Get Notification By Task ID"""
try:
notification = Notification.objects.get(task=task_id)
return False if notification.pk is None else notification
except Exception:
return False
def get_many_by_user(self, user_id, order_by, asc, count=5):
"""Get Many Notifications By User ID"""
notifications = Notification.objects.filter(user=user_id).order_by(order_by if asc else "-%s" % order_by)[:count]
return notifications
def update_one_by_id(self, id, new_data):
"""Update Notification By ID"""
notification = self.get_one_by_id(id)
if notification is not False:
if "highlight" in new_data:
notification.highlight = new_data["highlight"]
if "notification" in new_data:
notification.notification = new_data["notification"]
if "url" in new_data:
notification.url = new_data["url"]
if "type" in new_data:
notification.type = new_data["type"]
if "delivered" in new_data:
notification.delivered = new_data["delivered"]
if "user_id" in new_data:
notification.user = User.objects.get(pk=new_data["user_id"])
if "task_id" in new_data:
notification.task = Task.objects.get(pk=notification["task_id"]) if notification["task_id"] is not None else notification["task_id"]
if "host_id" in new_data:
notification.host = Host.objects.get(pk=notification["host_id"]) if notification["host_id"] is not None else notification["host_id"]
notification.save()
return True
return False
def update_one_by_task_id(self, task_id, new_data):
notification = self.get_one_by_task_id(task_id)
if notification is not False:
if "highlight" in new_data:
notification.highlight = new_data["highlight"]
if "notification" in new_data:
notification.notification = new_data["notification"]
if "url" in new_data:
notification.url = new_data["url"]
if "type" in new_data:
notification.type = new_data["type"]
if "delivered" in new_data:
notification.delivered = new_data["delivered"]
if "created_at" in new_data:
notification.created_at = new_data["created_at"]
if "updated_at" in new_data:
notification.updated_at = new_data["updated_at"]
if "user_id" in new_data:
notification.user = User.objects.get(pk=new_data["user_id"])
if "task_id" in new_data:
notification.task = Task.objects.get(pk=notification["task_id"]) if notification["task_id"] is not None else notification["task_id"]
if "host_id" in new_data:
notification.host = Host.objects.get(pk=notification["host_id"]) if notification["host_id"] is not None else notification["host_id"]
notification.save()
return True
return False
def get_one_by_id_and_user(self, id, user_id):
try:
notification = Notification.objects.get(pk=id, user=user_id)
return False if notification.pk is None else notification
except Exception:
return False
def delete_one_by_id(self, id):
"""Delete Notification By ID"""
notification = self.get_one_by_id(id)
if notification is not False:
count, deleted = notification.delete()
return True if count > 0 else False
return False | 0.346873 | 0.036666 |
import os
import aiohttp
from asyncinit import asyncinit
from wow.fight import Fight
API_URL = "https://www.warcraftlogs.com:443/v1/report/"
@asyncinit
class WarcraftlogsAPI():
    async def __init__(self, code: str):
        # asyncinit makes this coroutine __init__ legal: `await WarcraftlogsAPI(code)`.
        self.code = code
        # Fetch the full report JSON once and cache it for all accessors.
        self.log_info = await self.get_log_info()
async def get_log_info(self):
try:
async with aiohttp.ClientSession() as session:
params = {
"api_key": os.getenv("WARCRAFTLOGS_CLIENT"),
"translate": "True"
}
async with session.get(API_URL + "fights/" +
self.code, params=params) as resp:
return await resp.json()
except:
print(
f"There was an error while getting logs info with {self.code} code")
return None
async def get_title(self):
"""
Return the logs title `str`
"""
return (self.log_info)["title"]
async def get_fight(self, fight_number):
"""
Return the `Fight` object from `list` of fights
with the given fight_number
"""
fight = (await self.get_fights())[fight_number]
return Fight(**fight)
    async def get_fights(self):
        """
        Return `list` of `dict` fights from logs
        (excluding trash and reset pulls if it is raid)
        """
        fights = (self.log_info)["fights"]
        # NOTE: the slice assignment filters the cached self.log_info["fights"]
        # list IN PLACE — entries without a truthy "boss" id are dropped
        # permanently after the first call.
        fights[:] = [e for e in fights if e.get("boss")]
        return fights
async def get_fights_amount(self):
"""
Return the :`int` number of fights
"""
return len(await self.get_fights())
async def get_total_duration(self):
"""
Use to calculate duration of logs
Return the `int` duration of logs that comes from the
difference of first and last event
"""
return abs((self.log_info)["start"] - (self.log_info)["end"])
    async def get_zone(self):
        """
        Return the name of the logs zone as an `str`
        MYTHIC_PLUS = 25
        CASTLE_NATHRIA = 26
        TORGHAST = 27
        SANCTUM_OF_DOMINATION = 28
        NEXT_RAID = 29...
        """
        # Map the numeric zone id from the cached report to a display name;
        # unknown ids fall through to None.
        zone = (self.log_info)["zone"]
        if zone == 25:
            return "Mythic+"
        elif zone == 26:
            return "Castle Nathria"
        elif zone == 27:
            return "Torghast"
        elif zone == 28:
            return "Sanctum of Domination"
        elif zone == 29:
            return "Future raid..."
        else:
return None | wow/warcraftlogs.py | import os
import aiohttp
from asyncinit import asyncinit
from wow.fight import Fight
API_URL = "https://www.warcraftlogs.com:443/v1/report/"
@asyncinit
class WarcraftlogsAPI():
async def __init__(self, code: str):
self.code = code
self.log_info = await self.get_log_info()
async def get_log_info(self):
try:
async with aiohttp.ClientSession() as session:
params = {
"api_key": os.getenv("WARCRAFTLOGS_CLIENT"),
"translate": "True"
}
async with session.get(API_URL + "fights/" +
self.code, params=params) as resp:
return await resp.json()
except:
print(
f"There was an error while getting logs info with {self.code} code")
return None
async def get_title(self):
"""
Return the logs title `str`
"""
return (self.log_info)["title"]
async def get_fight(self, fight_number):
"""
Return the `Fight` object from `list` of fights
with the given fight_number
"""
fight = (await self.get_fights())[fight_number]
return Fight(**fight)
async def get_fights(self):
"""
Return `list` of `dict` fights from logs
(excluding trash and reset pulls if it is raid)
"""
fights = (self.log_info)["fights"]
fights[:] = [e for e in fights if e.get("boss")]
return fights
async def get_fights_amount(self):
"""
Return the :`int` number of fights
"""
return len(await self.get_fights())
async def get_total_duration(self):
"""
Use to calculate duration of logs
Return the `int` duration of logs that comes from the
difference of first and last event
"""
return abs((self.log_info)["start"] - (self.log_info)["end"])
async def get_zone(self):
"""
Return the name of the logs zone as an `str`
MYTHIC_PLUS = 25
CASTLE_NATHRIA = 26
TORGHAST = 27
SANCTUM_OF_DOMINATION = 28
NEXT_RAID = 29...
"""
zone = (self.log_info)["zone"]
if zone == 25:
return "Mythic+"
elif zone == 26:
return "Castle Nathria"
elif zone == 27:
return "Torghast"
elif zone == 28:
return "Sanctum of Domination"
elif zone == 29:
return "Future raid..."
else:
return None | 0.484624 | 0.178741 |
from sklearn.grid_search import GridSearchCV
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.externals import joblib
import deepdish.io as io
import cPickle
import sys
CROSS_VAL = False
# -----------------------------------------------------------------
def train():
    '''
    Fit a GradientBoostingClassifier on train_data.h5 (keys X, y, w) and
    pickle it to sklBDT_trk2.pkl. With CROSS_VAL, a small grid search over
    tree count / depth / min split is run first (Python 2 / old sklearn API).
    '''
    # NOTE(review): deepdish io.load is normally given a path; passing an open
    # file handle works only with some deepdish versions — confirm.
    data = io.load(open('train_data.h5', 'rb'))
    #data = remove_tau(data)
    if CROSS_VAL:
        param_grid = {'n_estimators':[50, 100], 'max_depth':[3, 5, 10], 'min_samples_split':[2, 5]}
        # Sample weights are forwarded to every fold's fit() call.
        fit_params = {
            'sample_weight' : data['w'],
        }
        metaclassifier = GridSearchCV(GradientBoostingClassifier(), param_grid=param_grid, fit_params=fit_params, \
            cv=2, n_jobs=4, verbose=2)#, scoring=roc_score)
        metaclassifier.fit(data['X'], data['y'])
        classifier = metaclassifier.best_estimator_
        # Python 2 print statement — this module predates Python 3.
        print 'Best classifier:', metaclassifier.best_params_
    else:
        classifier = GradientBoostingClassifier(n_estimators=200, min_samples_split=2, max_depth=10, verbose=1)
        classifier.fit(data['X'], data['y'], sample_weight=data['w'])
    joblib.dump(classifier, 'sklBDT_trk2.pkl', protocol=cPickle.HIGHEST_PROTOCOL)
# -----------------------------------------------------------------
def test():
    '''
    Load the pickled classifier, score test_data.h5 and write the class-2
    probabilities to yhat_test.h5.
    '''
    data = io.load(open('test_data.h5', 'rb'))
    #data = remove_tau(data)
    # -- Load scikit classifier
    classifier = joblib.load('sklBDT_trk2.pkl')
    # -- Get classifier predictions
    # Column 2 of predict_proba — depends on the label ordering seen in training.
    yhat = classifier.predict_proba(data['X'])[:, 2]
    # NOTE(review): deepdish io.save is normally given a path; passing an open
    # file handle works only with some deepdish versions — confirm.
    io.save(open('yhat_test.h5', 'wb'), yhat)
# -----------------------------------------------------------------
def roc_score(clf, X, y):
    '''
    Area under the (efficiency, rejection) curve for b-jets (label 5) versus
    light jets (label 0); all other labels are ignored. Intended as a
    GridSearchCV scoring callable.
    '''
    from sklearn.metrics import roc_curve, auc
    yhat = clf.predict_proba(X)[:, 2] # make sure you select out the right column! (depends on whether we are doing binary or multiclass classification)
    # Keep only background (0) and signal (5) jets.
    bl_sel = (y == 0) | (y == 5)
    fpr, eff, _ = roc_curve(y[bl_sel] == 5, yhat[bl_sel])
    # NOTE(review): fpr can contain 0 at the tightest thresholds, making rej
    # infinite; the (0.5, 1.0) efficiency window below usually drops those
    # points, but that is not guaranteed — confirm.
    rej = 1 / fpr
    select = (eff > 0.5) & (eff < 1.0)
    rej = rej[select]
    eff = eff[select]
    return auc(eff, rej)
# -----------------------------------------------------------------
def remove_tau(data):
    '''
    Drop tau-labelled (y == 15) rows from data['X'], data['y'] and data['w']
    consistently, mutating and returning the dict.

    BUGFIX: the original overwrote data['y'] with the filtered labels and then
    used the already-filtered labels to mask data['w'], so the weight mask had
    the wrong length (boolean-index shape mismatch). Compute the mask once,
    before any column is overwritten, and apply it to every column.
    '''
    mask = data['y'] != 15
    data['X'] = data['X'][mask]
    data['w'] = data['w'][mask]
    data['y'] = data['y'][mask]
    return data
# -----------------------------------------------------------------
if __name__ == '__main__':
    # -- read in arguments
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('mode', help='train, test or traintest')
    args= parser.parse_args()
    # Dispatch on the requested mode; each branch terminates the process
    # (sys.exit(None) yields exit status 0).
    if args.mode == 'train':
        sys.exit(train())
    elif args.mode == 'test':
        sys.exit(test())
    elif args.mode == 'traintest':
        train()
        sys.exit(test())
    else:
sys.exit('Error: unknown mode') | trackjets/trackjet_bdt.py | from sklearn.grid_search import GridSearchCV
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.externals import joblib
import deepdish.io as io
import cPickle
import sys
CROSS_VAL = False
# -----------------------------------------------------------------
def train():
'''
'''
data = io.load(open('train_data.h5', 'rb'))
#data = remove_tau(data)
if CROSS_VAL:
param_grid = {'n_estimators':[50, 100], 'max_depth':[3, 5, 10], 'min_samples_split':[2, 5]}
fit_params = {
'sample_weight' : data['w'],
}
metaclassifier = GridSearchCV(GradientBoostingClassifier(), param_grid=param_grid, fit_params=fit_params, \
cv=2, n_jobs=4, verbose=2)#, scoring=roc_score)
metaclassifier.fit(data['X'], data['y'])
classifier = metaclassifier.best_estimator_
print 'Best classifier:', metaclassifier.best_params_
else:
classifier = GradientBoostingClassifier(n_estimators=200, min_samples_split=2, max_depth=10, verbose=1)
classifier.fit(data['X'], data['y'], sample_weight=data['w'])
joblib.dump(classifier, 'sklBDT_trk2.pkl', protocol=cPickle.HIGHEST_PROTOCOL)
# -----------------------------------------------------------------
def test():
'''
'''
data = io.load(open('test_data.h5', 'rb'))
#data = remove_tau(data)
# -- Load scikit classifier
classifier = joblib.load('sklBDT_trk2.pkl')
# -- Get classifier predictions
yhat = classifier.predict_proba(data['X'])[:, 2]
io.save(open('yhat_test.h5', 'wb'), yhat)
# -----------------------------------------------------------------
def roc_score(clf, X, y):
'''
'''
from sklearn.metrics import roc_curve, auc
yhat = clf.predict_proba(X)[:, 2] # make sure you select out the right column! (depends on whether we are doing binary or multiclass classification)
bl_sel = (y == 0) | (y == 5)
fpr, eff, _ = roc_curve(y[bl_sel] == 5, yhat[bl_sel])
rej = 1 / fpr
select = (eff > 0.5) & (eff < 1.0)
rej = rej[select]
eff = eff[select]
return auc(eff, rej)
# -----------------------------------------------------------------
def remove_tau(data):
data['X'] = data['X'][data['y'] != 15]
data['y'] = data['y'][data['y'] != 15]
data['w'] = data['w'][data['y'] != 15]
return data
# -----------------------------------------------------------------
if __name__ == '__main__':
# -- read in arguments
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('mode', help='train, test or traintest')
args= parser.parse_args()
if args.mode == 'train':
sys.exit(train())
elif args.mode == 'test':
sys.exit(test())
elif args.mode == 'traintest':
train()
sys.exit(test())
else:
sys.exit('Error: unknown mode') | 0.468304 | 0.278287 |
from typing import Dict
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.postgres.fields import JSONField
from django.core.exceptions import ValidationError
from django.db import models
from django.utils.encoding import force_text
from django.utils.translation import gettext_lazy as _
from django_better_admin_arrayfield.models.fields import ArrayField
from solo.models import SingletonModel, get_cache
import mozilla_django_oidc_db.settings as oidc_settings
def get_default_scopes():
    """
    Returns the default scopes to request for OpenID Connect logins
    (a fresh list on every call, suitable as an ArrayField default).
    """
    scopes = ("openid", "email", "profile")
    return list(scopes)
def get_claim_mapping() -> Dict[str, str]:
    # Default mapping from User model field names to OIDC claim names
    # (see https://openid.net/specs/openid-connect-core-1_0.html#Claims).
    # Returns a fresh dict on every call, suitable as a JSONField default.
    return dict(
        email="email",
        first_name="given_name",
        last_name="family_name",
    )
class OpenIDConnectConfig(SingletonModel):
    """
    Configuration for authentication/authorization via OpenID connect.

    Singleton model: there is exactly one row, accessed through get_solo(),
    optionally cached (see the cache helpers further down the class).
    """

    enabled = models.BooleanField(
        _("enable"),
        default=False,
        help_text=_(
            "Indicates whether OpenID Connect for authentication/authorization is enabled"
        ),
    )

    # --- Relying-party (client) credentials and behaviour ---
    oidc_rp_client_id = models.CharField(
        _("OpenID Connect client ID"),
        max_length=1000,
        help_text=_("OpenID Connect client ID provided by the OIDC Provider"),
    )
    oidc_rp_client_secret = models.CharField(
        _("OpenID Connect secret"),
        max_length=1000,
        help_text=_("OpenID Connect secret provided by the OIDC Provider"),
    )
    oidc_rp_sign_algo = models.CharField(
        _("OpenID sign algorithm"),
        max_length=50,
        help_text=_("Algorithm the Identity Provider uses to sign ID tokens"),
        default="HS256",
    )
    oidc_rp_scopes_list = ArrayField(
        verbose_name=_("OpenID Connect scopes"),
        base_field=models.CharField(_("OpenID Connect scope"), max_length=50),
        default=get_default_scopes,
        blank=True,
        help_text=_("OpenID Connect scopes that are requested during login"),
    )

    # --- OIDC provider endpoints ---
    oidc_op_discovery_endpoint = models.URLField(
        _("Discovery endpoint"),
        max_length=1000,
        help_text=_(
            "URL of your OpenID Connect provider discovery endpoint ending with a slash "
            "(`.well-known/...` will be added automatically). "
            "If this is provided, the remaining endpoints can be omitted, as "
            "they will be derived from this endpoint."
        ),
        blank=True,
    )
    oidc_op_jwks_endpoint = models.URLField(
        _("JSON Web Key Set endpoint"),
        max_length=1000,
        help_text=_(
            "URL of your OpenID Connect provider JSON Web Key Set endpoint. Required if `RS256` is used as signing algorithm"
        ),
        blank=True,
    )
    oidc_op_authorization_endpoint = models.URLField(
        _("Authorization endpoint"),
        max_length=1000,
        help_text=_("URL of your OpenID Connect provider authorization endpoint"),
    )
    oidc_op_token_endpoint = models.URLField(
        _("Token endpoint"),
        max_length=1000,
        help_text=_("URL of your OpenID Connect provider token endpoint"),
    )
    oidc_op_user_endpoint = models.URLField(
        _("User endpoint"),
        max_length=1000,
        help_text=_("URL of your OpenID Connect provider userinfo endpoint"),
    )
    oidc_rp_idp_sign_key = models.CharField(
        _("Sign key"),
        max_length=1000,
        help_text=_(
            "Key the Identity Provider uses to sign ID tokens in the case of an RSA sign algorithm. Should be the signing key in PEM or DER format"
        ),
        blank=True,
    )

    # --- Claim handling and local user provisioning ---
    username_claim = models.CharField(
        _("username claim"),
        max_length=50,
        default="sub",
        help_text=_("The name of the OIDC claim that is used as the username"),
    )
    claim_mapping = JSONField(
        _("claim mapping"),
        default=get_claim_mapping,
        # CONSISTENCY FIX: this was the only help_text in the model not wrapped
        # in _() for translation.
        help_text=_("Mapping from user-model fields to OIDC claims"),
    )
    groups_claim = models.CharField(
        _("groups claim"),
        max_length=50,
        default="roles",
        help_text=_(
            "The name of the OIDC claim that holds the values to map to local user groups."
        ),
    )
    sync_groups = models.BooleanField(
        _("synchronize groups"),
        default=True,
        help_text=_(
            "Synchronize the local user groups with the provided groups. Note that this "
            "means a user is removed from all groups if there is no group claim. "
            "Uncheck to manage groups manually."
        ),
    )
    sync_groups_glob_pattern = models.CharField(
        _("groups glob pattern"),
        default="*",
        max_length=255,
        help_text=_(
            "The glob pattern that groups must match to be synchronized to "
            "the local database."
        ),
    )
    make_users_staff = models.BooleanField(
        _("make users staff"),
        default=False,
        help_text=_(
            "Users will be flagged as being a staff user automatically. This allows users to login to the admin interface. By default they have no permissions, even if they are staff."
        ),
    )
    class Meta:
        verbose_name = _("OpenID Connect configuration")

    def __str__(self):
        # Singleton config: display the translated verbose name, not field data.
        return force_text(self._meta.verbose_name)
    def clean(self):
        """Validate that every claim_mapping key is a real field on the user
        model and that the username field is not mapped (it is controlled by
        username_claim instead)."""
        super().clean()
        # validate claim mapping
        User = get_user_model()
        for field in self.claim_mapping.keys():
            try:
                User._meta.get_field(field)
            except models.FieldDoesNotExist:
                # Raise on the first unknown field name.
                raise ValidationError(
                    {
                        "claim_mapping": _(
                            "Field {field} does not exist on the user model"
                        ).format(field=field)
                    }
                )

        if User.USERNAME_FIELD in self.claim_mapping:
            raise ValidationError(
                {
                    "claim_mapping": _(
                        "The username field may not be in the claim mapping"
                    ),
                }
            )
@property
def oidc_rp_scopes(self):
"""
Scopes should be formatted as a string with spaces
"""
return " ".join(self.oidc_rp_scopes_list)
@classmethod
def clear_cache(cls):
cache_name = getattr(
settings, "OIDC_CACHE", oidc_settings.MOZILLA_DJANGO_OIDC_DB_CACHE
)
if cache_name:
cache = get_cache(cache_name)
cache_key = cls.get_cache_key()
cache.delete(cache_key)
def set_to_cache(self):
cache_name = getattr(
settings,
"MOZILLA_DJANGO_OIDC_DB_CACHE",
oidc_settings.MOZILLA_DJANGO_OIDC_DB_CACHE,
)
if not cache_name:
return None
cache = get_cache(cache_name)
cache_key = self.get_cache_key()
timeout = getattr(
settings,
"MOZILLA_DJANGO_OIDC_DB_CACHE_TIMEOUT",
oidc_settings.MOZILLA_DJANGO_OIDC_DB_CACHE_TIMEOUT,
)
cache.set(cache_key, self, timeout)
@classmethod
def get_cache_key(cls):
prefix = getattr(
settings,
"MOZILLA_DJANGO_OIDC_DB_PREFIX",
oidc_settings.MOZILLA_DJANGO_OIDC_DB_PREFIX,
)
return "%s:%s" % (prefix, cls.__name__.lower())
    @classmethod
    def get_solo(cls):
        """Return the singleton instance, preferring the cache.

        Falls back to the database (creating the row on first access) and
        repopulates the cache when caching is enabled."""
        cache_name = getattr(
            settings,
            "MOZILLA_DJANGO_OIDC_DB_CACHE",
            oidc_settings.MOZILLA_DJANGO_OIDC_DB_CACHE,
        )
        if not cache_name:
            # Caching disabled: always hit the database.
            obj, created = cls.objects.get_or_create(pk=cls.singleton_instance_id)
            return obj
        cache = get_cache(cache_name)
        cache_key = cls.get_cache_key()
        obj = cache.get(cache_key)
        if not obj:
            # Cache miss: load (or create) from the DB and refresh the cache.
            obj, created = cls.objects.get_or_create(pk=cls.singleton_instance_id)
            obj.set_to_cache()
return obj | mozilla_django_oidc_db/models.py | from typing import Dict
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.postgres.fields import JSONField
from django.core.exceptions import ValidationError
from django.db import models
from django.utils.encoding import force_text
from django.utils.translation import gettext_lazy as _
from django_better_admin_arrayfield.models.fields import ArrayField
from solo.models import SingletonModel, get_cache
import mozilla_django_oidc_db.settings as oidc_settings
def get_default_scopes():
"""
Returns the default scopes to request for OpenID Connect logins
"""
return ["openid", "email", "profile"]
def get_claim_mapping() -> Dict[str, str]:
# Map (some) claim names from https://openid.net/specs/openid-connect-core-1_0.html#Claims
# to corresponding field names on the User model
return {
"email": "email",
"first_name": "given_name",
"last_name": "family_name",
}
class OpenIDConnectConfig(SingletonModel):
"""
Configuration for authentication/authorization via OpenID connect
"""
enabled = models.BooleanField(
_("enable"),
default=False,
help_text=_(
"Indicates whether OpenID Connect for authentication/authorization is enabled"
),
)
oidc_rp_client_id = models.CharField(
_("OpenID Connect client ID"),
max_length=1000,
help_text=_("OpenID Connect client ID provided by the OIDC Provider"),
)
oidc_rp_client_secret = models.CharField(
_("OpenID Connect secret"),
max_length=1000,
help_text=_("OpenID Connect secret provided by the OIDC Provider"),
)
oidc_rp_sign_algo = models.CharField(
_("OpenID sign algorithm"),
max_length=50,
help_text=_("Algorithm the Identity Provider uses to sign ID tokens"),
default="HS256",
)
oidc_rp_scopes_list = ArrayField(
verbose_name=_("OpenID Connect scopes"),
base_field=models.CharField(_("OpenID Connect scope"), max_length=50),
default=get_default_scopes,
blank=True,
help_text=_("OpenID Connect scopes that are requested during login"),
)
oidc_op_discovery_endpoint = models.URLField(
_("Discovery endpoint"),
max_length=1000,
help_text=_(
"URL of your OpenID Connect provider discovery endpoint ending with a slash "
"(`.well-known/...` will be added automatically). "
"If this is provided, the remaining endpoints can be omitted, as "
"they will be derived from this endpoint."
),
blank=True,
)
oidc_op_jwks_endpoint = models.URLField(
_("JSON Web Key Set endpoint"),
max_length=1000,
help_text=_(
"URL of your OpenID Connect provider JSON Web Key Set endpoint. Required if `RS256` is used as signing algorithm"
),
blank=True,
)
oidc_op_authorization_endpoint = models.URLField(
_("Authorization endpoint"),
max_length=1000,
help_text=_("URL of your OpenID Connect provider authorization endpoint"),
)
oidc_op_token_endpoint = models.URLField(
_("Token endpoint"),
max_length=1000,
help_text=_("URL of your OpenID Connect provider token endpoint"),
)
oidc_op_user_endpoint = models.URLField(
_("User endpoint"),
max_length=1000,
help_text=_("URL of your OpenID Connect provider userinfo endpoint"),
)
oidc_rp_idp_sign_key = models.CharField(
_("Sign key"),
max_length=1000,
help_text=_(
"Key the Identity Provider uses to sign ID tokens in the case of an RSA sign algorithm. Should be the signing key in PEM or DER format"
),
blank=True,
)
username_claim = models.CharField(
_("username claim"),
max_length=50,
default="sub",
help_text=_("The name of the OIDC claim that is used as the username"),
)
claim_mapping = JSONField(
_("claim mapping"),
default=get_claim_mapping,
help_text=("Mapping from user-model fields to OIDC claims"),
)
groups_claim = models.CharField(
_("groups claim"),
max_length=50,
default="roles",
help_text=_(
"The name of the OIDC claim that holds the values to map to local user groups."
),
)
sync_groups = models.BooleanField(
_("synchronize groups"),
default=True,
help_text=_(
"Synchronize the local user groups with the provided groups. Note that this "
"means a user is removed from all groups if there is no group claim. "
"Uncheck to manage groups manually."
),
)
sync_groups_glob_pattern = models.CharField(
_("groups glob pattern"),
default="*",
max_length=255,
help_text=_(
"The glob pattern that groups must match to be synchronized to "
"the local database."
),
)
make_users_staff = models.BooleanField(
_("make users staff"),
default=False,
help_text=_(
"Users will be flagged as being a staff user automatically. This allows users to login to the admin interface. By default they have no permissions, even if they are staff."
),
)
class Meta:
verbose_name = _("OpenID Connect configuration")
def __str__(self):
return force_text(self._meta.verbose_name)
def clean(self):
super().clean()
# validate claim mapping
User = get_user_model()
for field in self.claim_mapping.keys():
try:
User._meta.get_field(field)
except models.FieldDoesNotExist:
raise ValidationError(
{
"claim_mapping": _(
"Field {field} does not exist on the user model"
).format(field=field)
}
)
if User.USERNAME_FIELD in self.claim_mapping:
raise ValidationError(
{
"claim_mapping": _(
"The username field may not be in the claim mapping"
),
}
)
@property
def oidc_rp_scopes(self):
"""
Scopes should be formatted as a string with spaces
"""
return " ".join(self.oidc_rp_scopes_list)
@classmethod
def clear_cache(cls):
cache_name = getattr(
settings, "OIDC_CACHE", oidc_settings.MOZILLA_DJANGO_OIDC_DB_CACHE
)
if cache_name:
cache = get_cache(cache_name)
cache_key = cls.get_cache_key()
cache.delete(cache_key)
def set_to_cache(self):
cache_name = getattr(
settings,
"MOZILLA_DJANGO_OIDC_DB_CACHE",
oidc_settings.MOZILLA_DJANGO_OIDC_DB_CACHE,
)
if not cache_name:
return None
cache = get_cache(cache_name)
cache_key = self.get_cache_key()
timeout = getattr(
settings,
"MOZILLA_DJANGO_OIDC_DB_CACHE_TIMEOUT",
oidc_settings.MOZILLA_DJANGO_OIDC_DB_CACHE_TIMEOUT,
)
cache.set(cache_key, self, timeout)
@classmethod
def get_cache_key(cls):
prefix = getattr(
settings,
"MOZILLA_DJANGO_OIDC_DB_PREFIX",
oidc_settings.MOZILLA_DJANGO_OIDC_DB_PREFIX,
)
return "%s:%s" % (prefix, cls.__name__.lower())
@classmethod
def get_solo(cls):
cache_name = getattr(
settings,
"MOZILLA_DJANGO_OIDC_DB_CACHE",
oidc_settings.MOZILLA_DJANGO_OIDC_DB_CACHE,
)
if not cache_name:
obj, created = cls.objects.get_or_create(pk=cls.singleton_instance_id)
return obj
cache = get_cache(cache_name)
cache_key = cls.get_cache_key()
obj = cache.get(cache_key)
if not obj:
obj, created = cls.objects.get_or_create(pk=cls.singleton_instance_id)
obj.set_to_cache()
return obj | 0.768473 | 0.108992 |
from django.conf import settings
from django.db import models
import uuid
from django.contrib.auth.models import (
AbstractBaseUser,
BaseUserManager,
PermissionsMixin,
)
class UserManager(BaseUserManager):
def create_user(self, email, password=<PASSWORD>, **extra_fields):
"""Creates and saves a new user"""
if not email:
raise ValueError("Email must be a valid address")
user = self.model(email=self.normalize_email(email), **extra_fields)
user.set_password(password)
user.save(using=self._db)
return user
def create_superuser(self, email, password):
""" Creates and saves a new superuser """
user = self.create_user(email, password)
user.is_staff = True
user.is_superuser = True
user.save(using=self._db)
return user
class User(AbstractBaseUser, PermissionsMixin):
"""custome user model that uses email instead of username"""
email = models.EmailField(max_length=255, unique=True)
name = models.CharField(max_length=255)
is_active = models.BooleanField(default=True)
is_staff = models.BooleanField(default=False)
objects = UserManager()
USERNAME_FIELD = "email"
class Subject(models.Model):
""" saves subjects offered """
SubjectId = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
SubjectName = models.CharField(max_length=100, blank=True)
Abbreviation = models.CharField(max_length=100, blank=True)
owner = models.ForeignKey(
to=User,
on_delete=models.CASCADE,
)
def __str__(self):
return self.SubjectName
class Term(models.Model):
""" Saves periods in a year """
TermId = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
TermName = models.CharField(max_length=100, blank=True)
owner = models.ForeignKey(
to=User,
on_delete=models.CASCADE,
)
def __str__(self):
return self.TermName
class Group(models.Model):
""" saves groups object """
GroupId = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
GroupName = models.CharField(max_length=100, blank=True)
owner = models.ForeignKey(
to=User,
on_delete=models.CASCADE,
)
def __str__(self):
return self.GroupName
class Student(models.Model):
""" students model that saves students """
StudentId = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
FirstName = models.CharField(max_length=100, blank=True)
LastName = models.CharField(max_length=100, blank=True)
ComChannel = models.CharField(max_length=100, blank=True)
Groups = models.ManyToManyField(Group, related_name="groups_registered")
Gender = models.CharField(
max_length=10, choices=(("M", "Male"), ("F", "Female")), default="F"
)
owner = models.ForeignKey(
to=User,
on_delete=models.CASCADE,
)
def __str__(self):
return self.FirstName
class Score(models.Model):
""" Saves scores object for subjects taken by student"""
ScoreId = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
Student = models.ForeignKey(Student, on_delete=models.CASCADE)
Subject = models.ForeignKey(Subject, on_delete=models.CASCADE)
Term = models.ForeignKey(Term, on_delete=models.CASCADE)
score = models.IntegerField()
owner = models.ForeignKey(
to=User,
on_delete=models.CASCADE,
)
def __str__(self):
return self.score
class Leader(models.Model):
"""Saves leaders objects of various groups """
LeaderId = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
Student = models.ForeignKey(Student, on_delete=models.CASCADE)
Group = models.ForeignKey(Group, on_delete=models.CASCADE)
Position = models.CharField(max_length=100, blank=True)
owner = models.ForeignKey(
to=User,
on_delete=models.CASCADE,
)
def __str__(self):
return self.Position | server/src/core/models.py | from django.conf import settings
from django.db import models
import uuid
from django.contrib.auth.models import (
AbstractBaseUser,
BaseUserManager,
PermissionsMixin,
)
class UserManager(BaseUserManager):
def create_user(self, email, password=<PASSWORD>, **extra_fields):
"""Creates and saves a new user"""
if not email:
raise ValueError("Email must be a valid address")
user = self.model(email=self.normalize_email(email), **extra_fields)
user.set_password(password)
user.save(using=self._db)
return user
def create_superuser(self, email, password):
""" Creates and saves a new superuser """
user = self.create_user(email, password)
user.is_staff = True
user.is_superuser = True
user.save(using=self._db)
return user
class User(AbstractBaseUser, PermissionsMixin):
"""custome user model that uses email instead of username"""
email = models.EmailField(max_length=255, unique=True)
name = models.CharField(max_length=255)
is_active = models.BooleanField(default=True)
is_staff = models.BooleanField(default=False)
objects = UserManager()
USERNAME_FIELD = "email"
class Subject(models.Model):
""" saves subjects offered """
SubjectId = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
SubjectName = models.CharField(max_length=100, blank=True)
Abbreviation = models.CharField(max_length=100, blank=True)
owner = models.ForeignKey(
to=User,
on_delete=models.CASCADE,
)
def __str__(self):
return self.SubjectName
class Term(models.Model):
""" Saves periods in a year """
TermId = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
TermName = models.CharField(max_length=100, blank=True)
owner = models.ForeignKey(
to=User,
on_delete=models.CASCADE,
)
def __str__(self):
return self.TermName
class Group(models.Model):
""" saves groups object """
GroupId = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
GroupName = models.CharField(max_length=100, blank=True)
owner = models.ForeignKey(
to=User,
on_delete=models.CASCADE,
)
def __str__(self):
return self.GroupName
class Student(models.Model):
""" students model that saves students """
StudentId = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
FirstName = models.CharField(max_length=100, blank=True)
LastName = models.CharField(max_length=100, blank=True)
ComChannel = models.CharField(max_length=100, blank=True)
Groups = models.ManyToManyField(Group, related_name="groups_registered")
Gender = models.CharField(
max_length=10, choices=(("M", "Male"), ("F", "Female")), default="F"
)
owner = models.ForeignKey(
to=User,
on_delete=models.CASCADE,
)
def __str__(self):
return self.FirstName
class Score(models.Model):
""" Saves scores object for subjects taken by student"""
ScoreId = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
Student = models.ForeignKey(Student, on_delete=models.CASCADE)
Subject = models.ForeignKey(Subject, on_delete=models.CASCADE)
Term = models.ForeignKey(Term, on_delete=models.CASCADE)
score = models.IntegerField()
owner = models.ForeignKey(
to=User,
on_delete=models.CASCADE,
)
def __str__(self):
return self.score
class Leader(models.Model):
"""Saves leaders objects of various groups """
LeaderId = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
Student = models.ForeignKey(Student, on_delete=models.CASCADE)
Group = models.ForeignKey(Group, on_delete=models.CASCADE)
Position = models.CharField(max_length=100, blank=True)
owner = models.ForeignKey(
to=User,
on_delete=models.CASCADE,
)
def __str__(self):
return self.Position | 0.596433 | 0.13134 |
__all__ = ['TEST_MODEL_VIEW', 'TEST_API_VIEW']
TEST_MODEL_VIEW = """from django.test import TestCase
from {{ app_name }}.factories import {{ model_meta.object_name }}Factory
from {{ app_name }}.models import {{ model_meta.object_name }}
class {{ model_meta.object_name }}TestCase(TestCase):
def setUp(self):
super().setUp()
# create data from {{ model_meta.object_name }}Factory
{{ model_meta.object_name }}Factory()
def tearDown(self):
super().tearDown()
{{ model_meta.object_name }}.objects.all().delete()
def test_{{ model_meta.name }}_can_be_created(self):
{{ model_meta.name }} = {{ model_meta.object_name }}.objects.first()
self.assertEqual({{ model_meta.name }}.id, 1)
def test_{{ model_meta.name }}_can_be_updated(self):
pass
def test_{{ model_meta.name }}_can_be_deleted(self):
pass
"""
TEST_API_VIEW = """from drf_core.tests import BaseTestCase
from {{ app_name }}.factories import {{ model_meta.object_name }}Factory
from {{ app_name }}.models import {{ model_meta.object_name }}
from {{ app_name }}.apis import {{ model_meta.object_name }}ViewSet
class {{ model_meta.object_name }}ViewSetTestCase(BaseTestCase):
resource = {{ model_meta.object_name }}ViewSet
def setUp(self):
super().setUp()
# Create a {{ model_meta.object_name }} for testing
{{ model_meta.object_name }}Factory()
#==============================================================================
# API should be forbidden if user is not logged in.
#==============================================================================
def test_get_{{ model_meta.name }}_forbidden(self):
self.auth = None
self.get_json_method_forbidden()
def test_post_{{ model_meta.name }}_forbidden(self):
self.auth = None
data = {}
self.post_json_method_forbidden(data=data)
def test_put_{{ model_meta.name }}_forbidden(self):
self.auth = None
data = {}
self.put_json_method_forbidden(data=data)
def test_patch_{{ model_meta.name }}_forbidden(self):
self.auth = None
data = {}
self.patch_json_forbidden(data=data)
def test_delete_{{ model_meta.name }}_forbidden(self):
self.auth = None
self.delete_method_forbidden()
#==============================================================================
# API should be success with authenticated users.
#==============================================================================
def test_get_{{ model_meta.name }}_accepted(self):
self.get_json_ok()
# Get 1 {{ model_meta.name }}.
{{ model_meta.name }} = {{ model_meta.object_name }}.objects.all()
self.assertEqual(len({{ model_meta.name }}), 1)
# Fill in futher test cases
def test_get_{{ model_meta.name }}_pagination_ok(self):
self.sampling.generate_by_model(
app_name='{{ app_name }}',
model_name='{{ model_meta.object_name }}',
sampling=100,
)
# Get 101 {{ model_meta.verbose_name_plural }}.
{{ model_meta.verbose_name_plural }} = {{ model_meta.object_name }}.objects.all()
self.assertEqual(len({{ model_meta.verbose_name_plural }}), 101)
# Test default case
resp = self.get_json_ok('', limit=10)
resp_json = self.deserialize(resp)
# Check response JSON
self.assertEqual(resp_json['count'], 101)
self.assertEqual(resp_json['previous'], None)
self.assertEqual(type(resp_json['next']), str)
self.assertEqual(type(resp_json['results']), list)
self.assertEqual(len(resp_json['results']), 10)
# Test another case
resp = self.get_json_ok('', limit=25, offset=25)
resp_json = self.deserialize(resp)
# Check response JSON
self.assertEqual(resp_json['count'], 101)
self.assertEqual(type(resp_json['next']), str)
self.assertEqual(type(resp_json['previous']), str)
self.assertEqual(type(resp_json['results']), list)
self.assertEqual(len(resp_json['results']), 25)
def test_post_{{ model_meta.name }}_accepted(self):
data = {}
self.post_json_created(data=data)
# Get 2 {{ model_meta.verbose_name_plural }}.
{{ model_meta.verbose_name_plural }} = {{ model_meta.object_name }}.objects.all()
self.assertEqual(len({{ model_meta.verbose_name_plural }}), 2)
# Fill in futher test cases
def test_put_{{ model_meta.name }}_accepted(self):
data = {}
{{ model_meta.name }} = {{ model_meta.object_name }}.objects.first()
self.put_json_ok(data=data, fragment='%d/' % {{ model_meta.name }}.id)
# Get 1 {{ model_meta.name }}.
{{ model_meta.name }} = {{ model_meta.object_name }}.objects.all()
self.assertEqual(len({{ model_meta.name }}), 1)
# Fill in futher test cases
def test_delete_{{ model_meta.name }}_accepted(self):
{{ model_meta.name }} = {{ model_meta.object_name }}.objects.first()
self.delete_json_ok('%d/' % {{ model_meta.name }}.id)
# Get 0 {{ model_meta.name }}.
{{ model_meta.name }} = {{ model_meta.object_name }}.objects.non_archived_only()
self.assertEqual(len({{ model_meta.name }}), 0)
# Fill in futher test cases
""" | drf_app_generators/templates/tests.py | __all__ = ['TEST_MODEL_VIEW', 'TEST_API_VIEW']
TEST_MODEL_VIEW = """from django.test import TestCase
from {{ app_name }}.factories import {{ model_meta.object_name }}Factory
from {{ app_name }}.models import {{ model_meta.object_name }}
class {{ model_meta.object_name }}TestCase(TestCase):
def setUp(self):
super().setUp()
# create data from {{ model_meta.object_name }}Factory
{{ model_meta.object_name }}Factory()
def tearDown(self):
super().tearDown()
{{ model_meta.object_name }}.objects.all().delete()
def test_{{ model_meta.name }}_can_be_created(self):
{{ model_meta.name }} = {{ model_meta.object_name }}.objects.first()
self.assertEqual({{ model_meta.name }}.id, 1)
def test_{{ model_meta.name }}_can_be_updated(self):
pass
def test_{{ model_meta.name }}_can_be_deleted(self):
pass
"""
TEST_API_VIEW = """from drf_core.tests import BaseTestCase
from {{ app_name }}.factories import {{ model_meta.object_name }}Factory
from {{ app_name }}.models import {{ model_meta.object_name }}
from {{ app_name }}.apis import {{ model_meta.object_name }}ViewSet
class {{ model_meta.object_name }}ViewSetTestCase(BaseTestCase):
resource = {{ model_meta.object_name }}ViewSet
def setUp(self):
super().setUp()
# Create a {{ model_meta.object_name }} for testing
{{ model_meta.object_name }}Factory()
#==============================================================================
# API should be forbidden if user is not logged in.
#==============================================================================
def test_get_{{ model_meta.name }}_forbidden(self):
self.auth = None
self.get_json_method_forbidden()
def test_post_{{ model_meta.name }}_forbidden(self):
self.auth = None
data = {}
self.post_json_method_forbidden(data=data)
def test_put_{{ model_meta.name }}_forbidden(self):
self.auth = None
data = {}
self.put_json_method_forbidden(data=data)
def test_patch_{{ model_meta.name }}_forbidden(self):
self.auth = None
data = {}
self.patch_json_forbidden(data=data)
def test_delete_{{ model_meta.name }}_forbidden(self):
self.auth = None
self.delete_method_forbidden()
#==============================================================================
# API should be success with authenticated users.
#==============================================================================
def test_get_{{ model_meta.name }}_accepted(self):
self.get_json_ok()
# Get 1 {{ model_meta.name }}.
{{ model_meta.name }} = {{ model_meta.object_name }}.objects.all()
self.assertEqual(len({{ model_meta.name }}), 1)
# Fill in futher test cases
def test_get_{{ model_meta.name }}_pagination_ok(self):
self.sampling.generate_by_model(
app_name='{{ app_name }}',
model_name='{{ model_meta.object_name }}',
sampling=100,
)
# Get 101 {{ model_meta.verbose_name_plural }}.
{{ model_meta.verbose_name_plural }} = {{ model_meta.object_name }}.objects.all()
self.assertEqual(len({{ model_meta.verbose_name_plural }}), 101)
# Test default case
resp = self.get_json_ok('', limit=10)
resp_json = self.deserialize(resp)
# Check response JSON
self.assertEqual(resp_json['count'], 101)
self.assertEqual(resp_json['previous'], None)
self.assertEqual(type(resp_json['next']), str)
self.assertEqual(type(resp_json['results']), list)
self.assertEqual(len(resp_json['results']), 10)
# Test another case
resp = self.get_json_ok('', limit=25, offset=25)
resp_json = self.deserialize(resp)
# Check response JSON
self.assertEqual(resp_json['count'], 101)
self.assertEqual(type(resp_json['next']), str)
self.assertEqual(type(resp_json['previous']), str)
self.assertEqual(type(resp_json['results']), list)
self.assertEqual(len(resp_json['results']), 25)
def test_post_{{ model_meta.name }}_accepted(self):
data = {}
self.post_json_created(data=data)
# Get 2 {{ model_meta.verbose_name_plural }}.
{{ model_meta.verbose_name_plural }} = {{ model_meta.object_name }}.objects.all()
self.assertEqual(len({{ model_meta.verbose_name_plural }}), 2)
# Fill in futher test cases
def test_put_{{ model_meta.name }}_accepted(self):
data = {}
{{ model_meta.name }} = {{ model_meta.object_name }}.objects.first()
self.put_json_ok(data=data, fragment='%d/' % {{ model_meta.name }}.id)
# Get 1 {{ model_meta.name }}.
{{ model_meta.name }} = {{ model_meta.object_name }}.objects.all()
self.assertEqual(len({{ model_meta.name }}), 1)
# Fill in futher test cases
def test_delete_{{ model_meta.name }}_accepted(self):
{{ model_meta.name }} = {{ model_meta.object_name }}.objects.first()
self.delete_json_ok('%d/' % {{ model_meta.name }}.id)
# Get 0 {{ model_meta.name }}.
{{ model_meta.name }} = {{ model_meta.object_name }}.objects.non_archived_only()
self.assertEqual(len({{ model_meta.name }}), 0)
# Fill in futher test cases
""" | 0.437824 | 0.204799 |
from __future__ import unicode_literals, absolute_import
from django.utils.encoding import python_2_unicode_compatible
from django.db import models
from django.core.urlresolvers import reverse_lazy
from django.utils.translation import ugettext_lazy as _
@python_2_unicode_compatible
class Author(models.Model):
# first_name = models.CharField(verbose_name=_("first name"), max_length=20) # gTranslator,
# first_name = models.CharField(_("first name"), max_length=20) # gTranslator,
first_name = models.CharField(_("first name"), max_length=20)
last_name = models.CharField(_("last name"), max_length=50)
class Meta:
ordering = ("last_name", "first_name")
verbose_name = _("Author") # nie mianownik, tylko dopeniacz
verbose_name_plural = _("Authors")
def __str__(self):
return "{first_name} {last_name}".format(first_name=self.first_name,
last_name=self.last_name)
class Publisher(models.Model):
name = models.CharField(max_length=70)
url = models.CharField(max_length=250, default="http://www.")
def __str__(self):
return self.name
class BookCategory(models.Model):
name = models.CharField(_("book category"), max_length=100)
def __str__(self):
return self.name
class Book(models.Model):
"""
Coś w rodzaju rękopisu.
Something like a manuscript.
"""
title = models.CharField(max_length=100)
author = models.ManyToManyField(Author)
category = models.ManyToManyField(BookCategory)
class Meta:
ordering = ["title"]
verbose_name = _("book")
verbose_name_plural = _("books")
def __str__(self):
return self.title
def get_absolute_url(self):
return reverse_lazy('shelf:book-detail', kwargs={'pk': self.id})
class BookEdition(models.Model):
"""
Edition of the book.
Wydanie określonej książki.
"""
book = models.ForeignKey(Book, related_name='editions')
isbn = models.CharField(max_length=17, blank=True)
date = models.DateField()
publisher = models.ForeignKey(Publisher)
def __str__(self):
return "{book.title}, {publisher.name}".format(book=self.book,
publisher=self.publisher)
COVER_TYPES = (
('soft', 'Soft'),
('hard', 'Hard')
# wartość w bazie, wyświetlana nazwa
)
class BookItem(models.Model):
"""
Concrete specimen. (Konkretny egzemplarz)
"""
edition = models.ForeignKey(BookEdition)
catalogue_number = models.CharField(max_length=30)
cover_type = models.CharField(max_length=4, choices=COVER_TYPES) # rodzaj okładki
def __str__(self):
return "ID_{id}: {edition}, {cover}".format(id=self._get_pk_val(),
edition=self.edition,
cover=self.get_cover_type_display()) | shelf/models.py | from __future__ import unicode_literals, absolute_import
from django.utils.encoding import python_2_unicode_compatible
from django.db import models
from django.core.urlresolvers import reverse_lazy
from django.utils.translation import ugettext_lazy as _
@python_2_unicode_compatible
class Author(models.Model):
# first_name = models.CharField(verbose_name=_("first name"), max_length=20) # gTranslator,
# first_name = models.CharField(_("first name"), max_length=20) # gTranslator,
first_name = models.CharField(_("first name"), max_length=20)
last_name = models.CharField(_("last name"), max_length=50)
class Meta:
ordering = ("last_name", "first_name")
verbose_name = _("Author") # nie mianownik, tylko dopeniacz
verbose_name_plural = _("Authors")
def __str__(self):
return "{first_name} {last_name}".format(first_name=self.first_name,
last_name=self.last_name)
class Publisher(models.Model):
name = models.CharField(max_length=70)
url = models.CharField(max_length=250, default="http://www.")
def __str__(self):
return self.name
class BookCategory(models.Model):
name = models.CharField(_("book category"), max_length=100)
def __str__(self):
return self.name
class Book(models.Model):
"""
Coś w rodzaju rękopisu.
Something like a manuscript.
"""
title = models.CharField(max_length=100)
author = models.ManyToManyField(Author)
category = models.ManyToManyField(BookCategory)
class Meta:
ordering = ["title"]
verbose_name = _("book")
verbose_name_plural = _("books")
def __str__(self):
return self.title
def get_absolute_url(self):
return reverse_lazy('shelf:book-detail', kwargs={'pk': self.id})
class BookEdition(models.Model):
"""
Edition of the book.
Wydanie określonej książki.
"""
book = models.ForeignKey(Book, related_name='editions')
isbn = models.CharField(max_length=17, blank=True)
date = models.DateField()
publisher = models.ForeignKey(Publisher)
def __str__(self):
return "{book.title}, {publisher.name}".format(book=self.book,
publisher=self.publisher)
COVER_TYPES = (
('soft', 'Soft'),
('hard', 'Hard')
# wartość w bazie, wyświetlana nazwa
)
class BookItem(models.Model):
"""
Concrete specimen. (Konkretny egzemplarz)
"""
edition = models.ForeignKey(BookEdition)
catalogue_number = models.CharField(max_length=30)
cover_type = models.CharField(max_length=4, choices=COVER_TYPES) # rodzaj okładki
def __str__(self):
return "ID_{id}: {edition}, {cover}".format(id=self._get_pk_val(),
edition=self.edition,
cover=self.get_cover_type_display()) | 0.593256 | 0.134264 |
load("@bazel_tools//tools/build_defs/repo:git.bzl", "new_git_repository") # buildifier: disable=load
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") # buildifier: disable=load
load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe") # buildifier: disable=load
def raze_fetch_remote_crates():
"""This function defines a collection of repos and should be called in a WORKSPACE file"""
maybe(
http_archive,
name = "raze__ahash__0_7_2",
url = "https://crates.io/api/v1/crates/ahash/0.7.2/download",
type = "tar.gz",
sha256 = "7f200cbb1e856866d9eade941cf3aa0c5d7dd36f74311c4273b494f4ef036957",
strip_prefix = "ahash-0.7.2",
build_file = Label("//bazel/cargo/remote:BUILD.ahash-0.7.2.bazel"),
)
maybe(
http_archive,
name = "raze__autocfg__1_0_1",
url = "https://crates.io/api/v1/crates/autocfg/1.0.1/download",
type = "tar.gz",
sha256 = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a",
strip_prefix = "autocfg-1.0.1",
build_file = Label("//bazel/cargo/remote:BUILD.autocfg-1.0.1.bazel"),
)
maybe(
http_archive,
name = "raze__bitflags__1_2_1",
url = "https://crates.io/api/v1/crates/bitflags/1.2.1/download",
type = "tar.gz",
sha256 = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693",
strip_prefix = "bitflags-1.2.1",
build_file = Label("//bazel/cargo/remote:BUILD.bitflags-1.2.1.bazel"),
)
maybe(
http_archive,
name = "raze__bstr__0_2_15",
url = "https://crates.io/api/v1/crates/bstr/0.2.15/download",
type = "tar.gz",
sha256 = "a40b47ad93e1a5404e6c18dec46b628214fee441c70f4ab5d6942142cc268a3d",
strip_prefix = "bstr-0.2.15",
build_file = Label("//bazel/cargo/remote:BUILD.bstr-0.2.15.bazel"),
)
maybe(
http_archive,
name = "raze__byteorder__1_4_3",
url = "https://crates.io/api/v1/crates/byteorder/1.4.3/download",
type = "tar.gz",
sha256 = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610",
strip_prefix = "byteorder-1.4.3",
build_file = Label("//bazel/cargo/remote:BUILD.byteorder-1.4.3.bazel"),
)
maybe(
http_archive,
name = "raze__cfg_if__1_0_0",
url = "https://crates.io/api/v1/crates/cfg-if/1.0.0/download",
type = "tar.gz",
sha256 = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd",
strip_prefix = "cfg-if-1.0.0",
build_file = Label("//bazel/cargo/remote:BUILD.cfg-if-1.0.0.bazel"),
)
maybe(
http_archive,
name = "raze__chrono__0_4_19",
url = "https://crates.io/api/v1/crates/chrono/0.4.19/download",
type = "tar.gz",
sha256 = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73",
strip_prefix = "chrono-0.4.19",
build_file = Label("//bazel/cargo/remote:BUILD.chrono-0.4.19.bazel"),
)
maybe(
http_archive,
name = "raze__form_urlencoded__1_0_1",
url = "https://crates.io/api/v1/crates/form_urlencoded/1.0.1/download",
type = "tar.gz",
sha256 = "5fc25a87fa4fd2094bffb06925852034d90a17f0d1e05197d4956d3555752191",
strip_prefix = "form_urlencoded-1.0.1",
build_file = Label("//bazel/cargo/remote:BUILD.form_urlencoded-1.0.1.bazel"),
)
maybe(
http_archive,
name = "raze__getrandom__0_2_2",
url = "https://crates.io/api/v1/crates/getrandom/0.2.2/download",
type = "tar.gz",
sha256 = "c9495705279e7140bf035dde1f6e750c162df8b625267cd52cc44e0b156732c8",
strip_prefix = "getrandom-0.2.2",
build_file = Label("//bazel/cargo/remote:BUILD.getrandom-0.2.2.bazel"),
)
maybe(
http_archive,
name = "raze__hashbrown__0_11_2",
url = "https://crates.io/api/v1/crates/hashbrown/0.11.2/download",
type = "tar.gz",
sha256 = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e",
strip_prefix = "hashbrown-0.11.2",
build_file = Label("//bazel/cargo/remote:BUILD.hashbrown-0.11.2.bazel"),
)
maybe(
http_archive,
name = "raze__idna__0_2_3",
url = "https://crates.io/api/v1/crates/idna/0.2.3/download",
type = "tar.gz",
sha256 = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8",
strip_prefix = "idna-0.2.3",
build_file = Label("//bazel/cargo/remote:BUILD.idna-0.2.3.bazel"),
)
maybe(
http_archive,
name = "raze__lazy_static__1_4_0",
url = "https://crates.io/api/v1/crates/lazy_static/1.4.0/download",
type = "tar.gz",
sha256 = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646",
strip_prefix = "lazy_static-1.4.0",
build_file = Label("//bazel/cargo/remote:BUILD.lazy_static-1.4.0.bazel"),
)
maybe(
http_archive,
name = "raze__libc__0_2_93",
url = "https://crates.io/api/v1/crates/libc/0.2.93/download",
type = "tar.gz",
sha256 = "9385f66bf6105b241aa65a61cb923ef20efc665cb9f9bb50ac2f0c4b7f378d41",
strip_prefix = "libc-0.2.93",
build_file = Label("//bazel/cargo/remote:BUILD.libc-0.2.93.bazel"),
)
maybe(
http_archive,
name = "raze__log__0_4_14",
url = "https://crates.io/api/v1/crates/log/0.4.14/download",
type = "tar.gz",
sha256 = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710",
strip_prefix = "log-0.4.14",
build_file = Label("//bazel/cargo/remote:BUILD.log-0.4.14.bazel"),
)
maybe(
http_archive,
name = "raze__matches__0_1_8",
url = "https://crates.io/api/v1/crates/matches/0.1.8/download",
type = "tar.gz",
sha256 = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08",
strip_prefix = "matches-0.1.8",
build_file = Label("//bazel/cargo/remote:BUILD.matches-0.1.8.bazel"),
)
maybe(
http_archive,
name = "raze__memchr__2_3_4",
url = "https://crates.io/api/v1/crates/memchr/2.3.4/download",
type = "tar.gz",
sha256 = "0ee1c47aaa256ecabcaea351eae4a9b01ef39ed810004e298d2511ed284b1525",
strip_prefix = "memchr-2.3.4",
build_file = Label("//bazel/cargo/remote:BUILD.memchr-2.3.4.bazel"),
)
maybe(
http_archive,
name = "raze__num_integer__0_1_44",
url = "https://crates.io/api/v1/crates/num-integer/0.1.44/download",
type = "tar.gz",
sha256 = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db",
strip_prefix = "num-integer-0.1.44",
build_file = Label("//bazel/cargo/remote:BUILD.num-integer-0.1.44.bazel"),
)
maybe(
http_archive,
name = "raze__num_traits__0_2_14",
url = "https://crates.io/api/v1/crates/num-traits/0.2.14/download",
type = "tar.gz",
sha256 = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290",
strip_prefix = "num-traits-0.2.14",
build_file = Label("//bazel/cargo/remote:BUILD.num-traits-0.2.14.bazel"),
)
maybe(
http_archive,
name = "raze__once_cell__1_7_2",
url = "https://crates.io/api/v1/crates/once_cell/1.7.2/download",
type = "tar.gz",
sha256 = "af8b08b04175473088b46763e51ee54da5f9a164bc162f615b91bc179dbf15a3",
strip_prefix = "once_cell-1.7.2",
build_file = Label("//bazel/cargo/remote:BUILD.once_cell-1.7.2.bazel"),
)
maybe(
http_archive,
name = "raze__percent_encoding__2_1_0",
url = "https://crates.io/api/v1/crates/percent-encoding/2.1.0/download",
type = "tar.gz",
sha256 = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e",
strip_prefix = "percent-encoding-2.1.0",
build_file = Label("//bazel/cargo/remote:BUILD.percent-encoding-2.1.0.bazel"),
)
maybe(
http_archive,
name = "raze__proc_macro2__1_0_26",
url = "https://crates.io/api/v1/crates/proc-macro2/1.0.26/download",
type = "tar.gz",
sha256 = "a152013215dca273577e18d2bf00fa862b89b24169fb78c4c95aeb07992c9cec",
strip_prefix = "proc-macro2-1.0.26",
build_file = Label("//bazel/cargo/remote:BUILD.proc-macro2-1.0.26.bazel"),
)
maybe(
http_archive,
name = "raze__pulldown_cmark__0_8_0",
url = "https://crates.io/api/v1/crates/pulldown-cmark/0.8.0/download",
type = "tar.gz",
sha256 = "ffade02495f22453cd593159ea2f59827aae7f53fa8323f756799b670881dcf8",
strip_prefix = "pulldown-cmark-0.8.0",
build_file = Label("//bazel/cargo/remote:BUILD.pulldown-cmark-0.8.0.bazel"),
)
maybe(
http_archive,
name = "raze__quote__1_0_9",
url = "https://crates.io/api/v1/crates/quote/1.0.9/download",
type = "tar.gz",
sha256 = "c3d0b9745dc2debf507c8422de05d7226cc1f0644216dfdfead988f9b1ab32a7",
strip_prefix = "quote-1.0.9",
build_file = Label("//bazel/cargo/remote:BUILD.quote-1.0.9.bazel"),
)
maybe(
http_archive,
name = "raze__regex__1_4_5",
url = "https://crates.io/api/v1/crates/regex/1.4.5/download",
type = "tar.gz",
sha256 = "957056ecddbeba1b26965114e191d2e8589ce74db242b6ea25fc4062427a5c19",
strip_prefix = "regex-1.4.5",
build_file = Label("//bazel/cargo/remote:BUILD.regex-1.4.5.bazel"),
)
maybe(
http_archive,
name = "raze__regex_automata__0_1_9",
url = "https://crates.io/api/v1/crates/regex-automata/0.1.9/download",
type = "tar.gz",
sha256 = "ae1ded71d66a4a97f5e961fd0cb25a5f366a42a41570d16a763a69c092c26ae4",
strip_prefix = "regex-automata-0.1.9",
build_file = Label("//bazel/cargo/remote:BUILD.regex-automata-0.1.9.bazel"),
)
maybe(
http_archive,
name = "raze__regex_syntax__0_6_23",
url = "https://crates.io/api/v1/crates/regex-syntax/0.6.23/download",
type = "tar.gz",
sha256 = "24d5f089152e60f62d28b835fbff2cd2e8dc0baf1ac13343bef92ab7eed84548",
strip_prefix = "regex-syntax-0.6.23",
build_file = Label("//bazel/cargo/remote:BUILD.regex-syntax-0.6.23.bazel"),
)
maybe(
http_archive,
name = "raze__semver_parser__0_9_0",
url = "https://crates.io/api/v1/crates/semver-parser/0.9.0/download",
type = "tar.gz",
sha256 = "b46e1121e8180c12ff69a742aabc4f310542b6ccb69f1691689ac17fdf8618aa",
strip_prefix = "semver-parser-0.9.0",
build_file = Label("//bazel/cargo/remote:BUILD.semver-parser-0.9.0.bazel"),
)
maybe(
http_archive,
name = "raze__serde__1_0_125",
url = "https://crates.io/api/v1/crates/serde/1.0.125/download",
type = "tar.gz",
sha256 = "558dc50e1a5a5fa7112ca2ce4effcb321b0300c0d4ccf0776a9f60cd89031171",
strip_prefix = "serde-1.0.125",
build_file = Label("//bazel/cargo/remote:BUILD.serde-1.0.125.bazel"),
)
maybe(
http_archive,
name = "raze__syn__1_0_69",
url = "https://crates.io/api/v1/crates/syn/1.0.69/download",
type = "tar.gz",
sha256 = "48fe99c6bd8b1cc636890bcc071842de909d902c81ac7dab53ba33c421ab8ffb",
strip_prefix = "syn-1.0.69",
build_file = Label("//bazel/cargo/remote:BUILD.syn-1.0.69.bazel"),
)
maybe(
http_archive,
name = "raze__time__0_1_43",
url = "https://crates.io/api/v1/crates/time/0.1.43/download",
type = "tar.gz",
sha256 = "ca8a50ef2360fbd1eeb0ecd46795a87a19024eb4b53c5dc916ca1fd95fe62438",
strip_prefix = "time-0.1.43",
build_file = Label("//bazel/cargo/remote:BUILD.time-0.1.43.bazel"),
)
maybe(
http_archive,
name = "raze__tinyvec__1_2_0",
url = "https://crates.io/api/v1/crates/tinyvec/1.2.0/download",
type = "tar.gz",
sha256 = "5b5220f05bb7de7f3f53c7c065e1199b3172696fe2db9f9c4d8ad9b4ee74c342",
strip_prefix = "tinyvec-1.2.0",
build_file = Label("//bazel/cargo/remote:BUILD.tinyvec-1.2.0.bazel"),
)
maybe(
http_archive,
name = "raze__tinyvec_macros__0_1_0",
url = "https://crates.io/api/v1/crates/tinyvec_macros/0.1.0/download",
type = "tar.gz",
sha256 = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c",
strip_prefix = "tinyvec_macros-0.1.0",
build_file = Label("//bazel/cargo/remote:BUILD.tinyvec_macros-0.1.0.bazel"),
)
maybe(
http_archive,
name = "raze__toml__0_5_8",
url = "https://crates.io/api/v1/crates/toml/0.5.8/download",
type = "tar.gz",
sha256 = "a31142970826733df8241ef35dc040ef98c679ab14d7c3e54d827099b3acecaa",
strip_prefix = "toml-0.5.8",
build_file = Label("//bazel/cargo/remote:BUILD.toml-0.5.8.bazel"),
)
maybe(
http_archive,
name = "raze__unicase__2_6_0",
url = "https://crates.io/api/v1/crates/unicase/2.6.0/download",
type = "tar.gz",
sha256 = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6",
strip_prefix = "unicase-2.6.0",
build_file = Label("//bazel/cargo/remote:BUILD.unicase-2.6.0.bazel"),
)
maybe(
http_archive,
name = "raze__unicode_bidi__0_3_5",
url = "https://crates.io/api/v1/crates/unicode-bidi/0.3.5/download",
type = "tar.gz",
sha256 = "eeb8be209bb1c96b7c177c7420d26e04eccacb0eeae6b980e35fcb74678107e0",
strip_prefix = "unicode-bidi-0.3.5",
build_file = Label("//bazel/cargo/remote:BUILD.unicode-bidi-0.3.5.bazel"),
)
maybe(
http_archive,
name = "raze__unicode_normalization__0_1_17",
url = "https://crates.io/api/v1/crates/unicode-normalization/0.1.17/download",
type = "tar.gz",
sha256 = "07fbfce1c8a97d547e8b5334978438d9d6ec8c20e38f56d4a4374d181493eaef",
strip_prefix = "unicode-normalization-0.1.17",
build_file = Label("//bazel/cargo/remote:BUILD.unicode-normalization-0.1.17.bazel"),
)
maybe(
http_archive,
name = "raze__unicode_xid__0_2_1",
url = "https://crates.io/api/v1/crates/unicode-xid/0.2.1/download",
type = "tar.gz",
sha256 = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564",
strip_prefix = "unicode-xid-0.2.1",
build_file = Label("//bazel/cargo/remote:BUILD.unicode-xid-0.2.1.bazel"),
)
maybe(
http_archive,
name = "raze__url__2_2_1",
url = "https://crates.io/api/v1/crates/url/2.2.1/download",
type = "tar.gz",
sha256 = "9ccd964113622c8e9322cfac19eb1004a07e636c545f325da085d5cdde6f1f8b",
strip_prefix = "url-2.2.1",
build_file = Label("//bazel/cargo/remote:BUILD.url-2.2.1.bazel"),
)
maybe(
http_archive,
name = "raze__version_sync__0_9_2",
url = "https://crates.io/api/v1/crates/version-sync/0.9.2/download",
type = "tar.gz",
sha256 = "7cb94ca10ca0cf44f5d926ac977f0cac2d13e9789aa4bbe9d9388de445e61028",
strip_prefix = "version-sync-0.9.2",
build_file = Label("//bazel/cargo/remote:BUILD.version-sync-0.9.2.bazel"),
)
maybe(
http_archive,
name = "raze__version_check__0_9_3",
url = "https://crates.io/api/v1/crates/version_check/0.9.3/download",
type = "tar.gz",
sha256 = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe",
strip_prefix = "version_check-0.9.3",
build_file = Label("//bazel/cargo/remote:BUILD.version_check-0.9.3.bazel"),
)
maybe(
http_archive,
name = "raze__wasi__0_10_2_wasi_snapshot_preview1",
url = "https://crates.io/api/v1/crates/wasi/0.10.2+wasi-snapshot-preview1/download",
type = "tar.gz",
sha256 = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6",
strip_prefix = "wasi-0.10.2+wasi-snapshot-preview1",
build_file = Label("//bazel/cargo/remote:BUILD.wasi-0.10.2+wasi-snapshot-preview1.bazel"),
)
maybe(
http_archive,
name = "raze__winapi__0_3_9",
url = "https://crates.io/api/v1/crates/winapi/0.3.9/download",
type = "tar.gz",
sha256 = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419",
strip_prefix = "winapi-0.3.9",
build_file = Label("//bazel/cargo/remote:BUILD.winapi-0.3.9.bazel"),
)
maybe(
http_archive,
name = "raze__winapi_i686_pc_windows_gnu__0_4_0",
url = "https://crates.io/api/v1/crates/winapi-i686-pc-windows-gnu/0.4.0/download",
type = "tar.gz",
sha256 = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6",
strip_prefix = "winapi-i686-pc-windows-gnu-0.4.0",
build_file = Label("//bazel/cargo/remote:BUILD.winapi-i686-pc-windows-gnu-0.4.0.bazel"),
)
maybe(
http_archive,
name = "raze__winapi_x86_64_pc_windows_gnu__0_4_0",
url = "https://crates.io/api/v1/crates/winapi-x86_64-pc-windows-gnu/0.4.0/download",
type = "tar.gz",
sha256 = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f",
strip_prefix = "winapi-x86_64-pc-windows-gnu-0.4.0",
build_file = Label("//bazel/cargo/remote:BUILD.winapi-x86_64-pc-windows-gnu-0.4.0.bazel"),
) | bazel/cargo/crates.bzl | load("@bazel_tools//tools/build_defs/repo:git.bzl", "new_git_repository") # buildifier: disable=load
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") # buildifier: disable=load
load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe") # buildifier: disable=load
def raze_fetch_remote_crates():
    """This function defines a collection of repos and should be called in a WORKSPACE file"""

    # NOTE(review): this file is presumably generated by cargo-raze -- confirm
    # before hand-maintaining. Every archive below is fetched from crates.io
    # and differs only in its (crate, version, sha256) triple; the repository
    # name, URL, strip_prefix and BUILD file label are all derived from it.
    crates = [
        ("ahash", "0.7.2", "7f200cbb1e856866d9eade941cf3aa0c5d7dd36f74311c4273b494f4ef036957"),
        ("autocfg", "1.0.1", "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"),
        ("bitflags", "1.2.1", "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"),
        ("bstr", "0.2.15", "a40b47ad93e1a5404e6c18dec46b628214fee441c70f4ab5d6942142cc268a3d"),
        ("byteorder", "1.4.3", "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610"),
        ("cfg-if", "1.0.0", "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"),
        ("chrono", "0.4.19", "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73"),
        ("form_urlencoded", "1.0.1", "5fc25a87fa4fd2094bffb06925852034d90a17f0d1e05197d4956d3555752191"),
        ("getrandom", "0.2.2", "c9495705279e7140bf035dde1f6e750c162df8b625267cd52cc44e0b156732c8"),
        ("hashbrown", "0.11.2", "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e"),
        ("idna", "0.2.3", "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8"),
        ("lazy_static", "1.4.0", "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"),
        ("libc", "0.2.93", "9385f66bf6105b241aa65a61cb923ef20efc665cb9f9bb50ac2f0c4b7f378d41"),
        ("log", "0.4.14", "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710"),
        ("matches", "0.1.8", "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08"),
        ("memchr", "2.3.4", "0ee1c47aaa256ecabcaea351eae4a9b01ef39ed810004e298d2511ed284b1525"),
        ("num-integer", "0.1.44", "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db"),
        ("num-traits", "0.2.14", "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290"),
        ("once_cell", "1.7.2", "af8b08b04175473088b46763e51ee54da5f9a164bc162f615b91bc179dbf15a3"),
        ("percent-encoding", "2.1.0", "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e"),
        ("proc-macro2", "1.0.26", "a152013215dca273577e18d2bf00fa862b89b24169fb78c4c95aeb07992c9cec"),
        ("pulldown-cmark", "0.8.0", "ffade02495f22453cd593159ea2f59827aae7f53fa8323f756799b670881dcf8"),
        ("quote", "1.0.9", "c3d0b9745dc2debf507c8422de05d7226cc1f0644216dfdfead988f9b1ab32a7"),
        ("regex", "1.4.5", "957056ecddbeba1b26965114e191d2e8589ce74db242b6ea25fc4062427a5c19"),
        ("regex-automata", "0.1.9", "ae1ded71d66a4a97f5e961fd0cb25a5f366a42a41570d16a763a69c092c26ae4"),
        ("regex-syntax", "0.6.23", "24d5f089152e60f62d28b835fbff2cd2e8dc0baf1ac13343bef92ab7eed84548"),
        ("semver-parser", "0.9.0", "b46e1121e8180c12ff69a742aabc4f310542b6ccb69f1691689ac17fdf8618aa"),
        ("serde", "1.0.125", "558dc50e1a5a5fa7112ca2ce4effcb321b0300c0d4ccf0776a9f60cd89031171"),
        ("syn", "1.0.69", "48fe99c6bd8b1cc636890bcc071842de909d902c81ac7dab53ba33c421ab8ffb"),
        ("time", "0.1.43", "ca8a50ef2360fbd1eeb0ecd46795a87a19024eb4b53c5dc916ca1fd95fe62438"),
        ("tinyvec", "1.2.0", "5b5220f05bb7de7f3f53c7c065e1199b3172696fe2db9f9c4d8ad9b4ee74c342"),
        ("tinyvec_macros", "0.1.0", "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c"),
        ("toml", "0.5.8", "a31142970826733df8241ef35dc040ef98c679ab14d7c3e54d827099b3acecaa"),
        ("unicase", "2.6.0", "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6"),
        ("unicode-bidi", "0.3.5", "eeb8be209bb1c96b7c177c7420d26e04eccacb0eeae6b980e35fcb74678107e0"),
        ("unicode-normalization", "0.1.17", "07fbfce1c8a97d547e8b5334978438d9d6ec8c20e38f56d4a4374d181493eaef"),
        ("unicode-xid", "0.2.1", "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564"),
        ("url", "2.2.1", "9ccd964113622c8e9322cfac19eb1004a07e636c545f325da085d5cdde6f1f8b"),
        ("version-sync", "0.9.2", "7cb94ca10ca0cf44f5d926ac977f0cac2d13e9789aa4bbe9d9388de445e61028"),
        ("version_check", "0.9.3", "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe"),
        ("wasi", "0.10.2+wasi-snapshot-preview1", "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6"),
        ("winapi", "0.3.9", "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"),
        ("winapi-i686-pc-windows-gnu", "0.4.0", "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"),
        ("winapi-x86_64-pc-windows-gnu", "0.4.0", "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"),
    ]

    for crate, version, sha256 in crates:
        # Repository names follow the raze convention raze__<crate>__<version>
        # with '-', '.' and '+' all mapped to '_'.
        crate_part = crate.replace("-", "_")
        version_part = version.replace(".", "_").replace("-", "_").replace("+", "_")
        prefix = "{}-{}".format(crate, version)
        maybe(
            http_archive,
            name = "raze__{}__{}".format(crate_part, version_part),
            url = "https://crates.io/api/v1/crates/{}/{}/download".format(crate, version),
            type = "tar.gz",
            sha256 = sha256,
            strip_prefix = prefix,
            build_file = Label("//bazel/cargo/remote:BUILD.{}.bazel".format(prefix)),
        )
"""Experiment utils."""
import os
import tensorflow as tf
from cen import losses
from cen import metrics
from cen import models
from cen import networks
class ModeKeys(object):
    """String constants naming the phase a model is built for."""

    TRAIN = "train"  # compile with training callbacks (see build())
    EVAL = "eval"    # compile and restore weights from the checkpoint
    INFER = "infer"  # same build path as EVAL in build()
def get_input_dtypes(data):
    """Returns input dtypes.

    Args:
        data: Tuple of (inputs, targets) where `inputs` maps input names to
            array-like batches exposing a `.dtype` attribute.

    Returns:
        Dict mapping each input name to its dtype rendered as a string.
    """
    # The docstring previously claimed "input shapes" (copy-paste error);
    # this function returns dtype strings.
    return {k: str(v.dtype) for k, v in data[0].items()}
def get_input_shapes(data):
    """Returns input shapes (per input name, without the batch axis)."""
    inputs = data[0]
    return dict((name, batch.shape[1:]) for name, batch in inputs.items())
def get_output_shape(data):
    """Returns the output shape (without the batch axis)."""
    targets = data[1]
    return targets.shape[1:]
def build(
    cfg,
    input_dtypes,
    input_shapes,
    output_shape,
    mode=ModeKeys.TRAIN,
    working_dir=None,
):
    """Builds model and callbacks for training or evaluation.

    Args:
        cfg: Experiment configuration with `network`, `model`, `train`,
            `eval`, and `optimizer` sections (presumably an OmegaConf/Hydra
            config -- both attribute access and `**`-expansion are used).
        input_dtypes: Mapping from input name to dtype string.
        input_shapes: Mapping from input name to per-example shape.
        output_shape: Per-example output shape.
        mode: One of the `ModeKeys` values; any value other than TRAIN takes
            the evaluation/inference path that restores checkpointed weights.
        working_dir: Directory for checkpoints and TensorBoard logs; defaults
            to the current working directory.

    Returns:
        Tuple of (compiled Keras model, info dict). In TRAIN mode `info`
        additionally carries a "callbacks" list for `fit`.
    """
    # Drop graph/session state left over from previously built models.
    tf.keras.backend.clear_session()
    if working_dir is None:
        working_dir = os.getcwd()
    # Build model.
    net = networks.get(**cfg.network)
    model, info = models.get(
        cfg.model.name,
        encoder=net,
        input_dtypes=input_dtypes,
        input_shapes=input_shapes,
        output_shape=output_shape,
        **cfg.model.kwargs,
    )
    # Build loss and optimizer. `dict(**...)` turns the config node into a
    # plain dict that `tf.keras.optimizers.get` can deserialize.
    loss = losses.get(**cfg.train.loss)
    opt = tf.keras.optimizers.get(dict(**cfg.optimizer))
    # Build metrics (left as None when the config lists none).
    metrics_list = None
    if cfg.eval.metrics:
        metrics_list = [metrics.get(**v) for _, v in cfg.eval.metrics.items()]
    # Compile model for training: attach checkpointing/TensorBoard callbacks
    # as configured and hand them back via `info`.
    if mode == ModeKeys.TRAIN:
        model.compile(optimizer=opt, loss=loss, metrics=metrics_list)
        callbacks = []
        if cfg.train.checkpoint_kwargs:
            callbacks.append(
                tf.keras.callbacks.ModelCheckpoint(
                    filepath=os.path.join(working_dir, "checkpoint"),
                    **cfg.train.checkpoint_kwargs,
                )
            )
        if cfg.train.tensorboard:
            callbacks.append(
                tf.keras.callbacks.TensorBoard(
                    log_dir=os.path.join(working_dir, "tensorboard"),
                    **cfg.train.tensorboard,
                )
            )
        info["callbacks"] = callbacks
        return model, info
    # Compile model for evaluation or inference: restore weights written by a
    # previous training run instead of attaching callbacks.
    else:
        model.compile(loss=loss, optimizer=opt, metrics=metrics_list)
        checkpoint_path = os.path.join(working_dir, "checkpoint")
        # `expect_partial()` suppresses warnings about checkpoint values
        # (e.g. optimizer slots) that are not used here.
        model.load_weights(checkpoint_path).expect_partial()
        return model, info
import os
import tensorflow as tf
from cen import losses
from cen import metrics
from cen import models
from cen import networks
class ModeKeys(object):
    """String constants naming the phase a model is built for."""

    TRAIN = "train"  # compile with training callbacks (see build())
    EVAL = "eval"    # compile and restore weights from the checkpoint
    INFER = "infer"  # same build path as EVAL in build()
def get_input_dtypes(data):
    """Returns input dtypes.

    Args:
        data: Tuple of (inputs, targets) where `inputs` maps input names to
            array-like batches exposing a `.dtype` attribute.

    Returns:
        Dict mapping each input name to its dtype rendered as a string.
    """
    # The docstring previously claimed "input shapes" (copy-paste error);
    # this function returns dtype strings.
    return {k: str(v.dtype) for k, v in data[0].items()}
def get_input_shapes(data):
    """Returns input shapes (per input name, without the batch axis)."""
    inputs = data[0]
    return dict((name, batch.shape[1:]) for name, batch in inputs.items())
def get_output_shape(data):
    """Returns the output shape (without the batch axis)."""
    targets = data[1]
    return targets.shape[1:]
def build(
    cfg,
    input_dtypes,
    input_shapes,
    output_shape,
    mode=ModeKeys.TRAIN,
    working_dir=None,
):
    """Builds model and callbacks for training or evaluation.

    Args:
        cfg: Experiment configuration with `network`, `model`, `train`,
            `eval`, and `optimizer` sections (presumably an OmegaConf/Hydra
            config -- both attribute access and `**`-expansion are used).
        input_dtypes: Mapping from input name to dtype string.
        input_shapes: Mapping from input name to per-example shape.
        output_shape: Per-example output shape.
        mode: One of the `ModeKeys` values; any value other than TRAIN takes
            the evaluation/inference path that restores checkpointed weights.
        working_dir: Directory for checkpoints and TensorBoard logs; defaults
            to the current working directory.

    Returns:
        Tuple of (compiled Keras model, info dict). In TRAIN mode `info`
        additionally carries a "callbacks" list for `fit`.
    """
    # Drop graph/session state left over from previously built models.
    tf.keras.backend.clear_session()
    if working_dir is None:
        working_dir = os.getcwd()
    # Build model.
    net = networks.get(**cfg.network)
    model, info = models.get(
        cfg.model.name,
        encoder=net,
        input_dtypes=input_dtypes,
        input_shapes=input_shapes,
        output_shape=output_shape,
        **cfg.model.kwargs,
    )
    # Build loss and optimizer. `dict(**...)` turns the config node into a
    # plain dict that `tf.keras.optimizers.get` can deserialize.
    loss = losses.get(**cfg.train.loss)
    opt = tf.keras.optimizers.get(dict(**cfg.optimizer))
    # Build metrics (left as None when the config lists none).
    metrics_list = None
    if cfg.eval.metrics:
        metrics_list = [metrics.get(**v) for _, v in cfg.eval.metrics.items()]
    # Compile model for training: attach checkpointing/TensorBoard callbacks
    # as configured and hand them back via `info`.
    if mode == ModeKeys.TRAIN:
        model.compile(optimizer=opt, loss=loss, metrics=metrics_list)
        callbacks = []
        if cfg.train.checkpoint_kwargs:
            callbacks.append(
                tf.keras.callbacks.ModelCheckpoint(
                    filepath=os.path.join(working_dir, "checkpoint"),
                    **cfg.train.checkpoint_kwargs,
                )
            )
        if cfg.train.tensorboard:
            callbacks.append(
                tf.keras.callbacks.TensorBoard(
                    log_dir=os.path.join(working_dir, "tensorboard"),
                    **cfg.train.tensorboard,
                )
            )
        info["callbacks"] = callbacks
        return model, info
    # Compile model for evaluation or inference: restore weights written by a
    # previous training run instead of attaching callbacks.
    else:
        model.compile(loss=loss, optimizer=opt, metrics=metrics_list)
        checkpoint_path = os.path.join(working_dir, "checkpoint")
        # `expect_partial()` suppresses warnings about checkpoint values
        # (e.g. optimizer slots) that are not used here.
        model.load_weights(checkpoint_path).expect_partial()
        return model, info
from __future__ import absolute_import, unicode_literals
import abc
import logging
from collections import OrderedDict, defaultdict
from copy import deepcopy
from multiprocessing import cpu_count
import numpy as np
import six
from keras.preprocessing.sequence import pad_sequences as keras_pad_sequences
from keras.utils.generic_utils import Progbar
try:
import spacy
except ImportError:
pass
logger = logging.getLogger(__name__)
class _CountTracker(object):
    """Helper class to track counts of various document hierarchies in the corpus.

    For example, if the tokenizer can tokenize docs as (docs, paragraph, sentences, words), then this utility
    will track number of paragraphs, number of sentences within paragraphs and number of words within sentence.
    """

    def __init__(self):
        # Hierarchy indices from the previous `update` call (None until then).
        self._prev_indices = None
        # Running counts for the hierarchy segment currently being accumulated.
        self._local_counts = None
        # Per-level lists of finished counts; filled by `update`/`finalize`.
        self.counts = None

    def update(self, indices):
        """Updates counts based on indices. The algorithm tracks the index change at i and
        update global counts for all indices beyond i with local counts tracked so far.

        Args:
            indices: Hierarchy indices for the current token, ordered from the
                outermost level to the innermost. Assumed non-decreasing across
                successive calls -- TODO confirm with callers.
        """
        # Initialize various lists for the first time based on length of indices.
        if self._prev_indices is None:
            self._prev_indices = indices

            # +1 to track token counts in the last index.
            self._local_counts = np.full(len(indices) + 1, 1)
            self._local_counts[-1] = 0
            self.counts = [[] for _ in range(len(self._local_counts))]

        has_reset = False
        for i in range(len(indices)):
            # index value changed. Push all local values beyond i to count and reset those local_counts.
            # For example, if document index changed, push counts on sentences and tokens and reset their local_counts
            # to indicate that we are tracking those for new document. We need to do this at all document hierarchies.
            if indices[i] > self._prev_indices[i]:
                self._local_counts[i] += 1
                has_reset = True
                for j in range(i + 1, len(self.counts)):
                    self.counts[j].append(self._local_counts[j])
                    self._local_counts[j] = 1

        # If none of the aux indices changed, update token count.
        if not has_reset:
            self._local_counts[-1] += 1
        # Copy to avoid aliasing a list the caller may mutate in place.
        self._prev_indices = indices[:]

    def finalize(self):
        """This will add the very last document to counts. We also get rid of counts[0] since that
        represents document level which doesnt come under anything else. We also convert all count
        values to numpy arrays so that stats can be computed easily.
        """
        for i in range(1, len(self._local_counts)):
            self.counts[i].append(self._local_counts[i])
        self.counts.pop(0)
        for i in range(len(self.counts)):
            self.counts[i] = np.array(self.counts[i])
def _apply_generator(texts, apply_fn):
for text in texts:
yield apply_fn(text)
def _append(lst, indices, value):
"""Adds `value` to `lst` list indexed by `indices`. Will create sub lists as required.
"""
for i, idx in enumerate(indices):
# We need to loop because sometimes indices can increment by more than 1 due to missing tokens.
# Example: Sentence with no words after filtering words.
while len(lst) <= idx:
# Update max counts whenever a new sublist is created.
# There is no need to worry about indices beyond `i` since they will end up creating new lists as well.
lst.append([])
lst = lst[idx]
# Add token and update token max count.
lst.append(value)
def _recursive_apply(lst, apply_fn):
if len(lst) > 0 and not isinstance(lst[0], list):
for i in range(len(lst)):
lst[i] = apply_fn(lst[i])
else:
for sub_list in lst:
_recursive_apply(sub_list, apply_fn)
def _to_unicode(text):
    """Returns `text` as unicode, decoding UTF-8 byte strings when needed."""
    if isinstance(text, six.text_type):
        return text
    return text.decode('utf-8')
def _parse_spacy_kwargs(**kwargs):
"""Supported args include:
Args:
n_threads/num_threads: Number of threads to use. Uses num_cpus - 1 by default.
batch_size: The number of texts to accumulate into a common working set before processing.
(Default value: 1000)
"""
n_threads = kwargs.get('n_threads') or kwargs.get('num_threads')
batch_size = kwargs.get('batch_size')
if n_threads is None or n_threads is -1:
n_threads = cpu_count() - 1
if batch_size is None or batch_size is -1:
batch_size = 1000
return n_threads, batch_size
def _pad_token_sequences(sequences, max_tokens,
                         padding, truncating, value):
    """Pads/truncates token sequences to length `max_tokens`.

    Thin wrapper around Keras' `pad_sequences`.
    """
    # TODO: better variable names (see below)
    return keras_pad_sequences(
        sequences,
        maxlen=max_tokens,
        padding=padding,
        truncating=truncating,
        value=value,
    )
def _pad_sent_sequences(sequences, max_sentences, max_tokens, padding, truncating, value):
    """Pads/truncates nested (sentence, token) sequences to a fixed shape.

    Args:
        sequences: List of documents; each document is a list of sentences,
            each sentence a list of tokens.
        max_sentences: Target sentence count per document; inferred from the
            data when None.
        max_tokens: Target token count per sentence; inferred when None.
        padding: 'pre' or 'post' -- where padding is inserted.
        truncating: 'pre' or 'post' -- which end is dropped when too long.
        value: Fill value for padded positions.

    Returns:
        Array of shape (len(sequences), max_sentences, max_tokens).

    Raises:
        ValueError: If `padding` or `truncating` is not 'pre'/'post'.
    """
    # TODO: better names (see below)
    # Infer max lengths if needed.
    if max_sentences is None or max_tokens is None:
        max_sentences_computed = 0
        max_tokens_computed = 0
        for sent_seq in sequences:
            max_sentences_computed = max(max_sentences_computed, len(sent_seq))
            # Fix: guard empty sentence sequences. `np.max([])` raises, yet
            # empty sequences are deliberately skipped by the padding loop
            # below, so they must not crash the inference pass either.
            if len(sent_seq):
                max_tokens_computed = max(
                    max_tokens_computed,
                    max(len(token_seq) for token_seq in sent_seq))
        # Only use inferred values for None.
        if max_sentences is None:
            max_sentences = max_sentences_computed
        if max_tokens is None:
            max_tokens = max_tokens_computed
    result = np.ones(shape=(len(sequences), max_sentences, max_tokens)) * value
    for idx, sent_seq in enumerate(sequences):
        # empty list/array was found
        if not len(sent_seq):
            continue
        if truncating == 'pre':
            trunc = sent_seq[-max_sentences:]
        elif truncating == 'post':
            trunc = sent_seq[:max_sentences]
        else:
            raise ValueError(
                'Truncating type "%s" not understood' % truncating)
        # Apply padding.
        if padding == 'post':
            result[idx, :len(trunc)] = _pad_token_sequences(
                trunc, max_tokens, padding, truncating, value)
        elif padding == 'pre':
            result[idx, -len(trunc):] = _pad_token_sequences(
                trunc, max_tokens, padding, truncating, value)
        else:
            raise ValueError('Padding type "%s" not understood' % padding)
    return result
def unicodify(texts):
    """Encodes all text sequences as unicode. This is a python2 hassle.

    Args:
        texts: The sequence of texts.

    Returns:
        Unicode encoded sequences.
    """
    return list(map(_to_unicode, texts))
import abc
import logging
from collections import OrderedDict, defaultdict
from copy import deepcopy
from multiprocessing import cpu_count
import numpy as np
import six
from keras.preprocessing.sequence import pad_sequences as keras_pad_sequences
from keras.utils.generic_utils import Progbar
try:
import spacy
except ImportError:
pass
logger = logging.getLogger(__name__)
class _CountTracker(object):
    """Helper class to track counts of various document hierarchies in the corpus.

    For example, if the tokenizer can tokenize docs as (docs, paragraph, sentences, words), then this utility
    will track number of paragraphs, number of sentences within paragraphs and number of words within sentence.
    """

    def __init__(self):
        # Hierarchy indices from the previous `update` call (None until then).
        self._prev_indices = None
        # Running counts for the hierarchy segment currently being accumulated.
        self._local_counts = None
        # Per-level lists of finished counts; filled by `update`/`finalize`.
        self.counts = None

    def update(self, indices):
        """Updates counts based on indices. The algorithm tracks the index change at i and
        update global counts for all indices beyond i with local counts tracked so far.

        Args:
            indices: Hierarchy indices for the current token, ordered from the
                outermost level to the innermost. Assumed non-decreasing across
                successive calls -- TODO confirm with callers.
        """
        # Initialize various lists for the first time based on length of indices.
        if self._prev_indices is None:
            self._prev_indices = indices

            # +1 to track token counts in the last index.
            self._local_counts = np.full(len(indices) + 1, 1)
            self._local_counts[-1] = 0
            self.counts = [[] for _ in range(len(self._local_counts))]

        has_reset = False
        for i in range(len(indices)):
            # index value changed. Push all local values beyond i to count and reset those local_counts.
            # For example, if document index changed, push counts on sentences and tokens and reset their local_counts
            # to indicate that we are tracking those for new document. We need to do this at all document hierarchies.
            if indices[i] > self._prev_indices[i]:
                self._local_counts[i] += 1
                has_reset = True
                for j in range(i + 1, len(self.counts)):
                    self.counts[j].append(self._local_counts[j])
                    self._local_counts[j] = 1

        # If none of the aux indices changed, update token count.
        if not has_reset:
            self._local_counts[-1] += 1
        # Copy to avoid aliasing a list the caller may mutate in place.
        self._prev_indices = indices[:]

    def finalize(self):
        """This will add the very last document to counts. We also get rid of counts[0] since that
        represents document level which doesnt come under anything else. We also convert all count
        values to numpy arrays so that stats can be computed easily.
        """
        for i in range(1, len(self._local_counts)):
            self.counts[i].append(self._local_counts[i])
        self.counts.pop(0)
        for i in range(len(self.counts)):
            self.counts[i] = np.array(self.counts[i])
def _apply_generator(texts, apply_fn):
for text in texts:
yield apply_fn(text)
def _append(lst, indices, value):
"""Adds `value` to `lst` list indexed by `indices`. Will create sub lists as required.
"""
for i, idx in enumerate(indices):
# We need to loop because sometimes indices can increment by more than 1 due to missing tokens.
# Example: Sentence with no words after filtering words.
while len(lst) <= idx:
# Update max counts whenever a new sublist is created.
# There is no need to worry about indices beyond `i` since they will end up creating new lists as well.
lst.append([])
lst = lst[idx]
# Add token and update token max count.
lst.append(value)
def _recursive_apply(lst, apply_fn):
if len(lst) > 0 and not isinstance(lst[0], list):
for i in range(len(lst)):
lst[i] = apply_fn(lst[i])
else:
for sub_list in lst:
_recursive_apply(sub_list, apply_fn)
def _to_unicode(text):
    """Return ``text`` as a unicode string, decoding byte strings as UTF-8."""
    if isinstance(text, six.text_type):
        return text
    return text.decode('utf-8')
def _parse_spacy_kwargs(**kwargs):
"""Supported args include:
Args:
n_threads/num_threads: Number of threads to use. Uses num_cpus - 1 by default.
batch_size: The number of texts to accumulate into a common working set before processing.
(Default value: 1000)
"""
n_threads = kwargs.get('n_threads') or kwargs.get('num_threads')
batch_size = kwargs.get('batch_size')
if n_threads is None or n_threads is -1:
n_threads = cpu_count() - 1
if batch_size is None or batch_size is -1:
batch_size = 1000
return n_threads, batch_size
def _pad_token_sequences(sequences, max_tokens,
                         padding, truncating, value):
    # TODO: better variable names (see below)
    # Thin wrapper around Keras' pad_sequences handling the innermost (token) level.
    return keras_pad_sequences(sequences, maxlen=max_tokens, padding=padding, truncating=truncating, value=value)
def _pad_sent_sequences(sequences, max_sentences, max_tokens, padding, truncating, value):
    """Pad/truncate a (documents, sentences, tokens) nested structure to a dense array.

    Args:
        sequences: List of documents; each a list of sentences; each a list of tokens.
        max_sentences: Target sentence count per document (inferred when None).
        max_tokens: Target token count per sentence (inferred when None).
        padding: 'pre' or 'post' -- side on which padding is inserted.
        truncating: 'pre' or 'post' -- side from which overflow is dropped.
        value: Fill value for padded positions.

    Returns:
        np.ndarray of shape (len(sequences), max_sentences, max_tokens).

    Raises:
        ValueError: For unknown `padding` or `truncating` values.
    """
    # TODO: better names (see below)
    # Infer max lengths if needed.
    if max_sentences is None or max_tokens is None:
        max_sentences_computed = 0
        max_tokens_computed = 0
        for sent_seq in sequences:
            max_sentences_computed = max(max_sentences_computed, len(sent_seq))
            # ROBUSTNESS FIX: the original called `np.max([...])` which raises a
            # ValueError for a document with no sentences; tolerate empty
            # documents here the same way the padding loop below does.
            token_lens = [len(token_seq) for token_seq in sent_seq]
            if token_lens:
                max_tokens_computed = max(max_tokens_computed, max(token_lens))
        # Only use inferred values for None.
        if max_sentences is None:
            max_sentences = max_sentences_computed
        if max_tokens is None:
            max_tokens = max_tokens_computed
    result = np.ones(shape=(len(sequences), max_sentences, max_tokens)) * value
    for idx, sent_seq in enumerate(sequences):
        # empty list/array was found
        if not len(sent_seq):
            continue
        if truncating == 'pre':
            trunc = sent_seq[-max_sentences:]
        elif truncating == 'post':
            trunc = sent_seq[:max_sentences]
        else:
            raise ValueError(
                'Truncating type "%s" not understood' % truncating)
        # Apply padding.
        if padding == 'post':
            result[idx, :len(trunc)] = _pad_token_sequences(
                trunc, max_tokens, padding, truncating, value)
        elif padding == 'pre':
            result[idx, -len(trunc):] = _pad_token_sequences(
                trunc, max_tokens, padding, truncating, value)
        else:
            raise ValueError('Padding type "%s" not understood' % padding)
    return result
def unicodify(texts):
    """Encodes all text sequences as unicode. This is a python2 hassle.
    Args:
        texts: The sequence of texts.
    Returns:
        Unicode encoded sequences.
    """
    # Byte strings are decoded as UTF-8; already-unicode entries pass through unchanged.
    return [_to_unicode(text) for text in texts] | 0.702632 | 0.328718
from __future__ import absolute_import
import calendar
import datetime
import decimal
import iso8601
import sqlalchemy
from sqlalchemy.dialects import mysql
from sqlalchemy import types
from gnocchi import utils
class PreciseTimestamp(types.TypeDecorator):
    """Represents a timestamp precise to the microsecond.

    Deprecated in favor of TimestampUTC.
    Still used in alembic migrations.
    """
    impl = sqlalchemy.DateTime

    @staticmethod
    def _decimal_to_dt(dec):
        """Return a datetime from Decimal unixtime format."""
        if dec is None:
            return None
        integer = int(dec)
        micro = (dec - decimal.Decimal(integer)) * decimal.Decimal(1000000)
        timestamp = datetime.datetime.utcfromtimestamp(integer)
        return timestamp.replace(microsecond=int(round(micro)))

    @staticmethod
    def _dt_to_decimal(utc):
        """Datetime to Decimal.

        Some databases don't store microseconds in datetime
        so we always store as Decimal unixtime.
        """
        if utc is None:
            return None
        # FIX: use a local decimal context instead of mutating the process-wide
        # one (`decimal.getcontext().prec = 30`), which leaked a precision
        # change into all other Decimal arithmetic in the application.
        with decimal.localcontext() as ctx:
            ctx.prec = 30
            return (decimal.Decimal(str(calendar.timegm(utc.utctimetuple()))) +
                    (decimal.Decimal(str(utc.microsecond)) /
                     decimal.Decimal("1000000.0")))

    def load_dialect_impl(self, dialect):
        """Use DECIMAL(20, 6) on MySQL; the plain DateTime type elsewhere."""
        if dialect.name == 'mysql':
            return dialect.type_descriptor(
                types.DECIMAL(precision=20,
                              scale=6,
                              asdecimal=True))
        return dialect.type_descriptor(self.impl)

    def compare_against_backend(self, dialect, conn_type):
        """Report whether the reflected column type matches this decorator."""
        if dialect.name == 'mysql':
            return issubclass(type(conn_type), types.DECIMAL)
        return issubclass(type(conn_type), type(self.impl))

    def process_bind_param(self, value, dialect):
        """Normalize to naive UTC; store as Decimal unixtime on MySQL."""
        if value is not None:
            value = utils.normalize_time(value)
        if dialect.name == 'mysql':
            return self._dt_to_decimal(value)
        return value

    def process_result_value(self, value, dialect):
        """Convert the stored value back to a timezone-aware (UTC) datetime."""
        if dialect.name == 'mysql':
            value = self._decimal_to_dt(value)
        if value is not None:
            return utils.normalize_time(value).replace(
                tzinfo=iso8601.iso8601.UTC)
class TimestampUTC(types.TypeDecorator):
    """Represents a timestamp precise to the microsecond."""
    impl = sqlalchemy.DateTime

    def load_dialect_impl(self, dialect):
        # MySQL needs an explicit fractional-seconds precision to keep microseconds.
        if dialect.name == 'mysql':
            return dialect.type_descriptor(mysql.DATETIME(fsp=6))
        return self.impl

    def process_bind_param(self, value, dialect):
        # Store as naive UTC; implicitly returns None for NULL values.
        if value is not None:
            return utils.normalize_time(value)

    def process_result_value(self, value, dialect):
        # Re-attach the UTC tzinfo that was stripped at bind time.
        if value is not None:
            return value.replace(tzinfo=iso8601.iso8601.UTC) | gnocchi/indexer/sqlalchemy_types.py | from __future__ import absolute_import
import calendar
import datetime
import decimal
import iso8601
import sqlalchemy
from sqlalchemy.dialects import mysql
from sqlalchemy import types
from gnocchi import utils
class PreciseTimestamp(types.TypeDecorator):
    """Represents a timestamp precise to the microsecond.

    Deprecated in favor of TimestampUTC.
    Still used in alembic migrations.
    """
    impl = sqlalchemy.DateTime

    @staticmethod
    def _decimal_to_dt(dec):
        """Return a datetime from Decimal unixtime format."""
        if dec is None:
            return None
        integer = int(dec)
        micro = (dec - decimal.Decimal(integer)) * decimal.Decimal(1000000)
        timestamp = datetime.datetime.utcfromtimestamp(integer)
        return timestamp.replace(microsecond=int(round(micro)))

    @staticmethod
    def _dt_to_decimal(utc):
        """Datetime to Decimal.

        Some databases don't store microseconds in datetime
        so we always store as Decimal unixtime.
        """
        if utc is None:
            return None
        # FIX: use a local decimal context instead of mutating the process-wide
        # one (`decimal.getcontext().prec = 30`), which leaked a precision
        # change into all other Decimal arithmetic in the application.
        with decimal.localcontext() as ctx:
            ctx.prec = 30
            return (decimal.Decimal(str(calendar.timegm(utc.utctimetuple()))) +
                    (decimal.Decimal(str(utc.microsecond)) /
                     decimal.Decimal("1000000.0")))

    def load_dialect_impl(self, dialect):
        """Use DECIMAL(20, 6) on MySQL; the plain DateTime type elsewhere."""
        if dialect.name == 'mysql':
            return dialect.type_descriptor(
                types.DECIMAL(precision=20,
                              scale=6,
                              asdecimal=True))
        return dialect.type_descriptor(self.impl)

    def compare_against_backend(self, dialect, conn_type):
        """Report whether the reflected column type matches this decorator."""
        if dialect.name == 'mysql':
            return issubclass(type(conn_type), types.DECIMAL)
        return issubclass(type(conn_type), type(self.impl))

    def process_bind_param(self, value, dialect):
        """Normalize to naive UTC; store as Decimal unixtime on MySQL."""
        if value is not None:
            value = utils.normalize_time(value)
        if dialect.name == 'mysql':
            return self._dt_to_decimal(value)
        return value

    def process_result_value(self, value, dialect):
        """Convert the stored value back to a timezone-aware (UTC) datetime."""
        if dialect.name == 'mysql':
            value = self._decimal_to_dt(value)
        if value is not None:
            return utils.normalize_time(value).replace(
                tzinfo=iso8601.iso8601.UTC)
class TimestampUTC(types.TypeDecorator):
    """Represents a timestamp precise to the microsecond."""
    impl = sqlalchemy.DateTime

    def load_dialect_impl(self, dialect):
        # MySQL needs an explicit fractional-seconds precision to keep microseconds.
        if dialect.name == 'mysql':
            return dialect.type_descriptor(mysql.DATETIME(fsp=6))
        return self.impl

    def process_bind_param(self, value, dialect):
        # Store as naive UTC; implicitly returns None for NULL values.
        if value is not None:
            return utils.normalize_time(value)

    def process_result_value(self, value, dialect):
        # Re-attach the UTC tzinfo that was stripped at bind time.
        if value is not None:
            return value.replace(tzinfo=iso8601.iso8601.UTC) | 0.750278 | 0.199347
import logging
import doctest
import random
from ox_cache.mixins import TimedExpiryMixin
from ox_cache.memoizers import OxMemoizer
from ox_cache.locks import FakeLock
class RandomReplacementMixin:
    """Mixin implementing a random-replacement eviction policy.

    Before every store, random entries are evicted until the cache holds
    fewer than ``max_size`` items.
    """

    def __init__(self, *args, max_size=128, **kwargs):
        self.max_size = max_size
        super().__init__(*args, **kwargs)

    def _pre_store(self, key, value, ttl_info=None, **opts):
        dummy = key, value, ttl_info, opts  # arguments unused; eviction only depends on size
        while len(self._data) >= self.max_size:
            victim = random.choice(list(self._data))
            logging.debug('%s will remove key %s',
                          self.__class__.__name__, victim)
            self._delete_full_key(victim, lock=FakeLock())
class RandomReplacementMemoizer(
        RandomReplacementMixin, TimedExpiryMixin, OxMemoizer):
    """Memoizer class using time based refresh via RandomReplacementMixin
    This is a class that can be used to memoize a function keeping
    only `self.max_size` elements with random replacement. This is mainly
    for demonstration or statistical purposes since randomly kicking
    out an item is inefficient.
    >>> from ox_cache.tests import RandomReplacementMemoizer
    >>> @RandomReplacementMemoizer
    ... def my_func(x, y):
    ...     'Add two inputs'
    ...     z = x + y
    ...     print('called my_func(%s, %s) = %s' % (repr(x), repr(y), repr(z)))
    ...     return z
    ...
    >>> my_func(1, 2)
    called my_func(1, 2) = 3
    3
    >>> my_func.max_size = 3
    >>> data = [my_func(1, i) for i in range(5)]  # doctest: +ELLIPSIS
    called my_func(1, 0) = ...
    called my_func(1, 1) = ...
    called my_func(1, 3) = ...
    called my_func(1, 4) = ...
    >>> len(my_func)
    3
    """
    # Behaviour is fully inherited via the MRO: RandomReplacementMixin supplies
    # eviction, TimedExpiryMixin supplies TTL expiry, OxMemoizer supplies the
    # memoization machinery.
def _regr_test_cache():
    """Simple tests for basic cache.
    >>> from ox_cache import OxCacheBase, TimedExpiryMixin
    >>> class TimedCache(TimedExpiryMixin, OxCacheBase):
    ...     'Simple cache which expires items after after self.expiry_seconds.'
    ...     def make_value(self, key, **opts):
    ...         'Simple function to create value for requested key.'
    ...         print('Calling refresh for key="%s"' % key)
    ...         if opts:
    ...             print('opts were %s' % str(opts))
    ...         return 'key="%s" is fun!' % key
    ...
    >>> cache = TimedCache(expiry_seconds=100)  # expires after 100 seconds
    >>> cache.get('test')  # Will call make_value to generate value.
    Calling refresh for key="test"
    'key="test" is fun!'
    >>> cache.ttl('test') > 60  # Check time to live is pretty long
    True
    >>> cache.get('test')  # If called immediately, will use cached item
    'key="test" is fun!'
    >>> cache.expiry_seconds = 1  # Change expiration time to be much faster
    >>> import time; time.sleep(1.1)  # Wait a few seconds for cache item to expire
    >>> cache.get('test')  # Will generate a new value since time limit expired
    Calling refresh for key="test"
    'key="test" is fun!'
    >>> cache['test']
    'key="test" is fun!'
    >>> cache['test'] = 'blah'  # Manually store new value
    >>> cache['test']
    'blah'
    >>> time.sleep(1.2)
    >>> removed = cache.clean()
    >>> len(removed)
    1
    >>> type(removed[0][0].odict())
    <class 'dict'>
    >>> cache.get('test', __not_keys=('tag',), tag='foo')
    Calling refresh for key="test"
    opts were {'__not_keys': ('tag',), 'tag': 'foo'}
    'key="test" is fun!'
    >>> cache.make_key('test', __not_keys=('tag',), tag='foo') #doctest: +ELLIPSIS
    OxCacheFullKey(...)
    """
    # Intentionally empty: the doctests above are the test body; they run via
    # doctest.testmod() in the __main__ guard below.
if __name__ == '__main__':
    # Run every doctest in this module (including the class docstrings above).
    doctest.testmod()
    print('Finished tests') | ox_cache/tests.py | import logging
import doctest
import random
from ox_cache.mixins import TimedExpiryMixin
from ox_cache.memoizers import OxMemoizer
from ox_cache.locks import FakeLock
class RandomReplacementMixin:
    """Mixin implementing a random-replacement eviction policy.

    Before every store, random entries are evicted until the cache holds
    fewer than ``max_size`` items.
    """

    def __init__(self, *args, max_size=128, **kwargs):
        self.max_size = max_size
        super().__init__(*args, **kwargs)

    def _pre_store(self, key, value, ttl_info=None, **opts):
        dummy = key, value, ttl_info, opts  # arguments unused; eviction only depends on size
        while len(self._data) >= self.max_size:
            victim = random.choice(list(self._data))
            logging.debug('%s will remove key %s',
                          self.__class__.__name__, victim)
            self._delete_full_key(victim, lock=FakeLock())
class RandomReplacementMemoizer(
        RandomReplacementMixin, TimedExpiryMixin, OxMemoizer):
    """Memoizer class using time based refresh via RandomReplacementMixin
    This is a class that can be used to memoize a function keeping
    only `self.max_size` elements with random replacement. This is mainly
    for demonstration or statistical purposes since randomly kicking
    out an item is inefficient.
    >>> from ox_cache.tests import RandomReplacementMemoizer
    >>> @RandomReplacementMemoizer
    ... def my_func(x, y):
    ...     'Add two inputs'
    ...     z = x + y
    ...     print('called my_func(%s, %s) = %s' % (repr(x), repr(y), repr(z)))
    ...     return z
    ...
    >>> my_func(1, 2)
    called my_func(1, 2) = 3
    3
    >>> my_func.max_size = 3
    >>> data = [my_func(1, i) for i in range(5)]  # doctest: +ELLIPSIS
    called my_func(1, 0) = ...
    called my_func(1, 1) = ...
    called my_func(1, 3) = ...
    called my_func(1, 4) = ...
    >>> len(my_func)
    3
    """
    # Behaviour is fully inherited via the MRO: RandomReplacementMixin supplies
    # eviction, TimedExpiryMixin supplies TTL expiry, OxMemoizer supplies the
    # memoization machinery.
def _regr_test_cache():
    """Simple tests for basic cache.
    >>> from ox_cache import OxCacheBase, TimedExpiryMixin
    >>> class TimedCache(TimedExpiryMixin, OxCacheBase):
    ...     'Simple cache which expires items after after self.expiry_seconds.'
    ...     def make_value(self, key, **opts):
    ...         'Simple function to create value for requested key.'
    ...         print('Calling refresh for key="%s"' % key)
    ...         if opts:
    ...             print('opts were %s' % str(opts))
    ...         return 'key="%s" is fun!' % key
    ...
    >>> cache = TimedCache(expiry_seconds=100)  # expires after 100 seconds
    >>> cache.get('test')  # Will call make_value to generate value.
    Calling refresh for key="test"
    'key="test" is fun!'
    >>> cache.ttl('test') > 60  # Check time to live is pretty long
    True
    >>> cache.get('test')  # If called immediately, will use cached item
    'key="test" is fun!'
    >>> cache.expiry_seconds = 1  # Change expiration time to be much faster
    >>> import time; time.sleep(1.1)  # Wait a few seconds for cache item to expire
    >>> cache.get('test')  # Will generate a new value since time limit expired
    Calling refresh for key="test"
    'key="test" is fun!'
    >>> cache['test']
    'key="test" is fun!'
    >>> cache['test'] = 'blah'  # Manually store new value
    >>> cache['test']
    'blah'
    >>> time.sleep(1.2)
    >>> removed = cache.clean()
    >>> len(removed)
    1
    >>> type(removed[0][0].odict())
    <class 'dict'>
    >>> cache.get('test', __not_keys=('tag',), tag='foo')
    Calling refresh for key="test"
    opts were {'__not_keys': ('tag',), 'tag': 'foo'}
    'key="test" is fun!'
    >>> cache.make_key('test', __not_keys=('tag',), tag='foo') #doctest: +ELLIPSIS
    OxCacheFullKey(...)
    """
    # Intentionally empty: the doctests above are the test body; they run via
    # doctest.testmod() in the __main__ guard below.
if __name__ == '__main__':
    # Run every doctest in this module (including the class docstrings above).
    doctest.testmod()
    print('Finished tests') | 0.652463 | 0.173026
from office365.directory.identities.api_connector import IdentityApiConnector
from office365.directory.identities.conditional_access_root import ConditionalAccessRoot
from office365.directory.identities.userflows.attribute import IdentityUserFlowAttribute
from office365.directory.identities.userflows.b2x.user_flow import B2XIdentityUserFlow
from office365.directory.identities.providers.identity_provider_base import IdentityProviderBase
from office365.entity import Entity
from office365.entity_collection import EntityCollection
from office365.runtime.resource_path import ResourcePath
class IdentityContainer(Entity):
    """
    Represents the entry point to different features in External Identities for
    both Azure Active Directory (Azure AD) and Azure AD B2C tenants.
    """
    @property
    def api_connectors(self):
        """
        Represents entry point for API connectors.
        """
        # Falls back to a lazily-constructed child collection when the property
        # has not been loaded from the service yet.
        return self.properties.get('apiConnectors',
                                   EntityCollection(self.context, IdentityApiConnector,
                                                    ResourcePath("apiConnectors", self.resource_path)))
    @property
    def conditional_access(self):
        """The entry point for the Conditional Access (CA) object model."""
        return self.properties.get('conditionalAccess',
                                   ConditionalAccessRoot(self.context,
                                                         ResourcePath("conditionalAccess", self.resource_path)))
    @property
    def identity_providers(self):
        """Represents entry point for identity provider base."""
        return self.properties.get('identityProviders',
                                   EntityCollection(self.context, IdentityProviderBase,
                                                    ResourcePath("identityProviders", self.resource_path)))
    @property
    def b2x_user_flows(self):
        """
        Represents entry point for B2X/self-service sign-up identity userflows.
        """
        return self.properties.get('b2xUserFlows',
                                   EntityCollection(self.context, B2XIdentityUserFlow,
                                                    ResourcePath("b2xUserFlows", self.resource_path)))
    @property
    def user_flow_attributes(self):
        """
        Represents entry point for identity userflow attributes.
        """
        return self.properties.get('userFlowAttributes',
                                   EntityCollection(self.context, IdentityUserFlowAttribute,
                                                    ResourcePath("userFlowAttributes", self.resource_path)))
    def get_property(self, name, default_value=None):
        # Maps server-side property names onto the typed accessors above.
        if default_value is None:
            # NOTE(review): building this dict invokes every property accessor,
            # eagerly instantiating each child collection -- presumably cheap
            # client-side proxies, but worth confirming.
            property_mapping = {
                "apiConnectors": self.api_connectors,
                "b2xUserFlows": self.b2x_user_flows,
                "conditionalAccess": self.conditional_access,
                "identityProviders": self.identity_providers,
                "userFlowAttributes": self.user_flow_attributes
            }
            default_value = property_mapping.get(name, None)
        return super(IdentityContainer, self).get_property(name, default_value) | office365/directory/identities/identity_container.py | from office365.directory.identities.api_connector import IdentityApiConnector
from office365.directory.identities.conditional_access_root import ConditionalAccessRoot
from office365.directory.identities.userflows.attribute import IdentityUserFlowAttribute
from office365.directory.identities.userflows.b2x.user_flow import B2XIdentityUserFlow
from office365.directory.identities.providers.identity_provider_base import IdentityProviderBase
from office365.entity import Entity
from office365.entity_collection import EntityCollection
from office365.runtime.resource_path import ResourcePath
class IdentityContainer(Entity):
    """
    Represents the entry point to different features in External Identities for
    both Azure Active Directory (Azure AD) and Azure AD B2C tenants.
    """
    @property
    def api_connectors(self):
        """
        Represents entry point for API connectors.
        """
        # Falls back to a lazily-constructed child collection when the property
        # has not been loaded from the service yet.
        return self.properties.get('apiConnectors',
                                   EntityCollection(self.context, IdentityApiConnector,
                                                    ResourcePath("apiConnectors", self.resource_path)))
    @property
    def conditional_access(self):
        """The entry point for the Conditional Access (CA) object model."""
        return self.properties.get('conditionalAccess',
                                   ConditionalAccessRoot(self.context,
                                                         ResourcePath("conditionalAccess", self.resource_path)))
    @property
    def identity_providers(self):
        """Represents entry point for identity provider base."""
        return self.properties.get('identityProviders',
                                   EntityCollection(self.context, IdentityProviderBase,
                                                    ResourcePath("identityProviders", self.resource_path)))
    @property
    def b2x_user_flows(self):
        """
        Represents entry point for B2X/self-service sign-up identity userflows.
        """
        return self.properties.get('b2xUserFlows',
                                   EntityCollection(self.context, B2XIdentityUserFlow,
                                                    ResourcePath("b2xUserFlows", self.resource_path)))
    @property
    def user_flow_attributes(self):
        """
        Represents entry point for identity userflow attributes.
        """
        return self.properties.get('userFlowAttributes',
                                   EntityCollection(self.context, IdentityUserFlowAttribute,
                                                    ResourcePath("userFlowAttributes", self.resource_path)))
    def get_property(self, name, default_value=None):
        # Maps server-side property names onto the typed accessors above.
        if default_value is None:
            # NOTE(review): building this dict invokes every property accessor,
            # eagerly instantiating each child collection -- presumably cheap
            # client-side proxies, but worth confirming.
            property_mapping = {
                "apiConnectors": self.api_connectors,
                "b2xUserFlows": self.b2x_user_flows,
                "conditionalAccess": self.conditional_access,
                "identityProviders": self.identity_providers,
                "userFlowAttributes": self.user_flow_attributes
            }
            default_value = property_mapping.get(name, None)
        return super(IdentityContainer, self).get_property(name, default_value) | 0.874761 | 0.15876
import pandas as pd
import numpy as np
import math
import os
from scipy.special import entr
csvFolderPath = "../../../COIN/4_Repos/Ready_For_Regression/"
resultPath = '../Regression/input.csv'
def giniCalculator(array):
    """Compute the Gini coefficient of ``array``, rounded to 4 decimals.

    Uses the mean-absolute-difference formulation:
    G = (mean pairwise |xi - xj|) / (2 * mean(x)).
    """
    values = np.array(array)
    # Mean absolute difference over all ordered pairs.
    pairwise_abs_diff = np.abs(np.subtract.outer(values, values))
    relative_mad = pairwise_abs_diff.mean() / values.mean()
    return round(0.5 * relative_mad, 4)
# Function used to extract aggregated metrics from 1 repository
def processCSV(dir, df):
    """Extract aggregated network/sentiment metrics for one repository.

    Args:
        dir: Repository folder name under ``csvFolderPath``.
        df: Accumulator DataFrame; one metrics row is appended per call.

    Returns:
        ``df`` with one additional row of metrics for this repository.
    """
    # Input file locations: one `processed` folder per repository.
    actorPath = csvFolderPath + dir + '/processed/nodes.csv'
    ticketsPath = csvFolderPath + dir + '/processed/edges.csv'
    SOTPath = csvFolderPath + dir + '/processed/sentiment_over_time.csv'
    activityPath = csvFolderPath + dir + '/processed/activity.csv'
    awvciPath = csvFolderPath + dir + '/processed/awvci.csv'
    betwCentrPath = csvFolderPath + dir + '/processed/betw_centr.csv'
    betwOscPath = csvFolderPath + dir + '/processed/betw_osc.csv'
    densityPath = csvFolderPath + dir + '/processed/density.csv'
    repo = dir
    # Extract metrics from actor file
    # NOTE(review): `error_bad_lines` is deprecated in recent pandas
    # (replaced by `on_bad_lines='skip'`); this pins the script to older pandas.
    actors = pd.read_csv(actorPath, sep=',', encoding='utf-8', error_bad_lines=False)
    actors_rows = actors.shape[0]
    # "Top" cohort = the top 5% of actors by total influence.
    ntop = int(round((actors_rows * 0.05), 0))
    actorsTop = actors.sort_values(by='total influence', ascending=False).head(ntop)
    top_avg_deg_cent = round(actorsTop['Degree centrality'].mean(), 4)
    top_gini_deg_cent = giniCalculator(actorsTop['Degree centrality'])
    top_avg_betw_osc = round(actorsTop['Betweenness centrality oscillation'].mean(), 4)
    top_gini_betw_osc = giniCalculator(actorsTop['Betweenness centrality oscillation'])
    top_avg_sentiment = round(actorsTop['avg sentiment'].mean(), 4)
    top_gini_sentiment = giniCalculator(actorsTop['avg sentiment'])
    top_avg_complexity = round(actorsTop['avg complexity'].mean(), 4)
    top_gini_complexity = giniCalculator(actorsTop['avg complexity'])
    top_avg_influence = round(actorsTop['total influence'].mean(), 4)
    top_gini_influence = giniCalculator(actorsTop['total influence'])
    top_avg_influence_pm = round(actorsTop['average influence per message'].mean(), 4)
    top_gini_influence_pm = giniCalculator(actorsTop['average influence per message'])
    top_avg_contrib = round(actorsTop['Contribution index'].mean(), 4)
    top_gini_contrib = giniCalculator(actorsTop['Contribution index'])
    top_avg_contrib_oscil = round(actorsTop['Contribution index oscillation'].mean(), 4)
    top_gini_contrib_oscil = giniCalculator(actorsTop['Contribution index oscillation'])
    # "Connected" cohort = actors with more than one connection.
    actorsConnected = actors.loc[actors['Degree centrality'] > 1]
    avg_deg_cent = round(actorsConnected['Degree centrality'].mean(), 4)
    gini_deg_cent = giniCalculator(actorsConnected['Degree centrality'])
    avg_betw_osc = round(actorsConnected['Betweenness centrality oscillation'].mean(), 4)
    gini_betw_osc = giniCalculator(actorsConnected['Betweenness centrality oscillation'])
    avg_influence = round(actorsConnected['total influence'].mean(), 4)
    gini_influence = giniCalculator(actorsConnected['total influence'])
    avg_influence_pm = round(actorsConnected['average influence per message'].mean(), 4)
    gini_influence_pm = giniCalculator(actorsConnected['average influence per message'])
    avg_contrib = round(actorsConnected['Contribution index'].mean(), 4)
    # gini_contrib = giniCalculator(actorsConnected['Contribution index'])
    avg_contrib_oscil = round(actorsConnected['Contribution index oscillation'].mean(), 4)
    gini_contrib_oscil = giniCalculator(actorsConnected['Contribution index oscillation'])
    avg_sentiment = round(actorsConnected['avg sentiment'].mean(), 4)
    gini_sentiment = giniCalculator(actorsConnected['avg sentiment'])
    avg_complexity = round(actorsConnected['avg complexity'].mean(), 4)
    gini_complexity = giniCalculator(actorsConnected['avg complexity'])
    # NOTE(review): this compares a centrality VALUE against `ntop` (an actor
    # count); inconsistent with `perc_isolated` below which uses `== 1`.
    # Looks suspicious -- confirm the intended threshold.
    perc_connected = round((actors.loc[actors['Degree centrality'] >= ntop].shape[0]) / actors_rows, 4)
    perc_isolated = round((actors.loc[actors['Degree centrality'] == 1].shape[0]) / actors_rows, 4)
    perc_hirable = round((actors.loc[actors['hireable'] == 1].shape[0]) / actors_rows, 4)
    # Extract metrics from ticket file
    edges = pd.read_csv(ticketsPath, sep=',', encoding='utf-8', error_bad_lines=False)
    edges_rows = edges.shape[0]
    perc_closed_issues = round((edges.loc[edges['Status'] == 'closed'].shape[0]) / edges_rows, 4)
    perc_creation = round((edges.loc[edges['Edge_type'] == 'CREATION'].shape[0]) / edges_rows, 4)
    # Issues touched by exactly one edge ("solo" issues).
    perc_solo = edges[['Name']].groupby('Name').size().reset_index(name='counts')
    perc_solo = round((perc_solo.loc[perc_solo['counts'] == 1].shape[0]) / perc_solo.shape[0], 4)
    # Extract metrics from sentiment over time data
    SOT = pd.read_csv(SOTPath, sep=',', encoding='utf-8', error_bad_lines=False)
    group_messages = round(SOT['Avg. messages per day'].mean(), 4)
    group_sentiment = round(SOT['Sentiment'].mean(), 4)
    group_emotionality = round(SOT['Emotionality'].mean(), 4)
    group_complexity = round(SOT['Complexity'].mean(), 4)
    group_influence = round(SOT['Influence'].mean(), 4)
    # Extract time series data
    # Each series file is one wide row; transpose to a single column (0) and
    # compute the month-over-month ratio plus the final (latest) value.
    activity = pd.read_csv(activityPath, sep=',', encoding='utf-8', error_bad_lines=False).T[[0]]
    activity['Activity_Increase'] = np.nan
    activity = activity[2:].reset_index()
    for index, row in activity.iterrows():
        if index > 0:
            activity.loc[index, 'Activity_Increase'] = activity.loc[index, 0] / activity.loc[index - 1, 0]
    group_activity_increase = round(activity['Activity_Increase'].mean(), 4)
    # Too few data points to compute a ratio -> neutral growth of 1.
    if math.isnan(group_activity_increase):
        group_activity_increase = 1
    group_activity = activity.loc[:, 0]
    group_activity = round(group_activity.tail(1).values[0], 4)
    awvci = pd.read_csv(awvciPath, sep=',', encoding='utf-8', error_bad_lines=False).T[[0]]
    awvci['awvci_Increase'] = np.nan
    awvci = awvci[2:].reset_index()
    for index, row in awvci.iterrows():
        if index > 0:
            # Guard against division by zero in months with no AWVCI.
            if awvci.loc[index - 1, 0] > 0:
                awvci.loc[index, 'awvci_Increase'] = awvci.loc[index, 0] / awvci.loc[index - 1, 0]
    group_awvci_increase = round(awvci['awvci_Increase'].mean(), 4)
    if math.isnan(group_awvci_increase):
        group_awvci_increase = 1
    group_awvci = awvci.loc[:, 0]
    group_awvci = round(group_awvci.tail(1).values[0], 4)
    betwCentr = pd.read_csv(betwCentrPath, sep=',', encoding='utf-8', error_bad_lines=False).T[[0]]
    betwCentr['betwCentr_Increase'] = np.nan
    betwCentr = betwCentr[2:].reset_index()
    for index, row in betwCentr.iterrows():
        if index > 0:
            # NOTE(review): uses positional `.iloc[.., 1]` here, unlike the
            # `.loc[.., 0]` form used for the other series; after reset_index
            # position 1 is column 0, so this is presumably equivalent -- confirm.
            if betwCentr.iloc[index - 1, 1] > 0:
                betwCentr.loc[index, 'betwCentr_Increase'] = betwCentr.iloc[index, 1] / betwCentr.iloc[index - 1, 1]
    group_betwCentr_increase = round(betwCentr['betwCentr_Increase'].mean(), 4)
    if math.isnan(group_betwCentr_increase):
        group_betwCentr_increase = 1
    group_betw_centr = betwCentr.loc[:, 0]
    group_betw_centr = round(group_betw_centr.tail(1).values[0], 4)
    betwOsc = pd.read_csv(betwOscPath, sep=',', encoding='utf-8', error_bad_lines=False).T[[0]]
    betwOsc['betwOsc_Increase'] = np.nan
    betwOsc = betwOsc[2:].reset_index()
    for index, row in betwOsc.iterrows():
        if index > 0:
            if betwOsc.loc[index - 1, 0] > 0:
                betwOsc.loc[index, 'betwOsc_Increase'] = betwOsc.loc[index, 0] / betwOsc.loc[index - 1, 0]
    group_betwOsc_increase = round(betwOsc['betwOsc_Increase'].mean(), 4)
    if math.isnan(group_betwOsc_increase):
        group_betwOsc_increase = 1
    group_betw_osc = betwOsc.loc[:, 0]
    group_betw_osc = round(group_betw_osc.tail(1).values[0], 4)
    density = pd.read_csv(densityPath, sep=',', encoding='utf-8', error_bad_lines=False).T[[0]]
    density['density_Increase'] = np.nan
    density = density[2:].reset_index()
    for index, row in density.iterrows():
        if index > 0:
            if density.loc[index - 1, 0] > 0:
                density.loc[index, 'density_Increase'] = density.loc[index, 0] / density.loc[index - 1, 0]
    group_density_increase = round(density['density_Increase'].mean(), 4)
    if math.isnan(group_density_increase):
        group_density_increase = 1
    group_density = density.loc[:, 0]
    group_density = round(group_density.tail(1).values[0], 4)
    # NOTE(review): `DataFrame.append` was removed in pandas 2.0 (use pd.concat).
    result = df.append({'Group_Messages_Per_Day': group_messages,
                        'Group_Sentiment': group_sentiment,
                        'Group_Emotionality': group_emotionality,
                        'Group_Complexity': group_complexity,
                        'Group_Influence': group_influence,
                        'Group_Percentage_Activity_Increase_Monthly': group_activity_increase,
                        'Group_Activity': group_activity,
                        'Group_Percentage_AWVCI_Increase_Monthly': group_awvci_increase,
                        'Group_AWVCI': group_awvci,
                        'Group_Percentage_Betweenness_Centrality_Increase_Monthly': group_betwCentr_increase,
                        'Group_Betweenness_Centrality': group_betw_centr,
                        'Group_Percentage_Betweenness_Oscillation_Increase_Monthly': group_betwOsc_increase,
                        'Group_Betweenness_Oscillation': group_betw_osc,
                        'Group_Percentage_Density_Increase_Monthly': group_density_increase,
                        'Group_Density': group_density,
                        'Avg_Degree_Centrality_Top': top_avg_deg_cent,
                        'Gini_Degree_Centrality_Top': top_gini_deg_cent,
                        'Avg_Degree_Centrality': avg_deg_cent,
                        'Gini_Degree_Centrality': gini_deg_cent,
                        'Avg_Betweenness_Osc_Top': top_avg_betw_osc,
                        'Gini_Betweenness_Osc_Top': top_gini_betw_osc,
                        'Avg_Betweenness_Osc': avg_betw_osc,
                        'Gini_Betweenness_Osc': gini_betw_osc,
                        'Percentage_Connected_Actors': perc_connected,
                        'Avg_Sentiment': avg_sentiment,
                        'Gini_Sentiment': gini_sentiment,
                        'Avg_Sentiment_Top': top_avg_sentiment,
                        'Gini_Sentiment_Top': top_gini_sentiment,
                        'Avg_complexity': avg_complexity,
                        'Gini_complexity': gini_complexity,
                        'Avg_complexity_Top': top_avg_complexity,
                        'Gini_complexity_Top': top_gini_complexity,
                        'Avg_Influence_Top': top_avg_influence,
                        'Gini_Influence_Top': top_gini_influence,
                        'Avg_Influence': avg_influence,
                        'Gini_Influence': gini_influence,
                        'Avg_Influence_Per_Message_Top': top_avg_influence_pm,
                        'Gini_Influence_Per_Message_Top': top_gini_influence_pm,
                        'Avg_Influence_Per_Message': avg_influence_pm,
                        'Gini_Influence_Per_Message': gini_influence_pm,
                        'Avg_Contribution_Index_Top': top_avg_contrib,
                        'Gini_Contribution_Index_Top': top_gini_contrib,
                        'Avg_Contribution_Index': avg_contrib,
                        'Avg_Contribution_Index_Oscil_Top': top_avg_contrib_oscil,
                        'Gini_Contribution_Index_Oscil_Top': top_gini_contrib_oscil,
                        'Avg_Contribution_Index_Oscil': avg_contrib_oscil,
                        'Gini_Contribution_Index_Oscil': gini_contrib_oscil,
                        'Percentage_Closed_Issues': perc_closed_issues,
                        'Percentage_Creations': perc_creation,
                        'Percentage_Isolated_Actors': perc_isolated,
                        'Percentage_Hirable_Actors': perc_hirable,
                        'Percentage_Solo_Issues': perc_solo,
                        'Repository_Name': repo}, ignore_index=True)
    return result
# 1: Create DataFrame
# NOTE(review): the 'Target' column is declared but never populated by
# processCSV, so dropna(axis=1) below removes it -- confirm that is intended.
df = pd.DataFrame(columns=['Group_Messages_Per_Day',
                           'Group_Sentiment',
                           'Group_Emotionality',
                           'Group_Complexity',
                           'Group_Influence',
                           'Group_Percentage_Activity_Increase_Monthly',
                           'Group_Activity',
                           'Group_Percentage_AWVCI_Increase_Monthly',
                           'Group_AWVCI',
                           'Group_Percentage_Betweenness_Centrality_Increase_Monthly',
                           'Group_Betweenness_Centrality',
                           'Group_Percentage_Betweenness_Oscillation_Increase_Monthly',
                           'Group_Betweenness_Oscillation',
                           'Group_Percentage_Density_Increase_Monthly',
                           'Group_Density',
                           'Avg_Degree_Centrality_Top',
                           'Gini_Degree_Centrality_Top',
                           'Avg_Degree_Centrality',
                           'Gini_Degree_Centrality',
                           'Avg_Betweenness_Osc_Top',
                           'Gini_Betweenness_Osc_Top',
                           'Avg_Betweenness_Osc',
                           'Gini_Betweenness_Osc',
                           'Percentage_Connected_Actors',
                           'Avg_Sentiment',
                           'Gini_Sentiment',
                           'Avg_Sentiment_Top',
                           'Gini_Sentiment_Top',
                           'Avg_complexity',
                           'Gini_complexity',
                           'Avg_complexity_Top',
                           'Gini_complexity_Top',
                           'Avg_Influence_Top',
                           'Gini_Influence_Top',
                           'Avg_Influence',
                           'Gini_Influence',
                           'Avg_Influence_Per_Message_Top',
                           'Gini_Influence_Per_Message_Top',
                           'Avg_Influence_Per_Message',
                           'Gini_Influence_Per_Message',
                           'Avg_Contribution_Index_Top',
                           'Gini_Contribution_Index_Top',
                           'Avg_Contribution_Index',
                           'Avg_Contribution_Index_Oscil_Top',
                           'Gini_Contribution_Index_Oscil_Top',
                           'Avg_Contribution_Index_Oscil',
                           'Gini_Contribution_Index_Oscil',
                           'Percentage_Isolated_Actors',
                           'Percentage_Hirable_Actors',
                           'Percentage_Closed_Issues',
                           'Percentage_Creations',
                           'Percentage_Solo_Issues',
                           'Repository_Name',
                           'Target'])
# 2: Process Input CSVs
# One metrics row per repository folder under csvFolderPath.
for dirs in os.listdir(csvFolderPath):
    print(dirs)
    df = processCSV(dirs, df)
df = df.sort_values(by='Repository_Name', ascending=True)
# drop columns with null values
df = df.dropna(axis=1)
# drop columns which contain the string Avg in the header
df = df[df.columns.drop(list(df.filter(regex='Avg')))]
df.to_csv(resultPath, sep=',', encoding='utf-8', index=False)
print(1) | scripts/DataPrep/DataPrep.py | import pandas as pd
import numpy as np
import math
import os
from scipy.special import entr
csvFolderPath = "../../../COIN/4_Repos/Ready_For_Regression/"
resultPath = '../Regression/input.csv'
def giniCalculator(array):
x = np.array(array)
mad = np.abs(np.subtract.outer(x, x)).mean()
# Relative mean absolute difference
rmad = mad / np.mean(x)
# Gini coefficient
g = 0.5 * rmad
g = round(g,4)
return g
# Function used to extract aggregated metrics from 1 repository
def processCSV(dir, df):
actorPath = csvFolderPath + dir + '/processed/nodes.csv'
ticketsPath = csvFolderPath + dir + '/processed/edges.csv'
SOTPath = csvFolderPath + dir + '/processed/sentiment_over_time.csv'
activityPath = csvFolderPath + dir + '/processed/activity.csv'
awvciPath = csvFolderPath + dir + '/processed/awvci.csv'
betwCentrPath = csvFolderPath + dir + '/processed/betw_centr.csv'
betwOscPath = csvFolderPath + dir + '/processed/betw_osc.csv'
densityPath = csvFolderPath + dir + '/processed/density.csv'
repo = dir
# Extract metrics from actor file
actors = pd.read_csv(actorPath, sep=',', encoding='utf-8', error_bad_lines=False)
actors_rows = actors.shape[0]
ntop = int(round((actors_rows * 0.05), 0))
actorsTop = actors.sort_values(by='total influence', ascending=False).head(ntop)
top_avg_deg_cent = round(actorsTop['Degree centrality'].mean(), 4)
top_gini_deg_cent = giniCalculator(actorsTop['Degree centrality'])
top_avg_betw_osc = round(actorsTop['Betweenness centrality oscillation'].mean(), 4)
top_gini_betw_osc = giniCalculator(actorsTop['Betweenness centrality oscillation'])
top_avg_sentiment = round(actorsTop['avg sentiment'].mean(), 4)
top_gini_sentiment = giniCalculator(actorsTop['avg sentiment'])
top_avg_complexity = round(actorsTop['avg complexity'].mean(), 4)
top_gini_complexity = giniCalculator(actorsTop['avg complexity'])
top_avg_influence = round(actorsTop['total influence'].mean(), 4)
top_gini_influence = giniCalculator(actorsTop['total influence'])
top_avg_influence_pm = round(actorsTop['average influence per message'].mean(), 4)
top_gini_influence_pm = giniCalculator(actorsTop['average influence per message'])
top_avg_contrib = round(actorsTop['Contribution index'].mean(), 4)
top_gini_contrib = giniCalculator(actorsTop['Contribution index'])
top_avg_contrib_oscil = round(actorsTop['Contribution index oscillation'].mean(), 4)
top_gini_contrib_oscil = giniCalculator(actorsTop['Contribution index oscillation'])
actorsConnected = actors.loc[actors['Degree centrality'] > 1]
avg_deg_cent = round(actorsConnected['Degree centrality'].mean(), 4)
gini_deg_cent = giniCalculator(actorsConnected['Degree centrality'])
avg_betw_osc = round(actorsConnected['Betweenness centrality oscillation'].mean(), 4)
gini_betw_osc = giniCalculator(actorsConnected['Betweenness centrality oscillation'])
avg_influence = round(actorsConnected['total influence'].mean(), 4)
gini_influence = giniCalculator(actorsConnected['total influence'])
avg_influence_pm = round(actorsConnected['average influence per message'].mean(), 4)
gini_influence_pm = giniCalculator(actorsConnected['average influence per message'])
avg_contrib = round(actorsConnected['Contribution index'].mean(), 4)
# gini_contrib = giniCalculator(actorsConnected['Contribution index'])
avg_contrib_oscil = round(actorsConnected['Contribution index oscillation'].mean(), 4)
gini_contrib_oscil = giniCalculator(actorsConnected['Contribution index oscillation'])
avg_sentiment = round(actorsConnected['avg sentiment'].mean(), 4)
gini_sentiment = giniCalculator(actorsConnected['avg sentiment'])
avg_complexity = round(actorsConnected['avg complexity'].mean(), 4)
gini_complexity = giniCalculator(actorsConnected['avg complexity'])
perc_connected = round((actors.loc[actors['Degree centrality'] >= ntop].shape[0]) / actors_rows, 4)
perc_isolated = round((actors.loc[actors['Degree centrality'] == 1].shape[0]) / actors_rows, 4)
perc_hirable = round((actors.loc[actors['hireable'] == 1].shape[0]) / actors_rows, 4)
# Extract metrics from ticket file
edges = pd.read_csv(ticketsPath, sep=',', encoding='utf-8', error_bad_lines=False)
edges_rows = edges.shape[0]
perc_closed_issues = round((edges.loc[edges['Status'] == 'closed'].shape[0]) / edges_rows, 4)
perc_creation = round((edges.loc[edges['Edge_type'] == 'CREATION'].shape[0]) / edges_rows, 4)
perc_solo = edges[['Name']].groupby('Name').size().reset_index(name='counts')
perc_solo = round((perc_solo.loc[perc_solo['counts'] == 1].shape[0]) / perc_solo.shape[0], 4)
# Extract metrics from sentiment over time data
SOT = pd.read_csv(SOTPath, sep=',', encoding='utf-8', error_bad_lines=False)
group_messages = round(SOT['Avg. messages per day'].mean(), 4)
group_sentiment = round(SOT['Sentiment'].mean(), 4)
group_emotionality = round(SOT['Emotionality'].mean(), 4)
group_complexity = round(SOT['Complexity'].mean(), 4)
group_influence = round(SOT['Influence'].mean(), 4)
# Extract time series data
activity = pd.read_csv(activityPath, sep=',', encoding='utf-8', error_bad_lines=False).T[[0]]
activity['Activity_Increase'] = np.nan
activity = activity[2:].reset_index()
for index, row in activity.iterrows():
if index > 0:
activity.loc[index, 'Activity_Increase'] = activity.loc[index, 0] / activity.loc[index - 1, 0]
group_activity_increase = round(activity['Activity_Increase'].mean(), 4)
if math.isnan(group_activity_increase):
group_activity_increase = 1
group_activity = activity.loc[:,0]
group_activity = round(group_activity.tail(1).values[0],4)
awvci = pd.read_csv(awvciPath, sep=',', encoding='utf-8', error_bad_lines=False).T[[0]]
awvci['awvci_Increase'] = np.nan
awvci = awvci[2:].reset_index()
for index, row in awvci.iterrows():
if index > 0:
if awvci.loc[index - 1, 0] > 0:
awvci.loc[index, 'awvci_Increase'] = awvci.loc[index, 0] / awvci.loc[index - 1, 0]
group_awvci_increase = round(awvci['awvci_Increase'].mean(), 4)
if math.isnan(group_awvci_increase):
group_awvci_increase = 1
group_awvci = awvci.loc[:,0]
group_awvci = round(group_awvci.tail(1).values[0],4)
betwCentr = pd.read_csv(betwCentrPath, sep=',', encoding='utf-8', error_bad_lines=False).T[[0]]
betwCentr['betwCentr_Increase'] = np.nan
betwCentr = betwCentr[2:].reset_index()
for index, row in betwCentr.iterrows():
if index > 0:
if betwCentr.iloc[index - 1, 1] > 0:
betwCentr.loc[index, 'betwCentr_Increase'] = betwCentr.iloc[index, 1] / betwCentr.iloc[index - 1, 1]
group_betwCentr_increase = round(betwCentr['betwCentr_Increase'].mean(), 4)
if math.isnan(group_betwCentr_increase):
group_betwCentr_increase = 1
group_betw_centr = betwCentr.loc[:,0]
group_betw_centr = round(group_betw_centr.tail(1).values[0],4)
betwOsc = pd.read_csv(betwOscPath, sep=',', encoding='utf-8', error_bad_lines=False).T[[0]]
betwOsc['betwOsc_Increase'] = np.nan
betwOsc = betwOsc[2:].reset_index()
for index, row in betwOsc.iterrows():
if index > 0:
if betwOsc.loc[index - 1, 0] > 0:
betwOsc.loc[index, 'betwOsc_Increase'] = betwOsc.loc[index, 0] / betwOsc.loc[index - 1, 0]
group_betwOsc_increase = round(betwOsc['betwOsc_Increase'].mean(), 4)
if math.isnan(group_betwOsc_increase):
group_betwOsc_increase = 1
group_betw_osc = betwOsc.loc[:,0]
group_betw_osc = round(group_betw_osc.tail(1).values[0],4)
density = pd.read_csv(densityPath, sep=',', encoding='utf-8', error_bad_lines=False).T[[0]]
density['density_Increase'] = np.nan
density = density[2:].reset_index()
for index, row in density.iterrows():
if index > 0:
if density.loc[index - 1, 0] > 0:
density.loc[index, 'density_Increase'] = density.loc[index, 0] / density.loc[index - 1, 0]
group_density_increase = round(density['density_Increase'].mean(), 4)
if math.isnan(group_density_increase):
group_density_increase = 1
group_density = density.loc[:,0]
group_density = round(group_density.tail(1).values[0],4)
result = df.append({'Group_Messages_Per_Day': group_messages,
'Group_Sentiment': group_sentiment,
'Group_Emotionality': group_emotionality,
'Group_Complexity': group_complexity,
'Group_Influence': group_influence,
'Group_Percentage_Activity_Increase_Monthly': group_activity_increase,
'Group_Activity': group_activity,
'Group_Percentage_AWVCI_Increase_Monthly': group_awvci_increase,
'Group_AWVCI':group_awvci,
'Group_Percentage_Betweenness_Centrality_Increase_Monthly': group_betwCentr_increase,
'Group_Betweenness_Centrality':group_betw_centr,
'Group_Percentage_Betweenness_Oscillation_Increase_Monthly': group_betwOsc_increase,
'Group_Betweenness_Oscillation':group_betw_osc,
'Group_Percentage_Density_Increase_Monthly': group_density_increase,
'Group_Density':group_density,
'Avg_Degree_Centrality_Top': top_avg_deg_cent,
'Gini_Degree_Centrality_Top': top_gini_deg_cent,
'Avg_Degree_Centrality': avg_deg_cent,
'Gini_Degree_Centrality': gini_deg_cent,
'Avg_Betweenness_Osc_Top': top_avg_betw_osc,
'Gini_Betweenness_Osc_Top': top_gini_betw_osc,
'Avg_Betweenness_Osc': avg_betw_osc,
'Gini_Betweenness_Osc': gini_betw_osc,
'Percentage_Connected_Actors': perc_connected,
'Avg_Sentiment': avg_sentiment,
'Gini_Sentiment': gini_sentiment,
'Avg_Sentiment_Top': top_avg_sentiment,
'Gini_Sentiment_Top': top_gini_sentiment,
'Avg_complexity': avg_complexity,
'Gini_complexity': gini_complexity,
'Avg_complexity_Top': top_avg_complexity,
'Gini_complexity_Top': top_gini_complexity,
'Avg_Influence_Top': top_avg_influence,
'Gini_Influence_Top': top_gini_influence,
'Avg_Influence': avg_influence,
'Gini_Influence': gini_influence,
'Avg_Influence_Per_Message_Top': top_avg_influence_pm,
'Gini_Influence_Per_Message_Top': top_gini_influence_pm,
'Avg_Influence_Per_Message': avg_influence_pm,
'Gini_Influence_Per_Message': gini_influence_pm,
'Avg_Contribution_Index_Top': top_avg_contrib,
'Gini_Contribution_Index_Top': top_gini_contrib,
'Avg_Contribution_Index': avg_contrib,
'Avg_Contribution_Index_Oscil_Top': top_avg_contrib_oscil,
'Gini_Contribution_Index_Oscil_Top': top_gini_contrib_oscil,
'Avg_Contribution_Index_Oscil': avg_contrib_oscil,
'Gini_Contribution_Index_Oscil': gini_contrib_oscil,
'Percentage_Closed_Issues': perc_closed_issues,
'Percentage_Creations': perc_creation,
'Percentage_Isolated_Actors': perc_isolated,
'Percentage_Hirable_Actors': perc_hirable,
'Percentage_Solo_Issues': perc_solo,
'Repository_Name': repo}, ignore_index=True)
return result
# 1: Create DataFrame
df = pd.DataFrame(columns=['Group_Messages_Per_Day',
'Group_Sentiment',
'Group_Emotionality',
'Group_Complexity',
'Group_Influence',
'Group_Percentage_Activity_Increase_Monthly',
'Group_Activity',
'Group_Percentage_AWVCI_Increase_Monthly',
'Group_AWVCI',
'Group_Percentage_Betweenness_Centrality_Increase_Monthly',
'Group_Betweenness_Centrality',
'Group_Percentage_Betweenness_Oscillation_Increase_Monthly',
'Group_Betweenness_Oscillation',
'Group_Percentage_Density_Increase_Monthly',
'Group_Density',
'Avg_Degree_Centrality_Top',
'Gini_Degree_Centrality_Top',
'Avg_Degree_Centrality',
'Gini_Degree_Centrality',
'Avg_Betweenness_Osc_Top',
'Gini_Betweenness_Osc_Top',
'Avg_Betweenness_Osc',
'Gini_Betweenness_Osc',
'Percentage_Connected_Actors',
'Avg_Sentiment',
'Gini_Sentiment',
'Avg_Sentiment_Top',
'Gini_Sentiment_Top',
'Avg_complexity',
'Gini_complexity',
'Avg_complexity_Top',
'Gini_complexity_Top',
'Avg_Influence_Top',
'Gini_Influence_Top',
'Avg_Influence',
'Gini_Influence',
'Avg_Influence_Per_Message_Top',
'Gini_Influence_Per_Message_Top',
'Avg_Influence_Per_Message',
'Gini_Influence_Per_Message',
'Avg_Contribution_Index_Top',
'Gini_Contribution_Index_Top',
'Avg_Contribution_Index',
'Avg_Contribution_Index_Oscil_Top',
'Gini_Contribution_Index_Oscil_Top',
'Avg_Contribution_Index_Oscil',
'Gini_Contribution_Index_Oscil',
'Percentage_Isolated_Actors',
'Percentage_Hirable_Actors',
'Percentage_Closed_Issues',
'Percentage_Creations',
'Percentage_Solo_Issues',
'Repository_Name',
'Target'])
# 2: Process Input CSVs
for dirs in os.listdir(csvFolderPath):
print(dirs)
df = processCSV(dirs, df)
df = df.sort_values(by='Repository_Name', ascending=True)
# drop columns with null values
df = df.dropna(axis=1)
# drop columns which contain the string Avg in the header
df = df[df.columns.drop(list(df.filter(regex='Avg')))]
df.to_csv(resultPath, sep=',', encoding='utf-8', index=False)
print(1) | 0.380644 | 0.233553 |
from gdcmdtools.perm import GDPerm
from gdcmdtools.perm import help_permission_text
import argparse
from argparse import RawTextHelpFormatter
from gdcmdtools.base import BASE_INFO
from gdcmdtools.base import DEBUG_LEVEL
from pprint import pprint
import sys
import logging
logger = logging.getLogger()
__THIS_APP = 'gdperm'
__THIS_DESCRIPTION = 'Tool to change file\'s permission on Google Drive'
__THIS_VERSION = BASE_INFO["version"]
def test():
file_id = "https://drive.google.com/open?id=0B60IjoJ-xHK6YU1wZ2hsQVQ0SzA"
permission_id = "02914492818163807046i"
action1 = {
'name': 'update',
'param': [
permission_id,
'user',
'writer',
'<EMAIL>']}
action2 = {
'name': 'update',
'param': [
permission_id,
'user',
'reader',
'<EMAIL>']}
for action in [action1, action2]:
perm = GDPerm(file_id, action)
result = perm.run()
pprint(result)
assert result[u"role"] == action["param"][2]
if __name__ == '__main__':
arg_parser = argparse.ArgumentParser(
description='%s v%s - %s - %s (%s)' %
(__THIS_APP,
__THIS_VERSION,
__THIS_DESCRIPTION,
BASE_INFO["app"],
BASE_INFO["description"]),
formatter_class=RawTextHelpFormatter)
arg_parser.add_argument(
'file_id',
help='The id for the file you\'re going to change permission')
mutex_group = arg_parser.add_mutually_exclusive_group(required=False)
mutex_group.add_argument(
'--list',
action='store_true',
help='list the permission resource of the file')
mutex_group.add_argument(
'--get',
metavar='PERMISSION_ID',
help='get the permission resource by id')
PERMISSION_METAVAR = ('TYPE', 'ROLE', 'VALUE')
mutex_group.add_argument(
'--insert',
metavar=PERMISSION_METAVAR,
nargs=len(PERMISSION_METAVAR),
help="set the permission of the created folder, can be:\n" +
'\n'.join(help_permission_text) +
'\nvalue: user or group e-mail address,\nor \'me\' to refer to the current authorized user\n' +
'ex: -p anyone reader me # set the uploaded file public-read')
UPDATE_PERMISSION_METAVAR = ("PERMISSION_ID",) + PERMISSION_METAVAR
mutex_group.add_argument(
'--update',
metavar=UPDATE_PERMISSION_METAVAR,
nargs=len(UPDATE_PERMISSION_METAVAR),
help="update the permission, refer to the help of --insert")
mutex_group.add_argument(
'--delete',
metavar='PERMISSION_ID',
help='delete the permission of the file by id')
mutex_group.add_argument(
'--get_by_user',
metavar='USER_EMAIL',
help='get the permission associated with user')
arg_parser.add_argument('--debug',
choices=DEBUG_LEVEL,
default=DEBUG_LEVEL[-1],
help='define the debug level')
args = arg_parser.parse_args()
# set debug devel
logger.setLevel(getattr(logging, args.debug.upper()))
action = {}
valid_actions = [
"list",
"get",
"insert",
"update",
"delete",
"get_by_user"]
for a in valid_actions:
action[a] = args.__dict__[a]
# check which action is given by argument
for act in action:
if action[act] != mutex_group.get_default(act):
pass_action = {"name": act, "param": action[act]}
logger.debug("pass_action=%s" % pass_action)
perm = GDPerm(args.file_id, pass_action)
result = perm.run()
pprint(result)
if result is None:
sys.exit(1)
else:
sys.exit(0)
logger.error('unexpected error')
sys.exit(1) | gdperm.py |
from gdcmdtools.perm import GDPerm
from gdcmdtools.perm import help_permission_text
import argparse
from argparse import RawTextHelpFormatter
from gdcmdtools.base import BASE_INFO
from gdcmdtools.base import DEBUG_LEVEL
from pprint import pprint
import sys
import logging
logger = logging.getLogger()
__THIS_APP = 'gdperm'
__THIS_DESCRIPTION = 'Tool to change file\'s permission on Google Drive'
__THIS_VERSION = BASE_INFO["version"]
def test():
file_id = "https://drive.google.com/open?id=0B60IjoJ-xHK6YU1wZ2hsQVQ0SzA"
permission_id = "02914492818163807046i"
action1 = {
'name': 'update',
'param': [
permission_id,
'user',
'writer',
'<EMAIL>']}
action2 = {
'name': 'update',
'param': [
permission_id,
'user',
'reader',
'<EMAIL>']}
for action in [action1, action2]:
perm = GDPerm(file_id, action)
result = perm.run()
pprint(result)
assert result[u"role"] == action["param"][2]
if __name__ == '__main__':
arg_parser = argparse.ArgumentParser(
description='%s v%s - %s - %s (%s)' %
(__THIS_APP,
__THIS_VERSION,
__THIS_DESCRIPTION,
BASE_INFO["app"],
BASE_INFO["description"]),
formatter_class=RawTextHelpFormatter)
arg_parser.add_argument(
'file_id',
help='The id for the file you\'re going to change permission')
mutex_group = arg_parser.add_mutually_exclusive_group(required=False)
mutex_group.add_argument(
'--list',
action='store_true',
help='list the permission resource of the file')
mutex_group.add_argument(
'--get',
metavar='PERMISSION_ID',
help='get the permission resource by id')
PERMISSION_METAVAR = ('TYPE', 'ROLE', 'VALUE')
mutex_group.add_argument(
'--insert',
metavar=PERMISSION_METAVAR,
nargs=len(PERMISSION_METAVAR),
help="set the permission of the created folder, can be:\n" +
'\n'.join(help_permission_text) +
'\nvalue: user or group e-mail address,\nor \'me\' to refer to the current authorized user\n' +
'ex: -p anyone reader me # set the uploaded file public-read')
UPDATE_PERMISSION_METAVAR = ("PERMISSION_ID",) + PERMISSION_METAVAR
mutex_group.add_argument(
'--update',
metavar=UPDATE_PERMISSION_METAVAR,
nargs=len(UPDATE_PERMISSION_METAVAR),
help="update the permission, refer to the help of --insert")
mutex_group.add_argument(
'--delete',
metavar='PERMISSION_ID',
help='delete the permission of the file by id')
mutex_group.add_argument(
'--get_by_user',
metavar='USER_EMAIL',
help='get the permission associated with user')
arg_parser.add_argument('--debug',
choices=DEBUG_LEVEL,
default=DEBUG_LEVEL[-1],
help='define the debug level')
args = arg_parser.parse_args()
# set debug devel
logger.setLevel(getattr(logging, args.debug.upper()))
action = {}
valid_actions = [
"list",
"get",
"insert",
"update",
"delete",
"get_by_user"]
for a in valid_actions:
action[a] = args.__dict__[a]
# check which action is given by argument
for act in action:
if action[act] != mutex_group.get_default(act):
pass_action = {"name": act, "param": action[act]}
logger.debug("pass_action=%s" % pass_action)
perm = GDPerm(args.file_id, pass_action)
result = perm.run()
pprint(result)
if result is None:
sys.exit(1)
else:
sys.exit(0)
logger.error('unexpected error')
sys.exit(1) | 0.239883 | 0.123842 |
from kivy.properties import NumericProperty
from kivy.uix.widget import Widget
from kivy.uix.floatlayout import FloatLayout
from kivy.uix.scatter import Scatter
from kivy.clock import Clock
from math import atan2, degrees
class Card(Scatter):
symm_boundary = NumericProperty(120)
angle = NumericProperty(0)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def on_touch_down(self, touch):
if not self.collide_point(*touch.pos):
return False
touch.ud['direction'] = 0
touch.ud['origCenter'] = self.center
return super().on_touch_down(touch)
def on_touch_move(self, touch):
if not(self.collide_point(*touch.pos) and 'origCenter' in touch.ud):
return False
'''
diff_x_tp = touch.x - touch.ud['origCenter'][0]
if 'angle' in touch.profile:
touch.ud['angle'] = touch.a
print('The touch angle is', touch.a)
else:
# replace initial_y and initial_x with the some sort of baseline(y) center(x) (=origin) around which we want to rotate
def updates_spin(*args):
if diff_x_tp > 0:
self.angle = -abs(touch.x - touch.ud['origCenter'][0])/10
elif diff_x_tp < 0:
self.angle = abs(touch.x - touch.ud['origCenter'][0])/10
else:
pass # do nothing
#self.angle = degrees(atan2(touch.y - touch.ud['origCenter'][1], touch.x - touch.ud['origCenter'][0]))
print('The touch angle is', self.angle)
Clock.schedule_once(updates_spin, 0)
'''
touch.ud['direction'] = (self.center_x - touch.ud['origCenter'][0])
return super().on_touch_move(touch)
def on_touch_up(self, touch):
if not(self.collide_point(*touch.pos) and 'direction' in touch.ud):
return False
if touch.ud['direction'] >= self.symm_boundary:
print('Released right ---', touch)
elif touch.ud['direction'] <= -self.symm_boundary:
print('Released left ---', touch)
else:
self.center = touch.ud['origCenter']
def updates_spin(*args):
self.angle = 0
Clock.schedule_once(updates_spin, 0.01)
print('Reseting position and angle')
return super().on_touch_up(touch)
def on_transform_with_touch(self, touch):
'''
Called when a touch event has transformed the scatter widget.
By default this does nothing, but can be overriden by derived
classes that need to react to transformations caused by user
input.
:Parameters:
`touch`:
The touch object which triggered the transformation.
.. versionadded:: 1.8.0
'''
print('Moving', touch.ud['direction'])
def build(self):
return self
class Swiper(FloatLayout):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def build(self):
'''
card_config = {
#'size_hint': (None, None),
#'size': (300,700),
#'pos_hint': {'center_y': .5, 'center_x': .5},
'do_rotation': True,
'do_scale': False,
'do_translation': True,
'do_collide_after_children': False
}
'''
return self | app/widgets/swiper.py | from kivy.properties import NumericProperty
from kivy.uix.widget import Widget
from kivy.uix.floatlayout import FloatLayout
from kivy.uix.scatter import Scatter
from kivy.clock import Clock
from math import atan2, degrees
class Card(Scatter):
symm_boundary = NumericProperty(120)
angle = NumericProperty(0)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def on_touch_down(self, touch):
if not self.collide_point(*touch.pos):
return False
touch.ud['direction'] = 0
touch.ud['origCenter'] = self.center
return super().on_touch_down(touch)
def on_touch_move(self, touch):
if not(self.collide_point(*touch.pos) and 'origCenter' in touch.ud):
return False
'''
diff_x_tp = touch.x - touch.ud['origCenter'][0]
if 'angle' in touch.profile:
touch.ud['angle'] = touch.a
print('The touch angle is', touch.a)
else:
# replace initial_y and initial_x with the some sort of baseline(y) center(x) (=origin) around which we want to rotate
def updates_spin(*args):
if diff_x_tp > 0:
self.angle = -abs(touch.x - touch.ud['origCenter'][0])/10
elif diff_x_tp < 0:
self.angle = abs(touch.x - touch.ud['origCenter'][0])/10
else:
pass # do nothing
#self.angle = degrees(atan2(touch.y - touch.ud['origCenter'][1], touch.x - touch.ud['origCenter'][0]))
print('The touch angle is', self.angle)
Clock.schedule_once(updates_spin, 0)
'''
touch.ud['direction'] = (self.center_x - touch.ud['origCenter'][0])
return super().on_touch_move(touch)
def on_touch_up(self, touch):
if not(self.collide_point(*touch.pos) and 'direction' in touch.ud):
return False
if touch.ud['direction'] >= self.symm_boundary:
print('Released right ---', touch)
elif touch.ud['direction'] <= -self.symm_boundary:
print('Released left ---', touch)
else:
self.center = touch.ud['origCenter']
def updates_spin(*args):
self.angle = 0
Clock.schedule_once(updates_spin, 0.01)
print('Reseting position and angle')
return super().on_touch_up(touch)
def on_transform_with_touch(self, touch):
'''
Called when a touch event has transformed the scatter widget.
By default this does nothing, but can be overriden by derived
classes that need to react to transformations caused by user
input.
:Parameters:
`touch`:
The touch object which triggered the transformation.
.. versionadded:: 1.8.0
'''
print('Moving', touch.ud['direction'])
def build(self):
return self
class Swiper(FloatLayout):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def build(self):
'''
card_config = {
#'size_hint': (None, None),
#'size': (300,700),
#'pos_hint': {'center_y': .5, 'center_x': .5},
'do_rotation': True,
'do_scale': False,
'do_translation': True,
'do_collide_after_children': False
}
'''
return self | 0.777131 | 0.223441 |
import unittest
from src.utils import *
class TestUtilsModule(unittest.TestCase):
"""Tests the utils.py module."""
def test_get_start_end_values_two_or_more_elements_in_array(self) -> None:
arr = np.array([1, 2])
return_value = get_start_end_values(array=arr)
self.assertEqual(len(return_value), 2)
self.assertEqual(arr[0], return_value[0])
self.assertEqual(arr[-1], return_value[1])
def test_get_start_end_values_only_one_elements_in_array(self) -> None:
arr = np.array([5])
return_value = get_start_end_values(array=arr)
self.assertEqual(len(return_value), 2)
self.assertEqual(arr[0], return_value[0])
self.assertEqual(arr[0], return_value[1])
def test_get_start_end_values_raises_index_error_with_empty_array(self) -> None:
arr = np.array([])
with self.assertRaises(IndexError):
get_start_end_values(array=arr)
def test_get_missing_coord_y_equals_x_line(self) -> None:
# line -> y = x
p1 = (0, 0)
x1, y1 = p1
p2 = (1, 1)
x2, expected_y2 = p2
actual_y2 = get_missing_coordinate(x1=x1, y1=y1, x2=x2, angular_coefficient=1.0)
self.assertEqual(expected_y2, actual_y2)
def test_get_missing_coord_y_equals_minus_two_x_plus_three_line(self) -> None:
# line -> y = -2x + 3
p1 = (0, 3)
x1, y1 = p1
p2 = (5, -7)
x2, expected_y2 = p2
actual_y2 = get_missing_coordinate(x1=x1, y1=y1, x2=x2, angular_coefficient=-2.0)
self.assertEqual(expected_y2, actual_y2)
def test_array_in_sequence_for_array_in_sequence(self) -> None:
array_01 = np.array([1, 2, 3])
array_02 = np.array([4, 5, 6])
sequence = (array_01, 'a', True, array_01, array_02, 2)
self.assertTrue(array_in_sequence(array_01, sequence))
self.assertTrue(array_in_sequence(array_02, sequence))
def test_array_in_sequence_for_array_not_sequence(self) -> None:
array_01 = np.array([1, 2, 3])
array_02 = np.array([4, 5, 6])
sequence = ('a', array_02, True, array_02, 2)
self.assertFalse(array_in_sequence(array_01, sequence))
self.assertFalse(array_in_sequence(array_01, (True, False, 'Sun', 'Moon', 10)))
self.assertFalse(array_in_sequence(array_01, 'This string is also a Python sequence.'))
if __name__ == '__main__':
unittest.main() | test/test_utils.py |
import unittest
from src.utils import *
class TestUtilsModule(unittest.TestCase):
"""Tests the utils.py module."""
def test_get_start_end_values_two_or_more_elements_in_array(self) -> None:
arr = np.array([1, 2])
return_value = get_start_end_values(array=arr)
self.assertEqual(len(return_value), 2)
self.assertEqual(arr[0], return_value[0])
self.assertEqual(arr[-1], return_value[1])
def test_get_start_end_values_only_one_elements_in_array(self) -> None:
arr = np.array([5])
return_value = get_start_end_values(array=arr)
self.assertEqual(len(return_value), 2)
self.assertEqual(arr[0], return_value[0])
self.assertEqual(arr[0], return_value[1])
def test_get_start_end_values_raises_index_error_with_empty_array(self) -> None:
arr = np.array([])
with self.assertRaises(IndexError):
get_start_end_values(array=arr)
def test_get_missing_coord_y_equals_x_line(self) -> None:
# line -> y = x
p1 = (0, 0)
x1, y1 = p1
p2 = (1, 1)
x2, expected_y2 = p2
actual_y2 = get_missing_coordinate(x1=x1, y1=y1, x2=x2, angular_coefficient=1.0)
self.assertEqual(expected_y2, actual_y2)
def test_get_missing_coord_y_equals_minus_two_x_plus_three_line(self) -> None:
# line -> y = -2x + 3
p1 = (0, 3)
x1, y1 = p1
p2 = (5, -7)
x2, expected_y2 = p2
actual_y2 = get_missing_coordinate(x1=x1, y1=y1, x2=x2, angular_coefficient=-2.0)
self.assertEqual(expected_y2, actual_y2)
def test_array_in_sequence_for_array_in_sequence(self) -> None:
array_01 = np.array([1, 2, 3])
array_02 = np.array([4, 5, 6])
sequence = (array_01, 'a', True, array_01, array_02, 2)
self.assertTrue(array_in_sequence(array_01, sequence))
self.assertTrue(array_in_sequence(array_02, sequence))
def test_array_in_sequence_for_array_not_sequence(self) -> None:
array_01 = np.array([1, 2, 3])
array_02 = np.array([4, 5, 6])
sequence = ('a', array_02, True, array_02, 2)
self.assertFalse(array_in_sequence(array_01, sequence))
self.assertFalse(array_in_sequence(array_01, (True, False, 'Sun', 'Moon', 10)))
self.assertFalse(array_in_sequence(array_01, 'This string is also a Python sequence.'))
if __name__ == '__main__':
unittest.main() | 0.728169 | 0.751169 |
#Import Libraries
import json
import logging
from flask import Flask
from flask import render_template
import logging_formatting
import test_covid_news_handling as tcnh
import covid_data_handler as cdh
import covid_news_handling as cnh
import test_covid_data_handler as tcdh
#Initalise app
app = Flask(__name__)
#Create Logger for this module
logger = logging.getLogger(__name__)
#Opening JSON file
j = open('config.json')
#Returns JSON object as a dictionary
data_configuration_json = json.load(j)
#Closing file
j.close()
#Define Global Variables
news = cnh.news_API_request(covid_terms = "Covid COVID-19 coronavirus")
covid_data_exeter = cdh.covid_API_request()
covid_data_england = cdh.covid_API_request(location = data_configuration_json["location"],
location_type = data_configuration_json["location_type"])
@app.route('/')
def redirect():
"""Redirects user to the /index app route"""
#Logging
logging.info("The redirect function has been called")
string = "Please enter the url http://127.0.0.1:5000/index to access the dashboard."
return string
@app.route('/index')
def run_application():
"""Main function which is responsible for the events produced by the client"""
#Logging
logging.info("The run_application function has been called")
#Perfom Tests
#Covid Data Handler Tests
tcdh.test_parse_csv_data()
tcdh.test_process_covid_csv_data()
tcdh.test_covid_API_request()
#Covid News Handling Tests
tcnh.test_news_API_request()
news_articles = news['articles']
cnh.delete_news_article(news_articles)
return render_template('index.html',
title = 'Coronavirus Daily Update',
image = 'coronavirus1.jpg',
news_articles = news['articles'],
location = covid_data_exeter['data'][0]['areaName'],
local_7day_infections =
covid_data_exeter['data'][0]['newCasesByPublishDateRollingSum'],
nation_location =
covid_data_england['data'][0]['areaName'],
national_7day_infections =
covid_data_england['data'][0]['newCasesByPublishDateRollingSum'],
hospital_cases =
"Hospital Cases: " + str(covid_data_england['data'][0]['hospitalCases']),
deaths_total =
"Total Deaths: " + str(covid_data_england['data'][0]['cumDeaths28DaysByPublishDate']))
if __name__ == '__main__':
app.run() | user_interface.py |
#Import Libraries
import json
import logging
from flask import Flask
from flask import render_template
import logging_formatting
import test_covid_news_handling as tcnh
import covid_data_handler as cdh
import covid_news_handling as cnh
import test_covid_data_handler as tcdh
#Initalise app
app = Flask(__name__)
#Create Logger for this module
logger = logging.getLogger(__name__)
#Opening JSON file
j = open('config.json')
#Returns JSON object as a dictionary
data_configuration_json = json.load(j)
#Closing file
j.close()
#Define Global Variables
news = cnh.news_API_request(covid_terms = "Covid COVID-19 coronavirus")
covid_data_exeter = cdh.covid_API_request()
covid_data_england = cdh.covid_API_request(location = data_configuration_json["location"],
location_type = data_configuration_json["location_type"])
@app.route('/')
def redirect():
"""Redirects user to the /index app route"""
#Logging
logging.info("The redirect function has been called")
string = "Please enter the url http://127.0.0.1:5000/index to access the dashboard."
return string
@app.route('/index')
def run_application():
"""Main function which is responsible for the events produced by the client"""
#Logging
logging.info("The run_application function has been called")
#Perfom Tests
#Covid Data Handler Tests
tcdh.test_parse_csv_data()
tcdh.test_process_covid_csv_data()
tcdh.test_covid_API_request()
#Covid News Handling Tests
tcnh.test_news_API_request()
news_articles = news['articles']
cnh.delete_news_article(news_articles)
return render_template('index.html',
title = 'Coronavirus Daily Update',
image = 'coronavirus1.jpg',
news_articles = news['articles'],
location = covid_data_exeter['data'][0]['areaName'],
local_7day_infections =
covid_data_exeter['data'][0]['newCasesByPublishDateRollingSum'],
nation_location =
covid_data_england['data'][0]['areaName'],
national_7day_infections =
covid_data_england['data'][0]['newCasesByPublishDateRollingSum'],
hospital_cases =
"Hospital Cases: " + str(covid_data_england['data'][0]['hospitalCases']),
deaths_total =
"Total Deaths: " + str(covid_data_england['data'][0]['cumDeaths28DaysByPublishDate']))
if __name__ == '__main__':
app.run() | 0.314682 | 0.055823 |
import os
import sys
from setuptools import setup, find_packages
from configparser import ConfigParser
if sys.version_info < (3, 6):
error = """
GWCS supports Python versions 3.6 and above.
"""
sys.exit(error)
conf = ConfigParser()
conf.read(['setup.cfg'])
metadata = dict(conf.items('metadata'))
PACKAGENAME = metadata.get('name', 'packagename')
DESCRIPTION = metadata.get('description', 'Astropy affiliated package')
AUTHOR = metadata.get('author', '')
AUTHOR_EMAIL = metadata.get('author_email', '')
LICENSE = metadata.get('license', 'unknown')
URL = metadata.get('url', 'http://astropy.org')
def get_package_data():
# Installs the schema files
schemas = []
root = os.path.join(PACKAGENAME, 'schemas')
for node, dirs, files in os.walk(root):
for fname in files:
if fname.endswith('.yaml'):
schemas.append(
os.path.relpath(os.path.join(node, fname), root))
# In the package directory, install to the subdirectory 'schemas'
schemas = [os.path.join('schemas', s) for s in schemas]
return schemas
schemas = get_package_data()
PACKAGE_DATA ={'gwcs':schemas}
entry_points = {'asdf_extensions': 'gwcs = gwcs.extension:GWCSExtension',
'bandit.formatters': 'bson = bandit_bson:formatter'}
DOCS_REQUIRE = [
'sphinx',
'sphinx-automodapi',
'sphinx-rtd-theme',
'stsci-rtd-theme',
'sphinx-astropy',
'sphinx-asdf',
]
TESTS_REQUIRE = [
'pytest>=4.6,<6',
'pytest-doctestplus',
'scipy',
]
setup(name=PACKAGENAME,
use_scm_version=True,
setup_requires=['setuptools_scm'],
description=DESCRIPTION,
install_requires=[
'astropy>=4.1',
'numpy',
'scipy',
'asdf'],
packages=find_packages(),
extras_require={
'test': TESTS_REQUIRE,
'docs': DOCS_REQUIRE,
},
tests_require=TESTS_REQUIRE,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
license=LICENSE,
url=URL,
package_data=PACKAGE_DATA,
entry_points=entry_points,
) | setup.py |
import os
import sys
from setuptools import setup, find_packages
from configparser import ConfigParser
if sys.version_info < (3, 6):
error = """
GWCS supports Python versions 3.6 and above.
"""
sys.exit(error)
conf = ConfigParser()
conf.read(['setup.cfg'])
metadata = dict(conf.items('metadata'))
PACKAGENAME = metadata.get('name', 'packagename')
DESCRIPTION = metadata.get('description', 'Astropy affiliated package')
AUTHOR = metadata.get('author', '')
AUTHOR_EMAIL = metadata.get('author_email', '')
LICENSE = metadata.get('license', 'unknown')
URL = metadata.get('url', 'http://astropy.org')
def get_package_data():
# Installs the schema files
schemas = []
root = os.path.join(PACKAGENAME, 'schemas')
for node, dirs, files in os.walk(root):
for fname in files:
if fname.endswith('.yaml'):
schemas.append(
os.path.relpath(os.path.join(node, fname), root))
# In the package directory, install to the subdirectory 'schemas'
schemas = [os.path.join('schemas', s) for s in schemas]
return schemas
schemas = get_package_data()
PACKAGE_DATA ={'gwcs':schemas}
entry_points = {'asdf_extensions': 'gwcs = gwcs.extension:GWCSExtension',
'bandit.formatters': 'bson = bandit_bson:formatter'}
DOCS_REQUIRE = [
'sphinx',
'sphinx-automodapi',
'sphinx-rtd-theme',
'stsci-rtd-theme',
'sphinx-astropy',
'sphinx-asdf',
]
TESTS_REQUIRE = [
'pytest>=4.6,<6',
'pytest-doctestplus',
'scipy',
]
setup(name=PACKAGENAME,
use_scm_version=True,
setup_requires=['setuptools_scm'],
description=DESCRIPTION,
install_requires=[
'astropy>=4.1',
'numpy',
'scipy',
'asdf'],
packages=find_packages(),
extras_require={
'test': TESTS_REQUIRE,
'docs': DOCS_REQUIRE,
},
tests_require=TESTS_REQUIRE,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
license=LICENSE,
url=URL,
package_data=PACKAGE_DATA,
entry_points=entry_points,
) | 0.310067 | 0.11187 |
import math
import random
from utils.NoteUtils import NoteUtils
from utils.ScaleUtils import ScaleUtils
class DNA:
# TODO: add a parameter called melody_range_octaves
def __init__(self, genes_length, key_root_note, octave, mode, composition_parameters, underlying_harmony,
is_continuation, is_variation, do_resolution, target_melody, note_to_continue):
self.genes = []
self.fitness = None
self.genes_length = genes_length
self.key_root_note = key_root_note
self.octave = octave
self.mode = mode
self.key_root_note_octave = NoteUtils.transpose_note_value_to_octave(key_root_note, octave)
self.composition_parameters = composition_parameters
self.melody_range_octaves = composition_parameters.get_melody_range_octaves()
self.scale = ScaleUtils.generate_scale_from_key_root_value(self.key_root_note_octave, mode, self.melody_range_octaves)
self.underlying_harmony = underlying_harmony
self.is_continuation = is_continuation
self.is_variation = is_variation
self.do_resolution = do_resolution
self.target_melody = target_melody
self.note_to_continue = note_to_continue
self.number_of_melodic_elements = 0
self.number_of_note_extensions = 0
self.melody_length = self.genes_length
self.number_of_chord_tones = 0
self.number_of_intervals = 0
self.interval_size_sum = 0
self.melody_range = 0
self.rhythmic_range = 0
self.first_note_value = -1
self.resolution_intensity = 0
self.melody_intervals_list = []
self.melody_lengths_list = []
self.generate_new_genes()
def get_genes(self):
return self.genes
def set_genes(self, new_genes):
self.genes = new_genes
def get_fitness(self):
return self.fitness
def calculate_fitness(self):
score = 0
self.calculate_melody_parameters()
score += 1 - abs(self.composition_parameters.get_melody_amount() - self.calculate_melody_amount())
score += 1 - abs(self.composition_parameters.get_note_extension_amount() - self.calculate_note_extension_amount())
score += ((self.melody_range_octaves*12) - abs(self.composition_parameters.get_average_interval() - self.calculate_average_interval()))/ (self.melody_range_octaves*12)
score += ((self.melody_range_octaves*12) - abs(self.composition_parameters.get_melody_range() - self.calculate_melody_range()))/ (self.melody_range_octaves*12)
score += (1 - abs(self.composition_parameters.get_melody_to_harmony_fit() - self.calculate_melody_to_harmony_fit()))
# score += (self.genes_length - abs(self.composition_parameters.get_rhythmic_range() - self.calculate_rhythmic_range()))/self.genes_length
score += (1 - abs(self.composition_parameters.get_interval_consonance() - self.calculate_interval_consonance()))
if self.do_resolution:
score += (1 - abs(
self.composition_parameters.get_resolution_intensity() - self.calculate_resolution_intensity()))
if self.is_continuation:
score += ((self.melody_range_octaves*12) - abs(
self.composition_parameters.get_average_interval() - self.calculate_continuation())) / (self.melody_range_octaves*12)
if self.is_variation:
score += self.calculate_similarity()
self.fitness = score
def generate_new_genes(self):
# we can generate:
# -- note_value in a 2 octave range from key_root_note
# -- e for extended
# -- p for pause
previous_char = "none"
for i in range(self.genes_length):
current_char = self.generate_single_gene_char(previous_char)
previous_char = current_char
self.genes.append(current_char)
def generate_single_gene_char(self, previous_char):
note_or_pause_weights = [self.composition_parameters.get_melody_amount(),
1 - self.composition_parameters.get_melody_amount()]
note_or_pause_choices = ["note", "pause"]
note_or_pause = random.choices(note_or_pause_choices, note_or_pause_weights, k=1)[0]
if note_or_pause == "note":
if previous_char == "none" or previous_char == "p":
generated_char = str(random.choice(self.scale).get_note_value())
else:
new_note_or_extend_weights = [1 * (1 - self.composition_parameters.get_note_extension_amount()),
5 * self.composition_parameters.get_note_extension_amount()]
new_note_or_extend_choices = ["new_note", "extend"]
new_note_or_extend = random.choices(new_note_or_extend_choices, new_note_or_extend_weights, k=1)[0]
if new_note_or_extend == "new_note":
generated_char = str(random.choice(self.scale).get_note_value())
else:
generated_char = "e"
else:
generated_char = "p"
return generated_char
def crossover(self, partner):
child_genes = []
partner_genes = partner.get_genes()
possible_midpoints = []
for i in range(self.genes_length + 1):
if i == 0 or i == self.genes_length:
possible_midpoints.append(i)
elif not (partner_genes[i] == "e" and self.genes[i - 1] == "p"):
possible_midpoints.append(i)
midpoint = random.choice(possible_midpoints)
# no "p" "e"
for i in range(self.genes_length):
if i < midpoint:
child_genes.append(self.genes[i])
else:
child_genes.append(partner_genes[i])
child_dna = DNA(self.genes_length, self.key_root_note, self.octave, self.mode,
self.composition_parameters, self.underlying_harmony, self.is_continuation, self.is_variation,
self.do_resolution, self.target_melody, self.note_to_continue)
child_dna.set_genes(child_genes)
return child_dna
def mutate(self, mutation_rate):
new_genes = []
for i in range(self.genes_length):
if i - 1 < 0:
previous_char = "none"
else:
previous_char = self.genes[i - 1]
current_char = self.genes[i]
if i + 1 > self.genes_length - 1:
next_char = "none"
else:
next_char = self.genes[i + 1]
if random.random() < mutation_rate:
new_genes.append(self.generate_single_gene_char_mutation(previous_char, current_char, next_char))
else:
new_genes.append(current_char)
self.set_genes(new_genes)
def generate_single_gene_char_mutation(self, previous_char, current_char, next_char):
include_e = True
include_p = True
include_num = True
if previous_char == "p" or "none":
include_e = False
if next_char == "e":
include_p = False
choices = []
if include_p:
choices.append("p")
if include_e:
choices.append("e")
if include_num:
choices.append(str(random.choice(self.scale).get_note_value()))
new_char = random.choice(choices)
return new_char
def calculate_note_extension_amount(self):
# percentage of extension type melodic elements in all melodic elements
if self.number_of_melodic_elements == 0:
return 0
else:
extension_percentage = self.number_of_note_extensions/self.number_of_melodic_elements
return extension_percentage
def calculate_melody_amount(self):
# percentage of melodic elements in the whole melody
melody_percentage = self.number_of_melodic_elements/self.melody_length
return melody_percentage
def calculate_melody_to_harmony_fit(self):
# percentage of melodic elements that are chord tones in all melodic elements
if self.number_of_melodic_elements == 0:
return 1
else:
return self.number_of_chord_tones / self.number_of_melodic_elements
def calculate_melody_range(self):
return self.melody_range
def calculate_average_interval(self):
# average interval size in the melody
if self.number_of_intervals == 0:
return 0
else:
average_interval = self.interval_size_sum / self.number_of_intervals
return average_interval
def calculate_continuation(self):
continuation_interval = abs(self.first_note_value - self.note_to_continue)
return continuation_interval
def calculate_resolution_intensity(self):
# a metric that calculates the intensity of resolution in the last note of the melody
last_note_length = 0
chord_tones_in_last_note = 0
for i in range(self.genes_length-1, -1, -1):
if self.genes[i].isdigit():
last_note_value = int(self.genes[i])
underlying_chord_root = int(self.underlying_harmony[-1].get_root_note_value())
chord_tones_in_last_note += NoteUtils.are_note_values_the_same_note(last_note_value,
underlying_chord_root)
last_note_length = 1
while i+1 < self.genes_length and self.genes[i+1] == "e":
chord_tones_in_last_note += NoteUtils.are_note_values_the_same_note(last_note_value,
underlying_chord_root)
last_note_length += 1
i += 1
break
if last_note_length <= 4:
note_length_weight = last_note_length
else:
note_length_weight = 5
if last_note_length == 0:
return 1
else:
self.resolution_intensity = (note_length_weight * (chord_tones_in_last_note/last_note_length))/5
return self.resolution_intensity
def calculate_melody_parameters(self):
self.melody_intervals_list, self.melody_lengths_list = self.get_melody_intervals_list_and_melody_lengths_list(self.genes)
note_playing = None
note_playing_length = 0
highest_note = 0
lowest_note = 128
first_note_found = False
for i in range(self.genes_length):
if self.genes[i].isdigit():
self.number_of_melodic_elements += 1
if not first_note_found:
self.first_note_value = int(self.genes[i])
first_note_found = True
if note_playing is not None:
self.interval_size_sum += abs(int(note_playing) - int(self.genes[i]))
self.number_of_intervals += 1
note_playing = self.genes[i]
for note in self.underlying_harmony[i].get_notes():
if NoteUtils.are_note_values_the_same_note(int(note_playing), note.get_note_value()):
self.number_of_chord_tones += 1
break
current_note = int(self.genes[i])
if current_note < lowest_note:
lowest_note = current_note
if current_note > highest_note:
highest_note = current_note
elif self.genes[i] == "e":
self.number_of_melodic_elements += 1
self.number_of_note_extensions += 1
note_playing_length += 1
for note in self.underlying_harmony[i].get_notes():
if NoteUtils.are_note_values_the_same_note(int(note_playing), note.get_note_value()):
self.number_of_chord_tones += 1
break
elif self.genes[i] == "p":
note_playing = -1
self.melody_range = highest_note - lowest_note
def calculate_similarity(self):
# a metric that calculates similarity between the generated melody and the target melody. Used for creating
# variations on a motif-form
target_melody_intervals_list, target_melody_lengths_list = self.get_melody_intervals_list_and_melody_lengths_list(self.target_melody)
generated_melody_intervals_list = self.melody_intervals_list
generated_melody_lengths_list = self.melody_lengths_list
interval_distances_sum = 0
num_of_intervals = 0
num_of_lengths = 0
lengths_distances_sum = 0
for i in range(len(generated_melody_intervals_list)):
if not (generated_melody_intervals_list[i] is None or target_melody_intervals_list[i] is None):
num_of_intervals += 1
interval_distances_sum += abs(int(generated_melody_intervals_list[i]) - int(target_melody_intervals_list[i]))
elif not (generated_melody_intervals_list[i] is None and target_melody_intervals_list[i] is None):
num_of_intervals += 1
for i in range(len(generated_melody_lengths_list)):
if not (generated_melody_lengths_list[i] is None or target_melody_lengths_list[i] is None):
num_of_lengths += 1
lengths_distances_sum += abs(int(generated_melody_lengths_list[i]) - int(target_melody_lengths_list[i]))
elif not (generated_melody_lengths_list[i] is None and target_melody_lengths_list[i] is None):
num_of_lengths += 1
if num_of_intervals == 0:
interval_similarity = 1
else:
interval_similarity = 1 - (interval_distances_sum / (num_of_intervals * self.melody_range_octaves * 12))
lengths_similarity = 1 - (lengths_distances_sum/self.genes_length)
similarity = 0.7*interval_similarity + 0.3*lengths_similarity
return similarity
def get_melody_intervals_list_and_melody_lengths_list(self, melody):
interval_list_length = len(melody) - 1
interval_list = [None] * interval_list_length
interval_list_index = 0
previous_note = None
current_element = None
melody_lengths_list_length = len(melody)
melody_lengths_list = [None] * melody_lengths_list_length
melody_lengths_list_index = -1
for i in range(len(melody)):
if melody[i].isdigit():
current_note = int(melody[i])
if previous_note is not None:
interval_list[interval_list_index] = current_note - previous_note
interval_list_index += 1
previous_note = current_note
current_element = melody[i]
melody_lengths_list_index += 1
melody_lengths_list[melody_lengths_list_index] = 1
if melody[i] == "p" and current_element == "p":
melody_lengths_list[melody_lengths_list_index] += 1
elif melody[i] == "p":
current_element = melody[i]
melody_lengths_list_index += 1
melody_lengths_list[melody_lengths_list_index] = 1
if melody[i] == "e":
melody_lengths_list[melody_lengths_list_index] += 1
return interval_list, melody_lengths_list
def calculate_rhythmic_range(self):
shortest_note = self.genes_length
longest_note = 0
for note_length in self.melody_lengths_list:
if note_length is not None:
if note_length > longest_note:
longest_note = longest_note
if note_length < shortest_note:
shortest_note = note_length
else:
break
rhythmic_range = longest_note - shortest_note
return rhythmic_range
def calculate_interval_consonance(self):
consonance_score = 0
intervals = 0
for interval in self.melody_intervals_list:
if interval is not None:
intervals += 1
interval_quality = abs(interval) % 12
if interval_quality == 0 or interval_quality == 7 or interval_quality == 5:
consonance_score += 1
elif interval_quality == 3 or interval_quality == 4 or interval_quality == 2 or \
interval_quality == 8 or interval_quality == 9:
consonance_score += 0.5
elif interval_quality == 1 or interval_quality == 6 or interval_quality == 10 or interval_quality == 11:
consonance_score += 0
if intervals == 0:
return 1
else:
return consonance_score / intervals | composingAlgorithms/DNA.py | import math
import random
from utils.NoteUtils import NoteUtils
from utils.ScaleUtils import ScaleUtils
class DNA:
# TODO: add a parameter called melody_range_octaves
def __init__(self, genes_length, key_root_note, octave, mode, composition_parameters, underlying_harmony,
is_continuation, is_variation, do_resolution, target_melody, note_to_continue):
self.genes = []
self.fitness = None
self.genes_length = genes_length
self.key_root_note = key_root_note
self.octave = octave
self.mode = mode
self.key_root_note_octave = NoteUtils.transpose_note_value_to_octave(key_root_note, octave)
self.composition_parameters = composition_parameters
self.melody_range_octaves = composition_parameters.get_melody_range_octaves()
self.scale = ScaleUtils.generate_scale_from_key_root_value(self.key_root_note_octave, mode, self.melody_range_octaves)
self.underlying_harmony = underlying_harmony
self.is_continuation = is_continuation
self.is_variation = is_variation
self.do_resolution = do_resolution
self.target_melody = target_melody
self.note_to_continue = note_to_continue
self.number_of_melodic_elements = 0
self.number_of_note_extensions = 0
self.melody_length = self.genes_length
self.number_of_chord_tones = 0
self.number_of_intervals = 0
self.interval_size_sum = 0
self.melody_range = 0
self.rhythmic_range = 0
self.first_note_value = -1
self.resolution_intensity = 0
self.melody_intervals_list = []
self.melody_lengths_list = []
self.generate_new_genes()
def get_genes(self):
return self.genes
def set_genes(self, new_genes):
self.genes = new_genes
def get_fitness(self):
return self.fitness
def calculate_fitness(self):
score = 0
self.calculate_melody_parameters()
score += 1 - abs(self.composition_parameters.get_melody_amount() - self.calculate_melody_amount())
score += 1 - abs(self.composition_parameters.get_note_extension_amount() - self.calculate_note_extension_amount())
score += ((self.melody_range_octaves*12) - abs(self.composition_parameters.get_average_interval() - self.calculate_average_interval()))/ (self.melody_range_octaves*12)
score += ((self.melody_range_octaves*12) - abs(self.composition_parameters.get_melody_range() - self.calculate_melody_range()))/ (self.melody_range_octaves*12)
score += (1 - abs(self.composition_parameters.get_melody_to_harmony_fit() - self.calculate_melody_to_harmony_fit()))
# score += (self.genes_length - abs(self.composition_parameters.get_rhythmic_range() - self.calculate_rhythmic_range()))/self.genes_length
score += (1 - abs(self.composition_parameters.get_interval_consonance() - self.calculate_interval_consonance()))
if self.do_resolution:
score += (1 - abs(
self.composition_parameters.get_resolution_intensity() - self.calculate_resolution_intensity()))
if self.is_continuation:
score += ((self.melody_range_octaves*12) - abs(
self.composition_parameters.get_average_interval() - self.calculate_continuation())) / (self.melody_range_octaves*12)
if self.is_variation:
score += self.calculate_similarity()
self.fitness = score
def generate_new_genes(self):
# we can generate:
# -- note_value in a 2 octave range from key_root_note
# -- e for extended
# -- p for pause
previous_char = "none"
for i in range(self.genes_length):
current_char = self.generate_single_gene_char(previous_char)
previous_char = current_char
self.genes.append(current_char)
def generate_single_gene_char(self, previous_char):
note_or_pause_weights = [self.composition_parameters.get_melody_amount(),
1 - self.composition_parameters.get_melody_amount()]
note_or_pause_choices = ["note", "pause"]
note_or_pause = random.choices(note_or_pause_choices, note_or_pause_weights, k=1)[0]
if note_or_pause == "note":
if previous_char == "none" or previous_char == "p":
generated_char = str(random.choice(self.scale).get_note_value())
else:
new_note_or_extend_weights = [1 * (1 - self.composition_parameters.get_note_extension_amount()),
5 * self.composition_parameters.get_note_extension_amount()]
new_note_or_extend_choices = ["new_note", "extend"]
new_note_or_extend = random.choices(new_note_or_extend_choices, new_note_or_extend_weights, k=1)[0]
if new_note_or_extend == "new_note":
generated_char = str(random.choice(self.scale).get_note_value())
else:
generated_char = "e"
else:
generated_char = "p"
return generated_char
def crossover(self, partner):
child_genes = []
partner_genes = partner.get_genes()
possible_midpoints = []
for i in range(self.genes_length + 1):
if i == 0 or i == self.genes_length:
possible_midpoints.append(i)
elif not (partner_genes[i] == "e" and self.genes[i - 1] == "p"):
possible_midpoints.append(i)
midpoint = random.choice(possible_midpoints)
# no "p" "e"
for i in range(self.genes_length):
if i < midpoint:
child_genes.append(self.genes[i])
else:
child_genes.append(partner_genes[i])
child_dna = DNA(self.genes_length, self.key_root_note, self.octave, self.mode,
self.composition_parameters, self.underlying_harmony, self.is_continuation, self.is_variation,
self.do_resolution, self.target_melody, self.note_to_continue)
child_dna.set_genes(child_genes)
return child_dna
def mutate(self, mutation_rate):
new_genes = []
for i in range(self.genes_length):
if i - 1 < 0:
previous_char = "none"
else:
previous_char = self.genes[i - 1]
current_char = self.genes[i]
if i + 1 > self.genes_length - 1:
next_char = "none"
else:
next_char = self.genes[i + 1]
if random.random() < mutation_rate:
new_genes.append(self.generate_single_gene_char_mutation(previous_char, current_char, next_char))
else:
new_genes.append(current_char)
self.set_genes(new_genes)
def generate_single_gene_char_mutation(self, previous_char, current_char, next_char):
include_e = True
include_p = True
include_num = True
if previous_char == "p" or "none":
include_e = False
if next_char == "e":
include_p = False
choices = []
if include_p:
choices.append("p")
if include_e:
choices.append("e")
if include_num:
choices.append(str(random.choice(self.scale).get_note_value()))
new_char = random.choice(choices)
return new_char
def calculate_note_extension_amount(self):
# percentage of extension type melodic elements in all melodic elements
if self.number_of_melodic_elements == 0:
return 0
else:
extension_percentage = self.number_of_note_extensions/self.number_of_melodic_elements
return extension_percentage
def calculate_melody_amount(self):
# percentage of melodic elements in the whole melody
melody_percentage = self.number_of_melodic_elements/self.melody_length
return melody_percentage
def calculate_melody_to_harmony_fit(self):
# percentage of melodic elements that are chord tones in all melodic elements
if self.number_of_melodic_elements == 0:
return 1
else:
return self.number_of_chord_tones / self.number_of_melodic_elements
def calculate_melody_range(self):
return self.melody_range
def calculate_average_interval(self):
# average interval size in the melody
if self.number_of_intervals == 0:
return 0
else:
average_interval = self.interval_size_sum / self.number_of_intervals
return average_interval
def calculate_continuation(self):
continuation_interval = abs(self.first_note_value - self.note_to_continue)
return continuation_interval
def calculate_resolution_intensity(self):
# a metric that calculates the intensity of resolution in the last note of the melody
last_note_length = 0
chord_tones_in_last_note = 0
for i in range(self.genes_length-1, -1, -1):
if self.genes[i].isdigit():
last_note_value = int(self.genes[i])
underlying_chord_root = int(self.underlying_harmony[-1].get_root_note_value())
chord_tones_in_last_note += NoteUtils.are_note_values_the_same_note(last_note_value,
underlying_chord_root)
last_note_length = 1
while i+1 < self.genes_length and self.genes[i+1] == "e":
chord_tones_in_last_note += NoteUtils.are_note_values_the_same_note(last_note_value,
underlying_chord_root)
last_note_length += 1
i += 1
break
if last_note_length <= 4:
note_length_weight = last_note_length
else:
note_length_weight = 5
if last_note_length == 0:
return 1
else:
self.resolution_intensity = (note_length_weight * (chord_tones_in_last_note/last_note_length))/5
return self.resolution_intensity
def calculate_melody_parameters(self):
self.melody_intervals_list, self.melody_lengths_list = self.get_melody_intervals_list_and_melody_lengths_list(self.genes)
note_playing = None
note_playing_length = 0
highest_note = 0
lowest_note = 128
first_note_found = False
for i in range(self.genes_length):
if self.genes[i].isdigit():
self.number_of_melodic_elements += 1
if not first_note_found:
self.first_note_value = int(self.genes[i])
first_note_found = True
if note_playing is not None:
self.interval_size_sum += abs(int(note_playing) - int(self.genes[i]))
self.number_of_intervals += 1
note_playing = self.genes[i]
for note in self.underlying_harmony[i].get_notes():
if NoteUtils.are_note_values_the_same_note(int(note_playing), note.get_note_value()):
self.number_of_chord_tones += 1
break
current_note = int(self.genes[i])
if current_note < lowest_note:
lowest_note = current_note
if current_note > highest_note:
highest_note = current_note
elif self.genes[i] == "e":
self.number_of_melodic_elements += 1
self.number_of_note_extensions += 1
note_playing_length += 1
for note in self.underlying_harmony[i].get_notes():
if NoteUtils.are_note_values_the_same_note(int(note_playing), note.get_note_value()):
self.number_of_chord_tones += 1
break
elif self.genes[i] == "p":
note_playing = -1
self.melody_range = highest_note - lowest_note
def calculate_similarity(self):
# a metric that calculates similarity between the generated melody and the target melody. Used for creating
# variations on a motif-form
target_melody_intervals_list, target_melody_lengths_list = self.get_melody_intervals_list_and_melody_lengths_list(self.target_melody)
generated_melody_intervals_list = self.melody_intervals_list
generated_melody_lengths_list = self.melody_lengths_list
interval_distances_sum = 0
num_of_intervals = 0
num_of_lengths = 0
lengths_distances_sum = 0
for i in range(len(generated_melody_intervals_list)):
if not (generated_melody_intervals_list[i] is None or target_melody_intervals_list[i] is None):
num_of_intervals += 1
interval_distances_sum += abs(int(generated_melody_intervals_list[i]) - int(target_melody_intervals_list[i]))
elif not (generated_melody_intervals_list[i] is None and target_melody_intervals_list[i] is None):
num_of_intervals += 1
for i in range(len(generated_melody_lengths_list)):
if not (generated_melody_lengths_list[i] is None or target_melody_lengths_list[i] is None):
num_of_lengths += 1
lengths_distances_sum += abs(int(generated_melody_lengths_list[i]) - int(target_melody_lengths_list[i]))
elif not (generated_melody_lengths_list[i] is None and target_melody_lengths_list[i] is None):
num_of_lengths += 1
if num_of_intervals == 0:
interval_similarity = 1
else:
interval_similarity = 1 - (interval_distances_sum / (num_of_intervals * self.melody_range_octaves * 12))
lengths_similarity = 1 - (lengths_distances_sum/self.genes_length)
similarity = 0.7*interval_similarity + 0.3*lengths_similarity
return similarity
def get_melody_intervals_list_and_melody_lengths_list(self, melody):
interval_list_length = len(melody) - 1
interval_list = [None] * interval_list_length
interval_list_index = 0
previous_note = None
current_element = None
melody_lengths_list_length = len(melody)
melody_lengths_list = [None] * melody_lengths_list_length
melody_lengths_list_index = -1
for i in range(len(melody)):
if melody[i].isdigit():
current_note = int(melody[i])
if previous_note is not None:
interval_list[interval_list_index] = current_note - previous_note
interval_list_index += 1
previous_note = current_note
current_element = melody[i]
melody_lengths_list_index += 1
melody_lengths_list[melody_lengths_list_index] = 1
if melody[i] == "p" and current_element == "p":
melody_lengths_list[melody_lengths_list_index] += 1
elif melody[i] == "p":
current_element = melody[i]
melody_lengths_list_index += 1
melody_lengths_list[melody_lengths_list_index] = 1
if melody[i] == "e":
melody_lengths_list[melody_lengths_list_index] += 1
return interval_list, melody_lengths_list
def calculate_rhythmic_range(self):
shortest_note = self.genes_length
longest_note = 0
for note_length in self.melody_lengths_list:
if note_length is not None:
if note_length > longest_note:
longest_note = longest_note
if note_length < shortest_note:
shortest_note = note_length
else:
break
rhythmic_range = longest_note - shortest_note
return rhythmic_range
def calculate_interval_consonance(self):
consonance_score = 0
intervals = 0
for interval in self.melody_intervals_list:
if interval is not None:
intervals += 1
interval_quality = abs(interval) % 12
if interval_quality == 0 or interval_quality == 7 or interval_quality == 5:
consonance_score += 1
elif interval_quality == 3 or interval_quality == 4 or interval_quality == 2 or \
interval_quality == 8 or interval_quality == 9:
consonance_score += 0.5
elif interval_quality == 1 or interval_quality == 6 or interval_quality == 10 or interval_quality == 11:
consonance_score += 0
if intervals == 0:
return 1
else:
return consonance_score / intervals | 0.30013 | 0.32122 |
import os
import shutil
import logging
import subprocess
import zipfile
import requests
class Downloader:
def __init__(self, outdir, bookdir, cookies):
self.outdir = outdir
self.bookdir = bookdir
self.cookies = cookies
def save_bytes(self, bts, name):
fpath = os.path.join(self.bookdir, name)
dirpath = os.path.dirname(fpath)
if not os.path.exists(dirpath):
os.makedirs(dirpath)
fout = open(fpath, "wb")
fout.write(bts)
fout.close()
def request_url(self, url):
logging.debug("downloading %s ..." % url)
response = requests.get(url, cookies=self.cookies)
logging.debug("response:%s", response)
assert response.status_code in [200], response.status_code
return response
def path(self, sub):
return os.path.join(self.bookdir, sub)
def delete_downloaded(self):
shutil.rmtree(self.bookdir)
def delete_css(self):
for root, dirs, files in os.walk(".", topdown=False):
for name in files:
if name.lower().endswith(".css"):
f = open(os.path.join(root, name), "w")
f.write("")
f.close()
def make_epub(self):
assert os.path.exists(self.bookdir), self.bookdir
epubfpath = self.bookdir + ".epub"
zipf = zipfile.ZipFile(epubfpath, 'w', zipfile.ZIP_DEFLATED)
zip_dir(self.bookdir, zipf)
zipf.close()
return epubfpath
def human_name(self, title):
human_name = "{}/{}.epub".format(self.outdir, title)
os.rename("{}.epub".format(self.bookdir), human_name)
return human_name
def autofix(self):
book_file = "{}.epub".format(self.bookdir)
temp_file = "{}/temporary.epub".format(self.outdir)
try:
ebook = subprocess.check_output(['which', 'ebook-convert'])
ebook = ebook.strip(b'\n')
except subprocess.CalledProcessError:
logging.error("ebook-convert not found in system. it's are part of Calibre application, try to install its")
return False
try:
subprocess.check_call([ebook, os.path.abspath(book_file), os.path.abspath(temp_file)], shell=False,
stdout=subprocess.DEVNULL)
os.unlink(book_file)
os.rename(temp_file, book_file)
except subprocess.CalledProcessError:
logging.error(
"Autofix finished with error. Try manualy call autofix: `which ebook-convert` input_file output_file")
def zip_dir(path, ziph):
# zip_dir is zipfile handle
top = path
for root, dirs, files in os.walk(path):
for file in files:
src = os.path.join(root, file)
ziph.write(filename=src, arcname=os.path.relpath(src, top)) | bookmate/downloader.py | import os
import shutil
import logging
import subprocess
import zipfile
import requests
class Downloader:
    """Downloads a book's resources into *bookdir* and packs them into an EPUB.

    Finished .epub files end up in *outdir*; *cookies* are sent with every
    HTTP request (e.g. an authenticated session).
    """

    def __init__(self, outdir, bookdir, cookies):
        self.outdir = outdir    # destination directory for finished .epub files
        self.bookdir = bookdir  # working directory holding the raw book files
        self.cookies = cookies  # forwarded to requests.get for every download

    def save_bytes(self, bts, name):
        """Write the bytes *bts* to bookdir/name, creating parent dirs as needed."""
        fpath = os.path.join(self.bookdir, name)
        dirpath = os.path.dirname(fpath)
        if dirpath:
            # exist_ok avoids the check-then-create race of the original code.
            os.makedirs(dirpath, exist_ok=True)
        # Context manager guarantees the handle is closed even if write() fails.
        with open(fpath, "wb") as fout:
            fout.write(bts)

    def request_url(self, url):
        """GET *url* with the stored cookies; asserts a 200 response and returns it."""
        logging.debug("downloading %s ...", url)  # lazy %-style logging args
        response = requests.get(url, cookies=self.cookies)
        logging.debug("response:%s", response)
        assert response.status_code in [200], response.status_code
        return response

    def path(self, sub):
        """Return the path of *sub* inside the book working directory."""
        return os.path.join(self.bookdir, sub)

    def delete_downloaded(self):
        """Remove the whole working directory tree."""
        shutil.rmtree(self.bookdir)

    def delete_css(self):
        """Blank out every .css file under the current working directory.

        NOTE(review): this walks "." rather than self.bookdir — presumably the
        caller chdirs into the book directory first; confirm before changing.
        """
        for root, dirs, files in os.walk(".", topdown=False):
            for name in files:
                if name.lower().endswith(".css"):
                    with open(os.path.join(root, name), "w") as f:
                        f.write("")

    def make_epub(self):
        """Zip the book directory into <bookdir>.epub and return its path."""
        assert os.path.exists(self.bookdir), self.bookdir
        epubfpath = self.bookdir + ".epub"
        # ZipFile as a context manager ensures the archive is flushed/closed.
        with zipfile.ZipFile(epubfpath, 'w', zipfile.ZIP_DEFLATED) as zipf:
            zip_dir(self.bookdir, zipf)
        return epubfpath

    def human_name(self, title):
        """Move <bookdir>.epub into outdir under the human-readable *title*."""
        human_name = "{}/{}.epub".format(self.outdir, title)
        os.rename("{}.epub".format(self.bookdir), human_name)
        return human_name

    def autofix(self):
        """Round-trip the EPUB through Calibre's ebook-convert to repair it.

        Returns False when ebook-convert is not installed; None otherwise
        (matching the original behaviour).
        """
        book_file = "{}.epub".format(self.bookdir)
        temp_file = "{}/temporary.epub".format(self.outdir)
        # shutil.which is portable; the original shelled out to `which`, which
        # fails outright on systems that do not ship that binary.
        ebook = shutil.which('ebook-convert')
        if ebook is None:
            logging.error("ebook-convert not found in system. It is part of the Calibre application; try installing it")
            return False
        try:
            subprocess.check_call([ebook, os.path.abspath(book_file), os.path.abspath(temp_file)], shell=False,
                                  stdout=subprocess.DEVNULL)
            os.unlink(book_file)
            os.rename(temp_file, book_file)
        except subprocess.CalledProcessError:
            logging.error(
                "Autofix finished with error. Try manually calling autofix: `which ebook-convert` input_file output_file")
def zip_dir(path, ziph):
    """Add every file under *path* to the open ZipFile *ziph*, storing archive
    names relative to *path* so the zip has no leading directory prefix."""
    # ziph is the zipfile.ZipFile handle (the original comment mislabelled it)
    top = path
    for root, dirs, files in os.walk(path):
        for file in files:
            src = os.path.join(root, file)
            ziph.write(filename=src, arcname=os.path.relpath(src, top)) | 0.242385 | 0.106551 |
import math
import random
import json
class Neuron():
""" Represents a single Neuron/Node """
def __init__(self, alpha, weights):
""" Initialise a neuron with INPUT weights and a specific alpha """
self._weights = weights
self._alpha = alpha
def _input(self, inputs):
""" Returns the sum of the inputs multiplied by the correct weight """
return sum([w*i for w,i in zip(self._weights, inputs)])
def _g(self, x):
"""
Performs the sigmoid function equation from lectures with the defined
alpha value corresponding to this object
"""
e_x = math.exp(-self._alpha * x)
return (1.0 / (1 + e_x))
def _g_prime(self, x):
"""
Performs the derivative of the sigmoid function for a given x
value using the alpha value associated with this neuron
"""
return self._g(x)*(1 - self._g(x))
def output(self, inputs):
"""
Outputs the sigmoid function applied to the weighted sum of the
inputs, once more using the alpha value associated with this neuron
"""
self._in_j = self._input(inputs) #Previous weighted inputs
return self._g(self._in_j)
def _update_weights(self, alpha, delta):
""" Internal method for adapting weights """
res = []
for j, weight in enumerate(self._weights):
self._weights[j] = weight + (alpha * delta * self._g_prime(self._in_j))
#print("Prev weight: {} New weight: {}".format(weight, self._weights[j]))
res.append(self._weights[j] - weight)
return res[0]
class Layer():
""" Represents a single layer (hidden or otherwise) """
def __init__(self, weights, alphas):
""" Weights is a 2-D list, alphas is a 1-D list """
self._neurons = [Neuron(a, w) for w, a in zip(weights, alphas)]
def calc(self, inputs):
""" Calculates the result of these inputs on this layer """
return [neuron.output(inputs) for neuron in self._neurons]
class NeuralNetwork():
""" Represents the entire Neural Network """
def __init__(self, weights=[], alphas=[]):
"""
Weights is a 3-D list. First index for the layer, second for the neuron
which will have a weight for each input.
Alphas is a 2-D list. One for each layer, and each neuron has exactly
1 alpha.
"""
self._layers = [Layer(w, a) for w, a in zip(weights, alphas)]
def new_layer(self, nodes, inputs, alpha=0.1):
"""
The number of nodes in this layer, with the number of inputs to each node
"""
weights = [[random.uniform(-0.1, 0.1) for _ in range(inputs)] for i in range(nodes)]
alphas = [alpha for _ in range(nodes)]
self._layers.append(Layer(weights, alphas))
def new_initial_layer(self, nodes, inputs, alpha=0.1):
""" Creates weightings for the initial layer as 1 """
weights = [[1 for _ in range(inputs)] for i in range(nodes)]
alphas = [alpha for _ in range(nodes)]
self._layers.insert(0, Layer(weights, alphas))
def new_random_layer(self, nodes, inputs, alpha=0.1):
""" Creates a random weighting for a layer in the range -1, 1 """
weights = [[random.uniform(-1, 1) for _ in range(inputs)] for i in range(nodes)]
alphas = [alpha for _ in range(nodes)]
self._layers.append(Layer(weights, alphas))
def run(self, inputs):
""" Run a neural network with these given inputs """
for layer in self._layers:
inputs = layer.calc(inputs)
return inputs
def _back_prop_outer(self, layer, alpha, expected, outputs, inputs):
""" Internal method for back propogation of the outer layer """
res = []
for k, outer_neuron in enumerate(layer._neurons):
err_k = expected[k] - outputs[k]
err_sig = err_k * outer_neuron._g_prime(outputs[k])
res.append(outer_neuron._update_weights(alpha, err_sig))
return res
def _delta(self, output, err, neuron):
""" Calculate the necessary delta value """
return neuron._g_prime(output) * err
def _back_prop_hidden(self, layer, alpha, next_layer, outputs, inputs, output_delta):
"""
Internal method for calculating the back propogation for a hidden layer
"""
res = []
for i, neuron in enumerate(layer._neurons):
err_sig = sum([neuron._weights[i] * output_delta[j] for j, neuron in enumerate(next_layer._neurons)])
res.append(neuron._update_weights(alpha, err_sig))
return res
def back_prop(self, data, alpha=2):
"""
Data is a list of (input, expected)
Both input and expected should be lists
"""
reversed_layers = reversed(self._layers[1:]) #Exclude the first layer from back prop
for inputs, expected in data:
outputs = self.run(inputs)
output_delta = None
next_layer = None
for i, layer in enumerate(reversed_layers):
if i == 0:
output_delta = self._back_prop_outer(layer, \
alpha, expected, outputs, inputs)
next_layer = layer
else:
output_delta = self._back_prop_hidden(layer, alpha, next_layer, outputs, inputs, output_delta)
next_layer = layer
def output(self, filename):
""" Output the neural network as is to JSON """
with open(filename, 'w') as f:
op = {}
layer_res = []
alphas_res = []
for layer in self._layers:
weights = []
alphas = []
for neuron in layer._neurons:
weights.append(neuron._weights)
alphas.append(neuron._alpha)
layer_res.append(weights)
alphas_res.append(alphas)
op['layers'] = layer_res
op['alphas'] = alphas_res
json.dump(op, f, indent='\t')
def adapt_alpha(self, alpha):
for layers in self._layers:
for neurons in layers._neurons:
neurons._alpha = alpha | neuralnetwork.py | import math
import random
import json
class Neuron():
    """A single node: holds its incoming weights and a sigmoid steepness alpha."""

    def __init__(self, alpha, weights):
        """Store the incoming-connection weights and the sigmoid's alpha."""
        self._weights = weights
        self._alpha = alpha

    def _input(self, inputs):
        """Weighted sum of the inputs (the neuron's pre-activation, in_j)."""
        total = 0
        for weight, value in zip(self._weights, inputs):
            total += weight * value
        return total

    def _g(self, x):
        """Sigmoid activation 1 / (1 + e^(-alpha * x))."""
        return 1.0 / (1 + math.exp(-self._alpha * x))

    def _g_prime(self, x):
        """Derivative of the sigmoid, expressed as g(x) * (1 - g(x))."""
        gx = self._g(x)
        return gx * (1 - gx)

    def output(self, inputs):
        """Activation for *inputs*; remembers the pre-activation in self._in_j."""
        self._in_j = self._input(inputs)
        return self._g(self._in_j)

    def _update_weights(self, alpha, delta):
        """Shift every weight by alpha * delta * g'(in_j); returns the first shift."""
        # g'(in_j) is constant across the loop, so compute the step once.
        step = alpha * delta * self._g_prime(self._in_j)
        shifts = []
        for idx, before in enumerate(self._weights):
            self._weights[idx] = before + step
            shifts.append(self._weights[idx] - before)
        return shifts[0]
class Layer():
    """One layer of the network: a plain collection of Neurons."""

    def __init__(self, weights, alphas):
        """*weights* is a 2-D list (one row per neuron); *alphas* is 1-D."""
        self._neurons = []
        for neuron_weights, neuron_alpha in zip(weights, alphas):
            self._neurons.append(Neuron(neuron_alpha, neuron_weights))

    def calc(self, inputs):
        """Apply every neuron to *inputs*; returns the list of activations."""
        return [unit.output(inputs) for unit in self._neurons]
class NeuralNetwork():
    """ Represents the entire Neural Network """

    def __init__(self, weights=None, alphas=None):
        """
        Weights is a 3-D list. First index for the layer, second for the neuron
        which will have a weight for each input.
        Alphas is a 2-D list. One for each layer, and each neuron has exactly
        1 alpha.

        None (the default) means "start with no layers" — avoids the
        mutable-default-argument pitfall of the original signature.
        """
        weights = weights if weights is not None else []
        alphas = alphas if alphas is not None else []
        self._layers = [Layer(w, a) for w, a in zip(weights, alphas)]

    def new_layer(self, nodes, inputs, alpha=0.1):
        """Append a layer of *nodes* neurons with random weights in [-0.1, 0.1]."""
        weights = [[random.uniform(-0.1, 0.1) for _ in range(inputs)] for _ in range(nodes)]
        self._layers.append(Layer(weights, [alpha] * nodes))

    def new_initial_layer(self, nodes, inputs, alpha=0.1):
        """ Creates weightings for the initial layer as 1 """
        weights = [[1 for _ in range(inputs)] for _ in range(nodes)]
        self._layers.insert(0, Layer(weights, [alpha] * nodes))

    def new_random_layer(self, nodes, inputs, alpha=0.1):
        """ Creates a random weighting for a layer in the range -1, 1 """
        weights = [[random.uniform(-1, 1) for _ in range(inputs)] for _ in range(nodes)]
        self._layers.append(Layer(weights, [alpha] * nodes))

    def run(self, inputs):
        """Feed *inputs* forward through every layer; returns the final outputs."""
        for layer in self._layers:
            inputs = layer.calc(inputs)
        return inputs

    def _back_prop_outer(self, layer, alpha, expected, outputs, inputs):
        """Back-propagate the output layer; returns the per-neuron weight deltas."""
        res = []
        for k, outer_neuron in enumerate(layer._neurons):
            err_k = expected[k] - outputs[k]
            err_sig = err_k * outer_neuron._g_prime(outputs[k])
            res.append(outer_neuron._update_weights(alpha, err_sig))
        return res

    def _delta(self, output, err, neuron):
        """ Calculate the necessary delta value """
        return neuron._g_prime(output) * err

    def _back_prop_hidden(self, layer, alpha, next_layer, outputs, inputs, output_delta):
        """Back-propagate one hidden layer given the deltas of the layer above."""
        res = []
        for i, neuron in enumerate(layer._neurons):
            # Weight i of each downstream neuron connects hidden neuron i to it.
            # (The original shadowed `neuron` inside the comprehension; the
            # downstream neuron's weights were, and still are, the ones read.)
            err_sig = sum(down._weights[i] * output_delta[j]
                          for j, down in enumerate(next_layer._neurons))
            res.append(neuron._update_weights(alpha, err_sig))
        return res

    def back_prop(self, data, alpha=2):
        """
        Data is a list of (input, expected).
        Both input and expected should be lists.

        Bug fix: reversed() returns a one-shot iterator, so the original code
        only back-propagated the FIRST training sample — every later item saw
        an exhausted iterator. Materialising the list trains on all samples.
        """
        reversed_layers = list(reversed(self._layers[1:]))  # first layer excluded
        for inputs, expected in data:
            outputs = self.run(inputs)
            output_delta = None
            next_layer = None
            for i, layer in enumerate(reversed_layers):
                if i == 0:
                    output_delta = self._back_prop_outer(layer, alpha, expected, outputs, inputs)
                else:
                    output_delta = self._back_prop_hidden(layer, alpha, next_layer, outputs, inputs, output_delta)
                next_layer = layer

    def output(self, filename):
        """Serialise the network's weights and alphas to *filename* as JSON."""
        with open(filename, 'w') as f:
            op = {'layers': [], 'alphas': []}
            for layer in self._layers:
                op['layers'].append([n._weights for n in layer._neurons])
                op['alphas'].append([n._alpha for n in layer._neurons])
            json.dump(op, f, indent='\t')
    def adapt_alpha(self, alpha):
        # Overwrite every neuron's sigmoid steepness with the single new alpha.
        for layers in self._layers:
            for neurons in layers._neurons:
                neurons._alpha = alpha | 0.788257 | 0.719137 |
from django.test import TestCase
from django.test import Client
from django.urls import reverse
from django.contrib.auth.models import User
from .models import bc_sector
class TestSector(TestCase):
def setUp(self):
# Set up data for the whole TestCase
bc_sector.objects.create(name="sector1", description='desc. sector1').save()
bc_sector.objects.create(name="sector2", description='desc. sector2').save()
self.username = '<EMAIL>'
self.password = '<PASSWORD>'
user = User.objects.create(username=self.username)
user.set_password(self.password)
user.save()
self.c = Client()
user_login = self.c.login(username=self.username, password=self.password)
self.assertTrue(user_login)
def testFormGet(self):
response = self.c.get(reverse('sector_list'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response=response, template_name='base.html')
self.assertTemplateUsed(response=response, template_name='navbar.html')
def testFormPost(self):
post_data = {
'name': 'sector3',
'description': 'desc. sector3',
}
response = self.c.post('/companies/sector/new/', post_data)
self.assertEqual(response.status_code, 302)
self.assertRedirects(response, '/companies/sector/list/')
ret = bc_sector.objects.get(name='sector3')
self.assertTrue(ret)
def testFormUpdate(self):
post_data = {
'name': 'sector4',
'description': 'desc. sector3',
}
ret = bc_sector.objects.get(name='sector1')
response = self.c.post(f'/companies/sector/update/{ret.id}/', post_data)
self.assertEqual(response.status_code, 302)
self.assertRedirects(response, '/companies/sector/list/')
ret = bc_sector.objects.get(name='sector4')
self.assertTrue(ret)
def testFormDelete(self):
ret = bc_sector.objects.get(name='sector2')
response = self.c.post(f'/companies/sector/delete/{ret.id}/')
self.assertEqual(response.status_code, 302)
self.assertRedirects(response, '/companies/sector/list/')
ret = bc_sector.objects.get(name='sector2', is_active=False)
self.assertTrue(ret) | companies/tests.py | from django.test import TestCase
from django.test import Client
from django.urls import reverse
from django.contrib.auth.models import User
from .models import bc_sector
class TestSector(TestCase):
    """CRUD tests for the bc_sector model and its list/new/update/delete views."""

    def setUp(self):
        # Set up data for the whole TestCase
        bc_sector.objects.create(name="sector1", description='desc. sector1').save()
        bc_sector.objects.create(name="sector2", description='desc. sector2').save()
        # NOTE(review): credentials look like redacted placeholders — confirm.
        self.username = '<EMAIL>'
        self.password = '<PASSWORD>'
        user = User.objects.create(username=self.username)
        user.set_password(self.password)
        user.save()
        self.c = Client()
        # Every test method runs as an authenticated user.
        user_login = self.c.login(username=self.username, password=self.password)
        self.assertTrue(user_login)

    def testFormGet(self):
        # The sector list view renders with the shared base/navbar templates.
        response = self.c.get(reverse('sector_list'))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response=response, template_name='base.html')
        self.assertTemplateUsed(response=response, template_name='navbar.html')

    def testFormPost(self):
        # Creating a sector redirects back to the list and persists the record.
        post_data = {
            'name': 'sector3',
            'description': 'desc. sector3',
        }
        response = self.c.post('/companies/sector/new/', post_data)
        self.assertEqual(response.status_code, 302)
        self.assertRedirects(response, '/companies/sector/list/')
        ret = bc_sector.objects.get(name='sector3')
        self.assertTrue(ret)

    def testFormUpdate(self):
        # Updating sector1 renames it to sector4 and redirects to the list.
        post_data = {
            'name': 'sector4',
            'description': 'desc. sector3',
        }
        ret = bc_sector.objects.get(name='sector1')
        response = self.c.post(f'/companies/sector/update/{ret.id}/', post_data)
        self.assertEqual(response.status_code, 302)
        self.assertRedirects(response, '/companies/sector/list/')
        ret = bc_sector.objects.get(name='sector4')
        self.assertTrue(ret)

    def testFormDelete(self):
        # Delete is a soft delete: the row survives with is_active=False.
        ret = bc_sector.objects.get(name='sector2')
        response = self.c.post(f'/companies/sector/delete/{ret.id}/')
        self.assertEqual(response.status_code, 302)
        self.assertRedirects(response, '/companies/sector/list/')
        ret = bc_sector.objects.get(name='sector2', is_active=False)
        self.assertTrue(ret) | 0.448185 | 0.239327 |
from trash import app
from flask import render_template, redirect, url_for, request, Flask
from PIL import Image
from os import path
import json
from PIL import Image
import requests
from io import BytesIO
from clarifai.rest import ClarifaiApp
from clarifai.rest import Image as ClImage
# app = Flask(__name__)
capp = ClarifaiApp(api_key='<KEY>')
model = capp.models.get('trashsorter')
def max_concept(l):
max_confidence = 0
best_concept = None
for concept in l:
if concept['value'] > max_confidence:
best_concept = concept['name']
return best_concept
@app.route("/sort", methods=["GET", "POST"])
def sort():
if request.method == "POST":
if request.files:
img = Image.open(request.files['trash_pic'])
img.thumbnail((300,300), Image.ANTIALIAS)
f = path.join(app.config['UPLOAD_FOLDER'], 'image.png')
img.save(f)
res = model.predict_by_filename(path.join(app.config['UPLOAD_FOLDER'], 'image.png'))
print(res['outputs'][0]['data']['concepts'])
type_of_trash = max_concept(res['outputs'][0]['data']['concepts'])
print(type_of_trash)
place = ''
if type_of_trash == 'plastic':
place = 'trash'
elif type_of_trash == 'cardboard':
place = 'compost'
elif type_of_trash == 'paper':
place = 'recycle'
elif type_of_trash == 'metal':
place = 'trash'
elif type_of_trash == 'trash':
place = 'trash'
elif type_of_trash == 'glass':
place = 'recycle'
else:
raise Exception
return render_template('index.html', var=place)
return render_template('index.html', var=None)
@app.route("/", methods=["GET"])
def home():
return render_template('index.html', var=None)
@app.route("/upload", methods=["POST"])
def upload():
img = Image.open(request.files['file'])
f = path.join(app.config['UPLOAD_FOLDER'], 'image.png')
img.save(f)
return 'Success!' | trash/views.py | from trash import app
from flask import render_template, redirect, url_for, request, Flask
from PIL import Image
from os import path
import json
from PIL import Image
import requests
from io import BytesIO
from clarifai.rest import ClarifaiApp
from clarifai.rest import Image as ClImage
# app = Flask(__name__)
# Shared Clarifai client plus the custom classification model used by /sort.
# NOTE(review): the API key is a redacted placeholder ('<KEY>') — must be set.
capp = ClarifaiApp(api_key='<KEY>')
model = capp.models.get('trashsorter')
def max_concept(l):
    """Return the name of the concept with the highest confidence value.

    Bug fix: the original never updated max_confidence, so it effectively
    returned the LAST concept with value > 0 rather than the most confident
    one. Returns None for an empty list.
    """
    max_confidence = 0
    best_concept = None
    for concept in l:
        if concept['value'] > max_confidence:
            max_confidence = concept['value']
            best_concept = concept['name']
    return best_concept
@app.route("/sort", methods=["GET", "POST"])
def sort():
    """Classify an uploaded picture of trash and render the suggested bin."""
    if request.method == "POST" and request.files:
        img = Image.open(request.files['trash_pic'])
        img.thumbnail((300, 300), Image.ANTIALIAS)
        saved = path.join(app.config['UPLOAD_FOLDER'], 'image.png')
        img.save(saved)
        res = model.predict_by_filename(saved)
        print(res['outputs'][0]['data']['concepts'])
        type_of_trash = max_concept(res['outputs'][0]['data']['concepts'])
        print(type_of_trash)
        # Map each recognised material to its disposal bin.
        bins = {
            'plastic': 'trash',
            'cardboard': 'compost',
            'paper': 'recycle',
            'metal': 'trash',
            'trash': 'trash',
            'glass': 'recycle',
        }
        if type_of_trash not in bins:
            raise Exception
        return render_template('index.html', var=bins[type_of_trash])
    return render_template('index.html', var=None)
@app.route("/", methods=["GET"])
def home():
    # Landing page; var=None means "no classification result to show yet".
    return render_template('index.html', var=None)
@app.route("/upload", methods=["POST"])
def upload():
    """Save the posted image as <UPLOAD_FOLDER>/image.png (always overwritten)."""
    img = Image.open(request.files['file'])
    f = path.join(app.config['UPLOAD_FOLDER'], 'image.png')
    img.save(f)
    return 'Success!' | 0.275812 | 0.063978 |
import unittest
from copy import deepcopy
from apronpy.texpr1 import PyTexpr1
from apronpy.coeff import PyDoubleScalarCoeff, PyMPQScalarCoeff
from apronpy.environment import PyEnvironment
from apronpy.lincons0 import ConsTyp
from apronpy.lincons1 import PyLincons1
from apronpy.linexpr1 import PyLinexpr1
from apronpy.tcons1 import PyTcons1
from apronpy.var import PyVar
class TestPyTcons1(unittest.TestCase):
def test_init(self):
e = PyEnvironment([PyVar('x0'), PyVar('y')], [PyVar('z')])
x = PyLinexpr1(e)
x.set_coeff(PyVar('x0'), PyDoubleScalarCoeff(3))
x.set_coeff(PyVar('z'), PyDoubleScalarCoeff(-9))
x.set_cst(PyDoubleScalarCoeff(8))
c = PyLincons1(ConsTyp.AP_CONS_SUPEQ, x)
self.assertEqual(str(PyTcons1(c)), '8.0 + 3.0 · x0 - 9.0 · z >= 0')
self.assertEqual(str(PyTcons1.unsat(e)), '-1.0 >= 0')
z = PyLincons1(ConsTyp.AP_CONS_DISEQ, PyLinexpr1(e))
self.assertEqual(str(PyTcons1(z)), '0.0 != 0')
def test_make(self):
e = PyEnvironment([PyVar('x0'), PyVar('y')], [PyVar('z')])
x = PyLinexpr1(e)
x.set_coeff(PyVar('x0'), PyDoubleScalarCoeff(3))
x.set_coeff(PyVar('z'), PyDoubleScalarCoeff(-9))
x.set_cst(PyDoubleScalarCoeff(8))
c = PyTcons1.make(PyTexpr1(x), ConsTyp.AP_CONS_SUPEQ)
self.assertEqual(str(c), '8.0 + 3.0 · x0 - 9.0 · z >= 0')
def test_deepcopy(self):
e = PyEnvironment([PyVar('x0'), PyVar('y')], [PyVar('z')])
x = PyLinexpr1(e)
x.set_coeff(PyVar('x0'), PyDoubleScalarCoeff(3))
x.set_coeff(PyVar('z'), PyDoubleScalarCoeff(-9))
x.set_cst(PyDoubleScalarCoeff(8))
c0 = PyTcons1.make(PyTexpr1(x), ConsTyp.AP_CONS_SUPEQ)
c1 = deepcopy(c0)
c2 = c0
self.assertNotEqual(id(c0), id(c1))
self.assertEqual(id(c0), id(c2))
def test_substitute(self):
e = PyEnvironment([PyVar('x0'), PyVar('y')], [PyVar('z')])
x0 = PyLinexpr1(e)
x0.set_coeff(PyVar('x0'), PyMPQScalarCoeff(1))
x0.set_cst(PyMPQScalarCoeff(3))
t0 = PyTexpr1(x0)
c0 = PyTcons1.make(t0, ConsTyp.AP_CONS_SUPEQ)
self.assertEqual(str(c0), '3 + 1 · x0 >= 0')
x1 = PyLinexpr1(e)
x1.set_coeff(PyVar('x0'), PyMPQScalarCoeff(1))
x1.set_cst(PyMPQScalarCoeff(-1))
t1 = PyTexpr1(x1)
c1 = PyTcons1.make(t1, ConsTyp.AP_CONS_SUPEQ)
self.assertEqual(str(c1), '-1 + 1 · x0 >= 0')
self.assertEqual(str(c0.substitute(PyVar('x0'), t1)), '3 + 1 · (-1 + 1 · x0) >= 0')
if __name__ == '__main__':
unittest.main() | tests/test_tcons1.py | import unittest
from copy import deepcopy
from apronpy.texpr1 import PyTexpr1
from apronpy.coeff import PyDoubleScalarCoeff, PyMPQScalarCoeff
from apronpy.environment import PyEnvironment
from apronpy.lincons0 import ConsTyp
from apronpy.lincons1 import PyLincons1
from apronpy.linexpr1 import PyLinexpr1
from apronpy.tcons1 import PyTcons1
from apronpy.var import PyVar
class TestPyTcons1(unittest.TestCase):
    """Exercises PyTcons1 construction, copying and substitution."""

    def test_init(self):
        # A tree constraint built from a linear constraint keeps its
        # coefficients and its comparison operator.
        e = PyEnvironment([PyVar('x0'), PyVar('y')], [PyVar('z')])
        x = PyLinexpr1(e)
        x.set_coeff(PyVar('x0'), PyDoubleScalarCoeff(3))
        x.set_coeff(PyVar('z'), PyDoubleScalarCoeff(-9))
        x.set_cst(PyDoubleScalarCoeff(8))
        c = PyLincons1(ConsTyp.AP_CONS_SUPEQ, x)
        self.assertEqual(str(PyTcons1(c)), '8.0 + 3.0 · x0 - 9.0 · z >= 0')
        self.assertEqual(str(PyTcons1.unsat(e)), '-1.0 >= 0')
        z = PyLincons1(ConsTyp.AP_CONS_DISEQ, PyLinexpr1(e))
        self.assertEqual(str(PyTcons1(z)), '0.0 != 0')

    def test_make(self):
        # PyTcons1.make pairs a tree expression with a constraint type.
        e = PyEnvironment([PyVar('x0'), PyVar('y')], [PyVar('z')])
        x = PyLinexpr1(e)
        x.set_coeff(PyVar('x0'), PyDoubleScalarCoeff(3))
        x.set_coeff(PyVar('z'), PyDoubleScalarCoeff(-9))
        x.set_cst(PyDoubleScalarCoeff(8))
        c = PyTcons1.make(PyTexpr1(x), ConsTyp.AP_CONS_SUPEQ)
        self.assertEqual(str(c), '8.0 + 3.0 · x0 - 9.0 · z >= 0')

    def test_deepcopy(self):
        # deepcopy yields a distinct object; plain assignment aliases it.
        e = PyEnvironment([PyVar('x0'), PyVar('y')], [PyVar('z')])
        x = PyLinexpr1(e)
        x.set_coeff(PyVar('x0'), PyDoubleScalarCoeff(3))
        x.set_coeff(PyVar('z'), PyDoubleScalarCoeff(-9))
        x.set_cst(PyDoubleScalarCoeff(8))
        c0 = PyTcons1.make(PyTexpr1(x), ConsTyp.AP_CONS_SUPEQ)
        c1 = deepcopy(c0)
        c2 = c0
        self.assertNotEqual(id(c0), id(c1))
        self.assertEqual(id(c0), id(c2))

    def test_substitute(self):
        # Substituting x0 by (-1 + x0) nests the expression, not simplifying it.
        e = PyEnvironment([PyVar('x0'), PyVar('y')], [PyVar('z')])
        x0 = PyLinexpr1(e)
        x0.set_coeff(PyVar('x0'), PyMPQScalarCoeff(1))
        x0.set_cst(PyMPQScalarCoeff(3))
        t0 = PyTexpr1(x0)
        c0 = PyTcons1.make(t0, ConsTyp.AP_CONS_SUPEQ)
        self.assertEqual(str(c0), '3 + 1 · x0 >= 0')
        x1 = PyLinexpr1(e)
        x1.set_coeff(PyVar('x0'), PyMPQScalarCoeff(1))
        x1.set_cst(PyMPQScalarCoeff(-1))
        t1 = PyTexpr1(x1)
        c1 = PyTcons1.make(t1, ConsTyp.AP_CONS_SUPEQ)
        self.assertEqual(str(c1), '-1 + 1 · x0 >= 0')
        self.assertEqual(str(c0.substitute(PyVar('x0'), t1)), '3 + 1 · (-1 + 1 · x0) >= 0')

if __name__ == '__main__':
    unittest.main() | 0.446012 | 0.40751 |
import os.path
import itertools
import Tools
import random
import numpy as np
import scipy
import scipy.stats
NBTESTS = 10
VECDIM = [12,14,20]
def entropyTest(config,nb):
inputs = []
outputs = []
vecDim = VECDIM[nb % len(VECDIM)]
dims=np.array([NBTESTS,vecDim])
for _ in range(0,NBTESTS):
v = np.random.rand(vecDim)
v = v / np.sum(v)
e = scipy.stats.entropy(v)
inputs += list(v)
outputs.append(e)
inputs = np.array(inputs)
outputs = np.array(outputs)
config.writeInput(nb, inputs,"Input")
config.writeInputS16(nb, dims,"Dims")
config.writeReference(nb, outputs,"RefEntropy")
def logsumexpTest(config,nb):
inputs = []
outputs = []
vecDim = VECDIM[nb % len(VECDIM)]
dims=np.array([NBTESTS,vecDim])
for _ in range(0,NBTESTS):
v = np.random.rand(vecDim)
v = v / np.sum(v)
e = scipy.special.logsumexp(v)
inputs += list(v)
outputs.append(e)
inputs = np.array(inputs)
outputs = np.array(outputs)
config.writeInput(nb, inputs,"Input")
config.writeInputS16(nb, dims,"Dims")
config.writeReference(nb, outputs,"RefLogSumExp")
def klTest(config,nb):
inputsA = []
inputsB = []
outputs = []
vecDim = VECDIM[nb % len(VECDIM)]
dims=np.array([NBTESTS,vecDim])
for _ in range(0,NBTESTS):
va = np.random.rand(vecDim)
va = va / np.sum(va)
vb = np.random.rand(vecDim)
vb = vb / np.sum(vb)
e = scipy.stats.entropy(va,vb)
inputsA += list(va)
inputsB += list(vb)
outputs.append(e)
inputsA = np.array(inputsA)
inputsB = np.array(inputsB)
outputs = np.array(outputs)
config.writeInput(nb, inputsA,"InputA")
config.writeInput(nb, inputsB,"InputB")
config.writeInputS16(nb, dims,"Dims")
config.writeReference(nb, outputs,"RefKL")
def logSumExpDotTest(config,nb):
inputsA = []
inputsB = []
outputs = []
vecDim = VECDIM[nb % len(VECDIM)]
dims=np.array([NBTESTS,vecDim])
for _ in range(0,NBTESTS):
va = np.random.rand(vecDim)
va = va / np.sum(va)
vb = np.random.rand(vecDim)
vb = vb / np.sum(vb)
d = 0.001
# It is a proba so must be in [0,1]
# But restricted to ]d,1] so that the log exists
va = (1-d)*va + d
vb = (1-d)*vb + d
e = np.log(np.dot(va,vb))
va = np.log(va)
vb = np.log(vb)
inputsA += list(va)
inputsB += list(vb)
outputs.append(e)
inputsA = np.array(inputsA)
inputsB = np.array(inputsB)
outputs = np.array(outputs)
config.writeInput(nb, inputsA,"InputA")
config.writeInput(nb, inputsB,"InputB")
config.writeInputS16(nb, dims,"Dims")
config.writeReference(nb, outputs,"RefLogSumExpDot")
def writeTests(config):
entropyTest(config,1)
logsumexpTest(config,2)
klTest(config,3)
logSumExpDotTest(config,4)
PATTERNDIR = os.path.join("Patterns","DSP","Stats","Stats")
PARAMDIR = os.path.join("Parameters","DSP","Stats","Stats")
configf32=Tools.Config(PATTERNDIR,PARAMDIR,"f32")
writeTests(configf32) | CMSIS/DSP/Testing/PatternGeneration/Stats.py | import os.path
import itertools
import Tools
import random
import numpy as np
import scipy
import scipy.stats
NBTESTS = 10
VECDIM = [12,14,20]

def entropyTest(config,nb):
    """Generate NBTESTS random probability vectors and their entropies.

    Writes the flattened inputs, the [NBTESTS, vecDim] dims array and the
    per-vector reference entropies through *config*.
    """
    vec_dim = VECDIM[nb % len(VECDIM)]
    dims = np.array([NBTESTS, vec_dim])
    flattened = []
    refs = []
    for _ in range(NBTESTS):
        vec = np.random.rand(vec_dim)
        vec = vec / np.sum(vec)  # normalise into a probability distribution
        flattened.extend(vec)
        refs.append(scipy.stats.entropy(vec))
    config.writeInput(nb, np.array(flattened), "Input")
    config.writeInputS16(nb, dims, "Dims")
    config.writeReference(nb, np.array(refs), "RefEntropy")
def logsumexpTest(config,nb):
    """Generate random probability vectors and their log-sum-exp references.

    Writes the flattened inputs, the [NBTESTS, vecDim] dims array and the
    per-vector references through *config*.
    """
    # Fix: the module only imports scipy and scipy.stats; scipy.special was
    # reachable merely as a side effect of importing scipy.stats. Import it
    # explicitly so this function does not rely on that implementation detail.
    import scipy.special
    inputs = []
    outputs = []
    vecDim = VECDIM[nb % len(VECDIM)]
    dims = np.array([NBTESTS, vecDim])
    for _ in range(0, NBTESTS):
        v = np.random.rand(vecDim)
        v = v / np.sum(v)  # normalise into a probability distribution
        e = scipy.special.logsumexp(v)
        inputs += list(v)
        outputs.append(e)
    config.writeInput(nb, np.array(inputs), "Input")
    config.writeInputS16(nb, dims, "Dims")
    config.writeReference(nb, np.array(outputs), "RefLogSumExp")
def klTest(config,nb):
    """Generate pairs of random distributions plus their KL divergence.

    Writes both flattened input sets, the [NBTESTS, vecDim] dims array and the
    per-pair references through *config*.
    """
    vecDim = VECDIM[nb % len(VECDIM)]
    dims = np.array([NBTESTS, vecDim])
    lhs, rhs, refs = [], [], []
    for _ in range(NBTESTS):
        pa = np.random.rand(vecDim)
        pa = pa / np.sum(pa)
        pb = np.random.rand(vecDim)
        pb = pb / np.sum(pb)
        lhs.extend(pa)
        rhs.extend(pb)
        # scipy's entropy with two arguments is the KL divergence D(pa || pb)
        refs.append(scipy.stats.entropy(pa, pb))
    config.writeInput(nb, np.array(lhs), "InputA")
    config.writeInput(nb, np.array(rhs), "InputB")
    config.writeInputS16(nb, dims, "Dims")
    config.writeReference(nb, np.array(refs), "RefKL")
def logSumExpDotTest(config,nb):
    """Generate pairs of log-probability vectors plus the reference log of
    their dot product computed in the linear domain."""
    vecDim = VECDIM[nb % len(VECDIM)]
    dims = np.array([NBTESTS, vecDim])
    logs_a, logs_b, refs = [], [], []
    for _ in range(NBTESTS):
        pa = np.random.rand(vecDim)
        pa = pa / np.sum(pa)
        pb = np.random.rand(vecDim)
        pb = pb / np.sum(pb)
        d = 0.001
        # It is a proba so must be in [0,1], but restricted to ]d,1] so that
        # every log below is defined.
        pa = (1 - d) * pa + d
        pb = (1 - d) * pb + d
        refs.append(np.log(np.dot(pa, pb)))
        logs_a.extend(np.log(pa))
        logs_b.extend(np.log(pb))
    config.writeInput(nb, np.array(logs_a), "InputA")
    config.writeInput(nb, np.array(logs_b), "InputB")
    config.writeInputS16(nb, dims, "Dims")
    config.writeReference(nb, np.array(refs), "RefLogSumExpDot")
def writeTests(config):
    # One pattern/reference set per statistics kernel; the test number also
    # selects the vector length via VECDIM[nb % len(VECDIM)].
    entropyTest(config,1)
    logsumexpTest(config,2)
    klTest(config,3)
    logSumExpDotTest(config,4)
# Output locations for the generated patterns/parameters (f32 only for now).
PATTERNDIR = os.path.join("Patterns","DSP","Stats","Stats")
PARAMDIR = os.path.join("Parameters","DSP","Stats","Stats")
configf32=Tools.Config(PATTERNDIR,PARAMDIR,"f32")
writeTests(configf32) | 0.262653 | 0.27211 |
from pm4py.algo.enhancement.sna.variants.log import handover as log_handover, jointactivities as log_jointactivities, \
subcontracting as log_subcontracting, working_together as log_workingtogether
from pm4py.algo.enhancement.sna.variants.pandas import handover as pd_handover, subcontracting as pd_subcontracting, \
working_together as pd_workingtogether, jointactivities as pd_jointactivities
from pm4py.objects.conversion.log import converter as log_conversion
from pm4py.algo.enhancement.sna.parameters import Parameters
from pm4py.util import exec_utils
from enum import Enum
import numpy as np
import deprecation
class Variants(Enum):
    """Available SNA metric implementations, keyed by input type."""
    # EventLog-based implementations
    HANDOVER_LOG = log_handover
    WORKING_TOGETHER_LOG = log_workingtogether
    SUBCONTRACTING_LOG = log_subcontracting
    JOINTACTIVITIES_LOG = log_jointactivities
    # pandas-DataFrame-based implementations
    HANDOVER_PANDAS = pd_handover
    WORKING_TOGETHER_PANDAS = pd_workingtogether
    SUBCONTRACTING_PANDAS = pd_subcontracting
    JOINTACTIVITIES_PANDAS = pd_jointactivities
@deprecation.deprecated('2.2.5', '3.0.0', details='use pm4py.algo.organizational_mining.sna.algorithm instead')
def apply(log, parameters=None, variant=Variants.HANDOVER_LOG):
    """
    Calculates a SNA metric

    Parameters
    ------------
    log
        Log
    parameters
        Possible parameters of the algorithm
    variant
        Variant of the algorithm to apply. Possible values:
        - Variants.HANDOVER_LOG
        - Variants.WORKING_TOGETHER_LOG
        - Variants.SUBCONTRACTING_LOG
        - Variants.JOINTACTIVITIES_LOG
        - Variants.HANDOVER_PANDAS
        - Variants.WORKING_TOGETHER_PANDAS
        - Variants.SUBCONTRACTING_PANDAS
        - Variants.JOINTACTIVITIES_PANDAS

    Returns
    -----------
    tuple
        Tuple containing the metric matrix and the resources list
    """
    if parameters is None:
        parameters = {}
    enable_metric_normalization = exec_utils.get_param_value(Parameters.METRIC_NORMALIZATION, parameters, False)
    # The *_LOG variants expect an EventLog, so convert first; the pandas
    # variants take the dataframe untouched.
    if variant in [Variants.HANDOVER_LOG, Variants.WORKING_TOGETHER_LOG, Variants.JOINTACTIVITIES_LOG,
                   Variants.SUBCONTRACTING_LOG]:
        log = log_conversion.apply(log, parameters=parameters)
    sna = exec_utils.get_variant(variant).apply(log, parameters=parameters)
    if enable_metric_normalization:
        # Fix: compute the normalization scale only when it is requested; the
        # original evaluated np.max(np.abs(...)) unconditionally, doing the
        # full-matrix work (and failing on an empty matrix) even with
        # normalization disabled. Guard against an all-zero matrix.
        abs_max = np.max(np.abs(sna[0]))
        if abs_max > 0:
            sna[0] = sna[0] / abs_max
    return sna
subcontracting as log_subcontracting, working_together as log_workingtogether
from pm4py.algo.enhancement.sna.variants.pandas import handover as pd_handover, subcontracting as pd_subcontracting, \
working_together as pd_workingtogether, jointactivities as pd_jointactivities
from pm4py.objects.conversion.log import converter as log_conversion
from pm4py.algo.enhancement.sna.parameters import Parameters
from pm4py.util import exec_utils
from enum import Enum
import numpy as np
import deprecation
class Variants(Enum):
HANDOVER_LOG = log_handover
WORKING_TOGETHER_LOG = log_workingtogether
SUBCONTRACTING_LOG = log_subcontracting
JOINTACTIVITIES_LOG = log_jointactivities
HANDOVER_PANDAS = pd_handover
WORKING_TOGETHER_PANDAS = pd_workingtogether
SUBCONTRACTING_PANDAS = pd_subcontracting
JOINTACTIVITIES_PANDAS = pd_jointactivities
@deprecation.deprecated('2.2.5', '3.0.0', details='use pm4py.algo.organizational_mining.sna.algorithm instead')
def apply(log, parameters=None, variant=Variants.HANDOVER_LOG):
"""
Calculates a SNA metric
Parameters
------------
log
Log
parameters
Possible parameters of the algorithm
variant
Variant of the algorithm to apply. Possible values:
- Variants.HANDOVER_LOG
- Variants.WORKING_TOGETHER_LOG
- Variants.SUBCONTRACTING_LOG
- Variants.JOINTACTIVITIES_LOG
- Variants.HANDOVER_PANDAS
- Variants.WORKING_TOGETHER_PANDAS
- Variants.SUBCONTRACTING_PANDAS
- Variants.JOINTACTIVITIES_PANDAS
Returns
-----------
tuple
Tuple containing the metric matrix and the resources list
"""
if parameters is None:
parameters = {}
enable_metric_normalization = exec_utils.get_param_value(Parameters.METRIC_NORMALIZATION, parameters, False)
if variant in [Variants.HANDOVER_LOG, Variants.WORKING_TOGETHER_LOG, Variants.JOINTACTIVITIES_LOG,
Variants.SUBCONTRACTING_LOG]:
log = log_conversion.apply(log, parameters=parameters)
sna = exec_utils.get_variant(variant).apply(log, parameters=parameters)
abs_max = np.max(np.abs(sna[0]))
if enable_metric_normalization and abs_max > 0:
sna[0] = sna[0] / abs_max
return sna | 0.872958 | 0.282573 |
import time
import requests
import json
import sys
import getopt
mytimeinterval = 30
runcount = 1
def get_key_auth_token(ip, ver, uname, pword):
    """Activate the on-premise app against DNA Spaces and return the response body.

    The caller (get_firehose_evt) uses the return value as an X-Auth-Token
    header value.  NOTE(review): the body is returned as raw text; if the API
    wraps the token in JSON this should extract the relevant field -- confirm
    against the DNA Spaces API.

    :param ip: controller IP address (currently unused, kept for a uniform signature)
    :param ver: API version (unused)
    :param uname: username (unused)
    :param pword: password (unused)
    :return: response body text on success; exits the process on failure
    """
    # The url for the post ticket API request
    post_url = "https://partners.dnaspaces.io/client/v1/partner/activateOnPremiseApp"
    print(post_url)
    # BUG FIX: bare `urllib3` was never imported (NameError); use the urllib3
    # bundled with requests, as the rest of this module already does.
    requests.packages.urllib3.disable_warnings()
    headers = {'content-type': 'application/json',
               'Authorization': '<KEY>RZK8S3A2Qej6_TEZScfxDm47RV8uUugXogV7sjaQJt0t8-sF6r3Lu2NGoM'}
    payload = {"appId": "app-4909E65EF28A42D3A46C180EFF06B0E6", "activationRefId": "BC0829D5C61043A5B71B413015F6792E"}
    r = None
    try:
        r = requests.post(post_url, data=json.dumps(payload), headers=headers)
        print("get_X_auth_token\n")
        # BUG FIX: `print(t.text)` referenced an undefined name; the response is `r`.
        print(r.text)
        # BUG FIX: the original returned None, so the caller sent a useless header.
        return r.text
    except Exception:
        # Something went wrong; report whatever we have (r may be unbound if
        # the POST itself raised) and bail out.
        if r is not None:
            print("Status: %s" % r.status_code)
            print("Response: %s" % r.text)
        sys.exit()
def get_firehose_evt(ip,ver,uname,pword):
    """Fetch firehose events from DNA Spaces and return the HTTP response.

    Obtains an auth token first, then GETs the partner firehose endpoint with
    certificate verification disabled.  Exits the process if the GET raises.
    NOTE(review): the prints say 'get_devicelist' -- they look copy-pasted
    from another helper; the endpoint queried here is the firehose one.
    """
    print('get_devicelist')
    # NOTE(review): verify get_key_auth_token actually returns a usable token
    # string -- as originally written it returned None.
    ticket = get_key_auth_token(ip,ver,uname,pword)
    #headers = {"X-Auth-Token": ticket}
    headers = {'content-type':'application/json','X-Auth-Token':ticket}
    url = "https://partners.dnaspaces.io/api/partners/v1/firehose/events"
    #print ("\nExecuting get_devicelist:\n")
    #print(url)
    # presumably narrows the event stream to APs -- confirm the API supports it
    params={'family':'Unified AP'}
    try:
        # The request and response of "GET" request
        requests.packages.urllib3.disable_warnings()
        resp= requests.get(url,headers=headers,params=params,verify = False)
        print ("GET '%s' Status: "%url,resp.status_code,'\n') # This is the http request status
        return(resp)
    except:
        print ("Something wrong with GET get_devicelist")
        sys.exit()
def getfirehose_evtinfo(ip, ver, uname, pword):
    """Collect per-AP records from the firehose response and dump them to JSON.

    Builds a dict with a timestamp and a list of AP records, enriches each
    record via get_devicedetailbymacaddr(), and writes the result to a
    millisecond-timestamped file under device_Folder.

    NOTE(review): `get_devicedetailbymacaddr` and `device_Folder` are not
    defined in this module -- they must be provided elsewhere or this raises
    NameError at runtime.
    """
    dnac_device = {}
    dnac_device['time'] = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
    dnac_aplist = []
    resp = get_firehose_evt(ip, ver, uname, pword)
    if resp.status_code == 200:
        # Get the json-encoded content from response
        response_json = resp.json()
        for siteitem in response_json['response']:
            tmp_device = {}
            tmp_device['macAddress'] = siteitem['macAddress']
            tmp_device['platformId'] = siteitem['platformId']
            tmp_device['reachabilityStatus'] = siteitem['reachabilityStatus']
            tmp_device['hostname'] = siteitem['hostname']
            tmp_resp = get_devicedetailbymacaddr(ip, ver, uname, pword, siteitem['macAddress'])
            if tmp_resp.status_code == 200:
                try:
                    tmp_resp_json = tmp_resp.json()
                    # .get() with defaults: not every device reports these
                    tmp_device['location'] = tmp_resp_json['response'].get('location', '')
                    tmp_device['overallHealth'] = tmp_resp_json['response'].get('overallHealth', '0')
                except Exception:
                    print('tmp_device json track Error\n')
            dnac_aplist.append(tmp_device)
            time.sleep(0.5)  # throttle the per-device detail requests
        dnac_device['AP'] = dnac_aplist
        # Millisecond timestamp so successive runs do not overwrite each other.
        millis = int(round(time.time() * 1000))
        filename = device_Folder + 'device' + str(millis) + '.json'
        with open(filename, "w") as f:
            json.dump(dnac_device, f)
        print("write file finish\n")
    else:
        # BUG FIX: the original referenced an undefined name `status` here,
        # turning every non-200 response into a NameError.
        print("get allsiteoverall error code %d" % resp.status_code)
def main(argv):
    """Parse CLI options and run the firehose collection loop.

    Options: -u/--username, -p/--password, -i/--ip, -t/--timer (minutes
    between polls), -c/--count (number of polls; 0 means run forever).

    NOTE(review): VERSION and set_dnacrate() are not defined in this module --
    they must come from elsewhere for this to run.
    """
    usage = 'test_arg.py -u <username> -p <password> -i <ip address> -t <timeinterval> -c <runcount>'
    # BUG FIX: the original iterated `opts` without ever calling
    # getopt.getopt(), which raised NameError immediately.
    try:
        opts, _ = getopt.getopt(
            argv, "hu:p:i:t:c:",
            ["help", "username=", "password=", "ip=", "timer=", "count="])
    except getopt.GetoptError:
        print(usage)
        sys.exit(2)
    # Start from the module-level defaults; the original assigned to the
    # global names, making them locals and risking UnboundLocalError when
    # -t/-c were omitted.
    interval = mytimeinterval
    count = runcount
    m_USERNAME = m_PASSWORD = m_APICEM_IP = ''
    for opt, arg in opts:
        if opt in ("-h", "--help"):
            print(usage)
            print('or: test_arg.py --username=<username> --password=<password> --ip <ip address> --timer <timeinterval> --count <runcount>')
            sys.exit()
        elif opt in ("-u", "--username"):
            m_USERNAME = arg
        elif opt in ("-p", "--password"):
            m_PASSWORD = arg
        elif opt in ("-i", "--ip"):
            m_APICEM_IP = arg
        elif opt in ("-t", "--timer"):
            interval = int(arg)
        elif opt in ("-c", "--count"):
            count = int(arg)
    print('username:%s \n' % m_USERNAME)
    print('password:%s \n' % m_PASSWORD)
    print('IP Address:%s \n' % m_APICEM_IP)
    print('Time Inteval:%d \n' % interval)
    print('runcount:%d \n' % count)
    #getdnacinfo()
    set_dnacrate(m_APICEM_IP, VERSION, m_USERNAME, m_PASSWORD)
    if count == 0:
        print('Run forever\n')
        while True:
            get_firehose_evt(m_APICEM_IP, VERSION, m_USERNAME, m_PASSWORD)
            time.sleep(1)
            getfirehose_evtinfo(m_APICEM_IP, VERSION, m_USERNAME, m_PASSWORD)
            time.sleep(interval * 60)
    else:
        while count > 0:
            get_firehose_evt(m_APICEM_IP, VERSION, m_USERNAME, m_PASSWORD)
            getfirehose_evtinfo(m_APICEM_IP, VERSION, m_USERNAME, m_PASSWORD)
            count = count - 1
            if count == 0:
                print('Run finish and exit\n')
                sys.exit(2)
            else:
                time.sleep(interval * 60)
if __name__ == "__main__":
    # sys.argv[1:] is the argument list to process; sys.argv[0] is the script
    # name, so it is filtered out.
    print(sys.argv)
main(sys.argv[1:]) | dnaspace_data_Adapt.py | import time
import requests
import json
import sys
import getopt
mytimeinterval = 30
runcount = 1
def get_key_auth_token(ip,ver,uname,pword):
# The url for the post ticket API request
post_url = "https://partners.dnaspaces.io/client/v1/partner/activateOnPremiseApp"
print(post_url)
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# All APIC-EM REST API query and response content type is JSON
headers = {'content-type': 'application/json',
'Authorization': '<KEY>RZK8S3A2Qej6_TEZScfxDm47RV8uUugXogV7sjaQJt0t8-sF6r3Lu2NGoM'}
mydate = {"appId": "app-4909E65EF28A42D3A46C180EFF06B0E6", "activationRefId": "BC0829D5C61043A5B71B413015F6792E"}
# POST request and response
try:
r = requests.post(post_url, data=json.dumps(mydate), headers=headers)
# Remove '#' if need to print out response
print("get_X_auth_token\n")
# data = r.json()
print(t.text)
return
except:
# Something wrong, cannot get service ticket
print("Status: %s" % r.status_code)
print("Response: %s" % r.text)
sys.exit()
def get_firehose_evt(ip,ver,uname,pword):
print('get_devicelist')
ticket = get_key_auth_token(ip,ver,uname,pword)
#headers = {"X-Auth-Token": ticket}
headers = {'content-type':'application/json','X-Auth-Token':ticket}
url = "https://partners.dnaspaces.io/api/partners/v1/firehose/events"
#print ("\nExecuting get_devicelist:\n")
#print(url)
params={'family':'Unified AP'}
try:
# The request and response of "GET" request
requests.packages.urllib3.disable_warnings()
resp= requests.get(url,headers=headers,params=params,verify = False)
print ("GET '%s' Status: "%url,resp.status_code,'\n') # This is the http request status
return(resp)
except:
print ("Something wrong with GET get_devicelist")
sys.exit()
def getfirehose_evtinfo(ip,ver,uname,pword):
dnac_device = {}
dnac_device['time']=time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
dnac_aplist=[]
resp = get_firehose_evt(ip,ver,uname,pword)
if (resp.status_code == 200) :
# Get the json-encoded content from response
response_json = resp.json()
#print(json.dumps(response_json,indent=2))
for siteitem in response_json['response']:
tmp_device = {}
tmp_device['macAddress']=siteitem['macAddress']
tmp_device['platformId']=siteitem['platformId']
tmp_device['reachabilityStatus']=siteitem['reachabilityStatus']
tmp_device['hostname']=siteitem['hostname']
tmp_resp = get_devicedetailbymacaddr(ip,ver,uname,pword,siteitem['macAddress'])
if (tmp_resp.status_code == 200) :
try:
tmp_resp_json = tmp_resp.json()
#print('tmp_device json track\n')
#print(json.dumps(tmp_resp_json,indent=2))
tmp_device['location']=tmp_resp_json['response'].get('location','')
tmp_device['overallHealth']=tmp_resp_json['response'].get('overallHealth','0')
#tmp_device['location']=tmp_resp_json['response'].['location']
#tmp_device['overallHealth']=tmp_resp_json['response']['overallHealth']
except:
print('tmp_device json track Error\n')
#print(tmp_device)
dnac_aplist.append(tmp_device)
time.sleep(0.5)
dnac_device['AP'] = dnac_aplist
#print(json.dumps(dnac_device,indent=2) )
#write to file
millis = int(round(time.time() * 1000))
fielname = device_Folder+'device'+str(millis)+'.json'
#print(fielname)
with open(fielname,"w") as f:
json.dump(dnac_device,f)
print("write file finish\n")
else :
print("get allsiteoverall error code %d" % status)
def main(argv):
#处理 返回值options是以元组为元素的列表。
for opt, arg in opts:
if opt in ("-h", "--help"):
print('test_arg.py -u <username> -p <password> -i <ip address> -t <timeinterval> -c <runcount>')
print('or: test_arg.py --username=<username> --password=<password> --ip <ip address> --timer <timeinterval> --count <runcount>')
sys.exit()
elif opt in ("-u", "--username"):
m_USERNAME = arg
elif opt in ("-p", "--password"):
m_PASSWORD = arg
elif opt in ("-i", "--ip"):
m_APICEM_IP = arg
elif opt in ("-t", "--timer"):
mytimeinterval = int(arg)
elif opt in ("-c", "--count"):
runcount = int(arg)
print('username:%s \n' % m_USERNAME)
print('password:%s \n' % m_PASSWORD)
print('IP Address:%s \n' % m_APICEM_IP)
print('Time Inteval:%d \n' % mytimeinterval)
print('runcount:%d \n' % runcount)
#getdnacinfo()
set_dnacrate(m_APICEM_IP,VERSION,m_USERNAME,m_PASSWORD)
if (runcount == 0) :
print('Run forever\n')
while True:
get_firehose_evt(m_APICEM_IP,VERSION,m_USERNAME,m_PASSWORD)
time.sleep(1)
getfirehose_evtinfo(m_APICEM_IP,VERSION,m_USERNAME,m_PASSWORD)
time.sleep(mytimeinterval*60)
else :
while (runcount > 0) :
get_firehose_evt(m_APICEM_IP,VERSION,m_USERNAME,m_PASSWORD)
getfirehose_evtinfo(m_APICEM_IP,VERSION,m_USERNAME,m_PASSWORD)
runcount = runcount -1
if (runcount == 0):
print('Run finish and exit\n')
sys.exit(2)
else:
time.sleep(mytimeinterval*60)
if __name__ == "__main__":
# sys.argv[1:]为要处理的参数列表,sys.argv[0]为脚本名,所以用sys.argv[1:]过滤掉脚本名。
print(sys.argv)
main(sys.argv[1:]) | 0.036813 | 0.084003 |
import logging
import os
import re
import shutil
from pprint import pformat
from base64 import urlsafe_b64encode
import boto3
import yaml
from botocore.errorfactory import ClientError
from .aws_facts import get_vpc_facts
logger = logging.getLogger(__name__)
def ensure_aws_facts(self):
    """Look up facts for self.vpc_id and cache them on self.vpc_facts.

    Module-level functions here take `self` -- presumably they are bound onto
    a command/runner object elsewhere; confirm against the caller.
    """
    self.vpc_facts = get_vpc_facts(vpc_id=self.vpc_id)
    logger.debug('vpc_facts -> \n%s', pformat(self.vpc_facts, indent=4, width=120))
def ensure_kops_k8s_version_consistency(self):
    """Verify kops/kubectl are installed and kops matches the k8s minor version.

    Raises RuntimeError when a required binary is missing, and re-raises
    (with an explanatory message appended) when the kops MAJOR.MINOR does not
    match the kubernetesVersion declared in the values template.
    """
    # ensure bin dependencies
    BIN_DEPS = (
        'kops',
        'kubectl',
    )
    for tool in BIN_DEPS:  # renamed from `bin`, which shadowed the builtin
        tool_path = shutil.which(tool)
        if tool_path is None or not os.access(tool_path, os.X_OK):
            raise RuntimeError('`{}` is NOT installed!'.format(tool))
    kops_version = None
    k8s_version = None
    try:
        # ensure kops and k8s have the same major and minor version!
        # (raw strings: '\s'/'\d' in plain literals are deprecated escapes)
        kops_version = re.search(r'Version\s*([\d.]+)', self._kops_cmd('version')).group(1)
        with open(os.path.join(self.DIR_TEMPLATE, 'values.yaml.j2')) as f:
            k8s_version = re.search(r'kubernetesVersion:\s*([\d.]+)', f.read()).group(1)
        assert kops_version.split('.')[:2] == k8s_version.split('.')[:2]
    except Exception as e:
        e.args += (
            (
                'kops supports the equivalent Kubernetes `minor` release '
                'number. `MAJOR.MINOR.PATCH` - https://github.com/kubernetes/kops'
                '\nVersion mismatch: kops -> {kops_v}, k8s -> {k8s_v}'
            ).format(kops_v=kops_version, k8s_v=k8s_version),
        )
        raise e
def ensure_region(self):
    """Default AWS_DEFAULT_REGION to self.region when the env var is unset
    or empty (ugly but useful)."""
    region = os.environ.get('AWS_DEFAULT_REGION')
    if not region:
        region = self.region
    os.environ['AWS_DEFAULT_REGION'] = region
def ensure_tmp_dir_existing_and_empty(self):
    """Recreate self.DIR_TMP so it exists and is empty before a run."""
    tmp_dir = self.DIR_TMP
    logger.debug('removing %s', tmp_dir)
    try:
        shutil.rmtree(tmp_dir)
    except FileNotFoundError:
        pass  # nothing to remove on a fresh checkout
    finally:
        os.makedirs(tmp_dir)
def ensure_ssh_pair(self):
    """Ensure the EC2 key pair and the kops SSH-public-key secret exist.

    Reads `publicKey` from the current values file, imports it as an EC2 key
    pair named after the cluster (a duplicate is tolerated), and registers it
    as a kops SSH public key secret for the default `admin` user when missing.

    Raises KeyError/TypeError (annotated) when `publicKey` is absent.
    """
    public_key_name = 'publicKey'
    try:
        with open(self.current_value_file_path) as f:
            # safe_load: the values file is plain data; yaml.load without an
            # explicit Loader is unsafe and deprecated.
            public_key_material = yaml.safe_load(f)[public_key_name]
    except (KeyError, TypeError) as e:
        e.args += ('`{}` is a required var, define it in {}'.format(public_key_name, self.current_value_file_path), )
        raise e
    ec2_key_pair_key = self.cluster_name
    try:
        ec2 = boto3.client('ec2')
        ec2.import_key_pair(KeyName=ec2_key_pair_key, PublicKeyMaterial=public_key_material)
    except ClientError as e:
        if e.response['Error']['Code'] != 'InvalidKeyPair.Duplicate':
            raise e
        # already imported on a previous run -- not an error
        # (logger.warn is deprecated in favour of logger.warning)
        logger.warning('Key pair -> `%s` is already there', ec2_key_pair_key)
    kops_default_admin_name = 'admin'

    def create_kops_secret_ssh_key():
        # Write the key material to a temp file and register it with kops.
        cmd = 'create secret sshpublickey {kops_u} '.format(kops_u=kops_default_admin_name)
        ssh_public_key_path = os.path.join(
            self.DIR_TMP,
            urlsafe_b64encode(ec2_key_pair_key.encode()).decode() + '.pub'
        )
        with open(ssh_public_key_path, 'w') as f:
            f.write(public_key_material)
        cmd += ' -i {ssh_public_key_path}'.format(ssh_public_key_path=ssh_public_key_path, )
        self._kops_cmd(cmd)

    def kops_secret_ssh_key_exists():
        # kops surfaces a missing secret as a RuntimeError containing 'not found'.
        try:
            cmd = 'get secret --type SSHPublicKey {kops_u} '.format(kops_u=kops_default_admin_name)
            return kops_default_admin_name in (self._kops_cmd(cmd) or '')
        except RuntimeError as e:
            if 'not found' in e.args[0]:
                return False
            raise e

    if not kops_secret_ssh_key_exists():
        create_kops_secret_ssh_key()
def ensure_state_store(self):
    """Ensure the S3 bucket used as the kops state store exists, with versioning.

    Creates a private bucket in self.region and enables versioning; a bucket
    we already own is treated as success.
    """
    s3 = boto3.resource('s3')
    bucket = s3.Bucket(self.state_store_name)
    try:
        bucket.create(ACL='private', CreateBucketConfiguration=dict(LocationConstraint=self.region))
        # versioning protects the kops state against accidental overwrite
        bucket_versioning = s3.BucketVersioning(self.state_store_name)
        bucket_versioning.enable()
    except ClientError as e:
        if e.response['Error']['Code'] == 'BucketAlreadyOwnedByYou':
            logger.debug('state store <%s> exists, ignore...', self.state_store_name)
            return
raise e | kforce/pre_steps.py | import logging
import os
import re
import shutil
from pprint import pformat
from base64 import urlsafe_b64encode
import boto3
import yaml
from botocore.errorfactory import ClientError
from .aws_facts import get_vpc_facts
logger = logging.getLogger(__name__)
def ensure_aws_facts(self):
self.vpc_facts = get_vpc_facts(vpc_id=self.vpc_id)
logger.debug('vpc_facts -> \n%s', pformat(self.vpc_facts, indent=4, width=120))
def ensure_kops_k8s_version_consistency(self):
# ensure bin dependencies
BIN_DEPS = (
'kops',
'kubectl',
)
for bin in BIN_DEPS:
bin_path = shutil.which(bin)
if bin_path is None or not os.access(bin_path, os.X_OK):
raise RuntimeError('`{}` is NOT installed!'.format(bin))
kops_version = None
k8s_version = None
try:
# ensure kops and k8s has same major and minor version!
kops_version = re.search('Version\s*([\d.]+)', self._kops_cmd('version')).group(1)
with open(os.path.join(self.DIR_TEMPLATE, 'values.yaml.j2')) as f:
k8s_version = re.search('kubernetesVersion:\s*([\d.]+)', f.read()).group(1)
assert kops_version.split('.')[:2] == k8s_version.split('.')[:2]
except Exception as e:
e.args += (
(
'kops supports the equivalent Kubernetes `minor` release '
'number. `MAJOR.MINOR.PATCH` - https://github.com/kubernetes/kops'
'\nVersion mismatch: kops -> {kops_v}, k8s -> {k8s_v}'
).format(kops_v=kops_version, k8s_v=k8s_version),
)
raise e
def ensure_region(self):
# ugly but useful
os.environ['AWS_DEFAULT_REGION'] = os.environ.get('AWS_DEFAULT_REGION', None) or self.region
def ensure_tmp_dir_existing_and_empty(self):
# ensure ./tmp is there and empty before run
try:
logger.debug('removing %s', self.DIR_TMP)
shutil.rmtree(self.DIR_TMP)
except FileNotFoundError:
...
finally:
os.makedirs(self.DIR_TMP)
def ensure_ssh_pair(self):
# ensure aws ec2 key pair
public_key_name = 'publicKey'
try:
with open(self.current_value_file_path) as f:
public_key_material = yaml.load(f)[public_key_name]
except (KeyError, TypeError) as e:
e.args += ('`{}` is a required var, define it in {}'.format(public_key_name, self.current_value_file_path), )
raise e
ec2_key_pair_key = self.cluster_name
try:
ec2 = boto3.client('ec2')
ec2.import_key_pair(KeyName=ec2_key_pair_key, PublicKeyMaterial=public_key_material)
except ClientError as e:
if e.response['Error']['Code'] != 'InvalidKeyPair.Duplicate':
raise e
logger.warn('Key pair -> `%s` is already there', ec2_key_pair_key)
kops_default_admin_name = 'admin'
def create_kops_secret_ssh_key():
# create `kops` secret
cmd = 'create secret sshpublickey {kops_u} '.format(kops_u=kops_default_admin_name)
ssh_public_key_path = os.path.join(
self.DIR_TMP,
urlsafe_b64encode(ec2_key_pair_key.encode()).decode() + '.pub'
)
with open(ssh_public_key_path, 'w') as f:
f.write(public_key_material)
cmd += ' -i {ssh_public_key_path}'.format(ssh_public_key_path=ssh_public_key_path, )
self._kops_cmd(cmd)
def is_kops_secret_ssh_key_exits():
try:
cmd = 'get secret --type SSHPublicKey {kops_u} '.format(kops_u=kops_default_admin_name)
return kops_default_admin_name in (self._kops_cmd(cmd) or '')
except RuntimeError as e:
if 'not found' in e.args[0]:
return False
raise e
if not is_kops_secret_ssh_key_exits():
create_kops_secret_ssh_key()
def ensure_state_store(self):
s3 = boto3.resource('s3')
bucket = s3.Bucket(self.state_store_name)
try:
bucket.create(ACL='private', CreateBucketConfiguration=dict(LocationConstraint=self.region))
bucket_versioning = s3.BucketVersioning(self.state_store_name)
bucket_versioning.enable()
except ClientError as e:
if e.response['Error']['Code'] == 'BucketAlreadyOwnedByYou':
logger.debug('state store <%s> exists, ignore...', self.state_store_name)
return
raise e | 0.307774 | 0.110759 |
from decouple import config
import os
from pathlib import Path
import cv2
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = config('SECRET_KEY')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = config('DEBUG', default=False, cast=bool)
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'corsheaders',
'rest_framework',
'rest_framework.authtoken',
'rest_auth',
'django.contrib.sites',
'allauth',
'allauth.account',
'rest_auth.registration',
"nested_admin",
'drf_yasg',
'widget_tweaks',
'channels',
'accounts',
'core',
'social',
'pose_analyser',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'corsheaders.middleware.CorsMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'yoga_pose_analyser.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'yoga_pose_analyser.wsgi.application'
ASGI_APPLICATION = 'yoga_pose_analyser.asgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = "Asia/Kolkata"
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [os.path.join(BASE_DIR, "static")]
MEDIA_URL = "/media/"
MEDIA_ROOT = os.path.join(BASE_DIR, "media")
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
STATIC_ROOT = os.path.join(PROJECT_DIR, "static")
CORS_ORIGIN_ALLOW_ALL = True
SITE_ID = 1
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
REST_FRAMEWORK = {
"DEFAULT_PERMISSION_CLASSES": ("rest_framework.permissions.AllowAny",),
"DEFAULT_AUTHENTICATION_CLASSES": (
"rest_framework.authentication.TokenAuthentication",
"rest_framework.authentication.SessionAuthentication",
),
"DEFAULT_FILTER_BACKENDS": ["django_filters.rest_framework.DjangoFilterBackend"],
"DATETIME_FORMAT": "%b %d %Y %H:%M:%S",
'DEFAULT_THROTTLE_RATES': {
'anon': '10000/day',
'user': '100000/day'
},
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination',
'PAGE_SIZE': 20
}
REST_AUTH_SERIALIZERS = {
"USER_DETAILS_SERIALIZER": "accounts.serializers.UserSerializer",
'TOKEN_SERIALIZER': 'accounts.serializers.TokenSerializer',
}
REST_AUTH_REGISTER_SERIALIZERS = {
'REGISTER_SERIALIZER': 'accounts.serializers.CustomRegisterSerializer',
}
CSRF_COOKIE_NAME = "csrftoken"
ACCOUNT_UNIQUE_EMAIL = True
ACCOUNT_EMAIL_REQUIRED = False
ACCOUNT_AUTHENTICATION_METHOD = "username_email"
ACCOUNT_EMAIL_VERIFICATION = "optional"
ACCOUNT_USERNAME_REQUIRED = False
AUTH_USER_MODEL = "accounts.User"
# EMAIL
# BUG FIX: without a cast, python-decouple returns strings, and a non-empty
# string such as "False" is truthy -- the TLS/SSL flags and the port must be
# cast (this file already uses cast=bool for DEBUG).
EMAIL_USE_TLS = config("EMAIL_USE_TLS", cast=bool)
EMAIL_USE_SSL = config("EMAIL_USE_SSL", cast=bool)
EMAIL_HOST = config("EMAIL_HOST")
EMAIL_PORT = config("EMAIL_PORT", cast=int)
EMAIL_HOST_USER = config("EMAIL_HOST_USER")
EMAIL_HOST_PASSWORD = config("EMAIL_HOST_PASSWORD")
DEFAULT_FROM_EMAIL = config("DEFAULT_FROM_EMAIL")
# CELERY
CELERY_BROKER_URL = os.environ.get("CELERY_BROKER", "redis://redis:6379/0")
CELERY_RESULT_BACKEND = os.environ.get("CELERY_BROKER", "redis://redis:6379/0")
CELERY_TIMEZONE = 'Asia/Kolkata'
CELERY_TASK_TRACK_STARTED = True
CELERY_TASK_TIME_LIMIT = 30 * 60
# Model
# Paths to the OpenPose MPI model definition and pretrained weights
# (relative to the project root -- presumably the Django BASE_DIR; confirm).
PROTO_FILE = "pose_analyser/pose/mpi/pose_deploy_linevec_faster_4_stages.prototxt"
WEIGHTS_FILE = "pose_analyser/pose/mpi/pose_iter_160000.caffemodel"
# Read the network into memory once, at settings-import time.
# NOTE(review): this runs on every Django process start (including manage.py
# commands) -- consider lazy-loading if startup cost matters.
NETWORK = cv2.dnn.readNetFromCaffe(PROTO_FILE, WEIGHTS_FILE) | Backend/yoga_pose_analyser/settings.py | from decouple import config
import os
from pathlib import Path
import cv2
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = config('SECRET_KEY')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = config('DEBUG', default=False, cast=bool)
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'corsheaders',
'rest_framework',
'rest_framework.authtoken',
'rest_auth',
'django.contrib.sites',
'allauth',
'allauth.account',
'rest_auth.registration',
"nested_admin",
'drf_yasg',
'widget_tweaks',
'channels',
'accounts',
'core',
'social',
'pose_analyser',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'corsheaders.middleware.CorsMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'yoga_pose_analyser.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'yoga_pose_analyser.wsgi.application'
ASGI_APPLICATION = 'yoga_pose_analyser.asgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = "Asia/Kolkata"
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [os.path.join(BASE_DIR, "static")]
MEDIA_URL = "/media/"
MEDIA_ROOT = os.path.join(BASE_DIR, "media")
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
STATIC_ROOT = os.path.join(PROJECT_DIR, "static")
CORS_ORIGIN_ALLOW_ALL = True
SITE_ID = 1
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
REST_FRAMEWORK = {
"DEFAULT_PERMISSION_CLASSES": ("rest_framework.permissions.AllowAny",),
"DEFAULT_AUTHENTICATION_CLASSES": (
"rest_framework.authentication.TokenAuthentication",
"rest_framework.authentication.SessionAuthentication",
),
"DEFAULT_FILTER_BACKENDS": ["django_filters.rest_framework.DjangoFilterBackend"],
"DATETIME_FORMAT": "%b %d %Y %H:%M:%S",
'DEFAULT_THROTTLE_RATES': {
'anon': '10000/day',
'user': '100000/day'
},
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination',
'PAGE_SIZE': 20
}
REST_AUTH_SERIALIZERS = {
"USER_DETAILS_SERIALIZER": "accounts.serializers.UserSerializer",
'TOKEN_SERIALIZER': 'accounts.serializers.TokenSerializer',
}
REST_AUTH_REGISTER_SERIALIZERS = {
'REGISTER_SERIALIZER': 'accounts.serializers.CustomRegisterSerializer',
}
CSRF_COOKIE_NAME = "csrftoken"
ACCOUNT_UNIQUE_EMAIL = True
ACCOUNT_EMAIL_REQUIRED = False
ACCOUNT_AUTHENTICATION_METHOD = "username_email"
ACCOUNT_EMAIL_VERIFICATION = "optional"
ACCOUNT_USERNAME_REQUIRED = False
AUTH_USER_MODEL = "accounts.User"
# EMAIL
EMAIL_USE_TLS = config("EMAIL_USE_TLS")
EMAIL_USE_SSL = config("EMAIL_USE_SSL")
EMAIL_HOST = config("EMAIL_HOST")
EMAIL_PORT = config("EMAIL_PORT")
EMAIL_HOST_USER = config("EMAIL_HOST_USER")
EMAIL_HOST_PASSWORD = config("EMAIL_HOST_PASSWORD")
DEFAULT_FROM_EMAIL = config("DEFAULT_FROM_EMAIL")
# CELERY
CELERY_BROKER_URL = os.environ.get("CELERY_BROKER", "redis://redis:6379/0")
CELERY_RESULT_BACKEND = os.environ.get("CELERY_BROKER", "redis://redis:6379/0")
CELERY_TIMEZONE = 'Asia/Kolkata'
CELERY_TASK_TRACK_STARTED = True
CELERY_TASK_TIME_LIMIT = 30 * 60
# Model
# Specify the paths for the 2 files
PROTO_FILE = "pose_analyser/pose/mpi/pose_deploy_linevec_faster_4_stages.prototxt"
WEIGHTS_FILE = "pose_analyser/pose/mpi/pose_iter_160000.caffemodel"
# Read the network into Memory
NETWORK = cv2.dnn.readNetFromCaffe(PROTO_FILE, WEIGHTS_FILE) | 0.462959 | 0.086131 |
import sys
def set_path(path: str):
    """Prepend *path* to sys.path unless it is already present."""
    if path not in sys.path:
        sys.path.insert(0, path)
# Programmatically add the 'openai_ros' package directory to sys.path so the
# import below resolves (alternatively, set PYTHONPATH).
set_path('/media/suresh/research/awesome-robotics/active-slam/catkin_ws/src/openai-rosbot-env/openai_ros/src')
from openai_ros.task_envs.turtlebot3 import turtlebot3_localize
import gym
import rospy
import argparse
import datetime
import stable_baselines3
import custom_baselines
from stable_baselines3.common.callbacks import CallbackList, CheckpointCallback, EvalCallback
from stable_baselines3.common.env_checker import check_env
from stable_baselines3.common.monitor import Monitor
def train_network(env, file_path: str, agent: str):
    """
    Train the RL agent for the localization task and store the policy/agent.

    :param env: openai gym (TurtleBot3LocalizeEnv) instance
    :param file_path: location to store the trained agent
    :param agent: agent to train -- 'PPO' (stable_baselines3) or 'RAND'
                  (custom_baselines)
    """
    dt_str = datetime.datetime.now().strftime('%d_%m_%Y_%H_%M')
    # Monitor wraps the env to record episode statistics for the callbacks
    env = Monitor(env, filename=None)
    train_steps = 30000
    if agent == 'PPO':
        log_dir = './logs/PPO/'
        model = stable_baselines3.PPO('MlpPolicy', env, verbose=1,
                                      tensorboard_log=log_dir + 'tensorboard/')
    elif agent == 'RAND':
        log_dir = './logs/RAND/'
        model = custom_baselines.RAND(env, verbose=1,
                                      tensorboard_log=log_dir + 'tensorboard/')
    else:
        # BUG FIX: the original returned silently here, hiding typos in `agent`
        print('unknown agent %r: expected PPO or RAND, skipping training' % agent)
        return
    # snapshot the model every 1000 steps under a timestamped prefix
    checkpoint_callback = CheckpointCallback(save_freq=1000,
                                             save_path=log_dir + 'checkpoints/',
                                             name_prefix=dt_str + 'rl_model')
    # periodically evaluate and keep the best-performing model
    eval_callback = EvalCallback(env,
                                 best_model_save_path=log_dir + 'best_model',
                                 log_path=log_dir + 'results',
                                 eval_freq=500,
                                 deterministic=True,
                                 render=False)
    # create the callback listeners list
    callback_list = CallbackList([checkpoint_callback, eval_callback])
    model.learn(total_timesteps=train_steps, callback=callback_list,
                tb_log_name=dt_str + '_run')
    model.save(file_path)
    print('training finished')
def eval_network(env, file_path: str, agent: str):
    """
    Evaluate a pretrained RL agent for the localization task.

    :param env: openai gym (TurtleBot3LocalizeEnv) instance
    :param file_path: location to load the pretrained agent from
    :param agent: agent type it was trained as -- 'PPO' or 'RAND'
    """
    eval_steps = 500
    if agent == 'PPO':
        model = stable_baselines3.PPO.load(file_path)
    elif agent == 'RAND':
        model = custom_baselines.RAND.load(file_path)
    else:
        # BUG FIX: the original returned silently here, hiding typos in `agent`
        print('unknown agent %r: expected PPO or RAND, skipping evaluation' % agent)
        return
    obs = env.reset()
    for _ in range(eval_steps):
        # deterministic policy for reproducible evaluation runs
        action, _states = model.predict(obs, deterministic=True)
        obs, reward, done, info = env.step(action)
        env.render()
        if done:
            obs = env.reset()
    env.close()
    print('evaluation finished')
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Train/Evaluate localization RL agent')
    parser.add_argument('--file_path', dest='file_path', \
                        required=False, help='full path for location to store/load agent', \
                        default='./rand_turtlebot3_localize')
    # NOTE(review): argparse stores --train as a *string*, and any non-empty
    # string (e.g. "False") is truthy -- consider action='store_true'.
    parser.add_argument('--train', dest='is_train', required=False, \
                        default=True, help='whether to train the agent')
    parser.add_argument('--agent', dest='agent', required=False, \
                        default='RAND', help='agent to use for train/eval')
    args = parser.parse_args()
    # create a new ros node
    rospy.init_node('turtlebot3_localization')
    # create a new gym turtlebot3 localization environment
    env = gym.make('TurtleBot3Localize-v0')
    # check that our environment follows the gym interface
    check_env(env)
    if args.is_train:
        train_network(env, args.file_path, args.agent)
    eval_network(env, args.file_path, args.agent)
    # prevent the code from exiting until a shutdown signal (ctrl+c) is received
rospy.spin() | localization/src/localization_rl_agent.py |
import sys
def set_path(path: str):
try:
sys.path.index(path)
except ValueError:
sys.path.insert(0, path)
# set programatically the path to 'openai_ros' directory (alternately can also set PYTHONPATH)
set_path('/media/suresh/research/awesome-robotics/active-slam/catkin_ws/src/openai-rosbot-env/openai_ros/src')
from openai_ros.task_envs.turtlebot3 import turtlebot3_localize
import gym
import rospy
import argparse
import datetime
import stable_baselines3
import custom_baselines
from stable_baselines3.common.callbacks import CallbackList, CheckpointCallback, EvalCallback
from stable_baselines3.common.env_checker import check_env
from stable_baselines3.common.monitor import Monitor
def train_network(env, file_path: str, agent: str):
    """
    Train the RL agent for localization task and store the policy/agent

    :params env: openai gym (TurtleBot3LocalizeEnv) instance
            str file_path: location to store the trained agent
            agent: stable_baselines3 agent to be used for training;
                   'PPO' or 'RAND' are supported, any other value is a no-op
    """
    # timestamp used to tag checkpoint files and tensorboard runs
    dt_str = datetime.datetime.now().strftime('%d_%m_%Y_%H_%M')
    # Monitor records episode rewards/lengths; filename=None keeps them in memory
    env = Monitor(env, filename=None)
    train_steps = 30000
    if agent == 'PPO':
        log_dir = './logs/PPO/'
        model = stable_baselines3.PPO('MlpPolicy', env, verbose=1,
                                      tensorboard_log=log_dir + 'tensorboard/')
    elif agent == 'RAND':
        log_dir = './logs/RAND/'
        # NOTE(review): RAND is assumed to follow the stable_baselines3 API
        # (learn/save) — confirm against custom_baselines
        model = custom_baselines.RAND(env, verbose=1,
                                      tensorboard_log=log_dir + 'tensorboard/')
    else:
        # unsupported agent name: train nothing
        return
    # periodically snapshot model weights during training
    checkpoint_callback = CheckpointCallback(save_freq=1000,
                                             save_path=log_dir + 'checkpoints/',
                                             name_prefix=dt_str + 'rl_model')
    # evaluate every 500 steps and keep the best-scoring model
    eval_callback = EvalCallback(env,
                                 best_model_save_path=log_dir + 'best_model',
                                 log_path=log_dir + 'results',
                                 eval_freq=500,
                                 deterministic=True,
                                 render=False)
    # create the callback listeners list
    callback_list = CallbackList([checkpoint_callback, eval_callback])
    model.learn(total_timesteps=train_steps, callback=callback_list,
                tb_log_name=dt_str + '_run')
    model.save(file_path)
    print('training finished')
def eval_network(env, file_path: str, agent: str):
    """
    Evaluate the pretrained RL agent for localization task

    :params env: openai gym (TurtleBot3LocalizeEnv) instance
            str file_path: location to load the pretrained agent
            agent: stable_baselines3 agent to be used for evaluation
    """
    # dispatch table mapping agent names to their loader callables
    loaders = {
        'PPO': stable_baselines3.PPO.load,
        'RAND': custom_baselines.RAND.load,
    }
    loader = loaders.get(agent)
    if loader is None:
        # unsupported agent name: nothing to evaluate
        return
    model = loader(file_path)
    eval_steps = 500
    observation = env.reset()
    for _ in range(eval_steps):
        action, _state = model.predict(observation, deterministic=True)
        observation, _reward, episode_done, _info = env.step(action)
        env.render()
        if episode_done:
            # start a fresh episode and keep stepping until eval_steps is reached
            observation = env.reset()
    env.close()
    print('evaluation finished')
if __name__ == '__main__':
    def _str2bool(value) -> bool:
        """Parse a command-line boolean.

        Fixes the original bug where argparse stored raw strings, so
        '--train False' evaluated truthy (any non-empty string is True).
        """
        if isinstance(value, bool):
            return value
        return str(value).strip().lower() in ('true', '1', 'yes', 'y')

    parser = argparse.ArgumentParser(description='Train/Evaluate localization RL agent')
    parser.add_argument('--file_path', dest='file_path',
                        required=False, help='full path for location to store/load agent',
                        default='./rand_turtlebot3_localize')
    parser.add_argument('--train', dest='is_train', required=False,
                        default=True, type=_str2bool,
                        help='whether to train the agent (true/false)')
    parser.add_argument('--agent', dest='agent', required=False,
                        default='RAND', help='agent to use for train/eval')
    args = parser.parse_args()
    # create a new ros node
    rospy.init_node('turtlebot3_localization')
    # create a new gym turtlebot3 localization environment
    env = gym.make('TurtleBot3Localize-v0')
    # check our environment follows the gym interface
    check_env(env)
    if args.is_train:
        train_network(env, args.file_path, args.agent)
    eval_network(env, args.file_path, args.agent)
    # prevent the code from exiting until a shutdown signal (ctrl+c) is received
    rospy.spin()
from typing import Iterable, Dict
class RepositoryStat:
    """Contribution statistics for a single repository."""
    def __init__(self, name_with_owner: str):
        self.name_with_owner: str = name_with_owner
        self.pull_count: int = 0
        self.issue_count: int = 0
        self.commit_count: int = 0
        self.review_count: int = 0
        self.stars: int = 0
    def contrib_sum(self) -> int:
        """Total contributions across PRs, issues, commits and reviews."""
        return (
            self.pull_count + self.issue_count + self.commit_count + self.review_count
        )
    def _summary(self) -> str:
        # shared formatting tail used by both __repr__ and __str__
        return (
            f"★{self.stars}, PRs={self.pull_count}, issues={self.issue_count}"
            f", commits={self.commit_count}, reviews={self.review_count}"
        )
    def __repr__(self):
        return f"RepoStat({self.name_with_owner}, {self._summary()})"
    def __str__(self):
        return f"{self.name_with_owner}: {self._summary()}"
class StatProcessor:
    """Aggregate per-repository statistics from GraphQL contribution responses."""
    def __init__(self, contributions):
        # raw GraphQL response pages, each shaped like
        # {"data": {"viewer": {"contributionsCollection": {...}}}}
        self.c = contributions
        # nameWithOwner -> RepositoryStat accumulator
        self.m: Dict[str, RepositoryStat] = {}
    def get_stats(self) -> Iterable[RepositoryStat]:
        """Process every contribution type and return repos sorted by activity."""
        self.process_issues()
        self.process_pulls()
        self.process_reviews()
        self.process_commits()
        return sorted(self.m.values(), key=lambda x: x.contrib_sum(), reverse=True)
    def _collection(self, page, key):
        """Return entry *key* of the contributionsCollection in one response page."""
        return page["data"]["viewer"]["contributionsCollection"][key]
    def _repo(self, repo_node) -> RepositoryStat:
        """Get or create the accumulator for *repo_node* and refresh its stars."""
        nwo = repo_node["nameWithOwner"]
        repo = self.m.setdefault(nwo, RepositoryStat(nwo))
        repo.stars = repo_node["stargazers"]["totalCount"]
        return repo
    def process_issues(self):
        """Count issue contributions per repository."""
        for page in self.c:
            for edge in self._collection(page, "issueContributions")["edges"]:
                node = edge["node"]["issue"]
                self._repo(node["repository"]).issue_count += 1
    def process_pulls(self):
        """Count pull-request contributions per repository (skips empty edges)."""
        for page in self.c:
            for edge in self._collection(page, "pullRequestContributions")["edges"]:
                if not edge:
                    continue
                node = edge["node"]["pullRequest"]
                self._repo(node["repository"]).pull_count += 1
    def process_commits(self):
        """Sum commit counts per repository (the API pre-groups them by repo)."""
        for page in self.c:
            by_repo = self._collection(page, "commitContributionsByRepository")
            for repo_contrib in by_repo:
                # fix: the original inner loop shadowed the outer page variable `c`
                for edge in repo_contrib["contributions"]["edges"]:
                    node = edge["node"]
                    self._repo(node["repository"]).commit_count += node["commitCount"]
    def process_reviews(self):
        """Count pull-request review contributions per repository."""
        for page in self.c:
            for edge in self._collection(
                    page, "pullRequestReviewContributions")["edges"]:
                node = edge["node"]
                self._repo(node["repository"]).review_count += 1
from typing import Iterable, Dict
class RepositoryStat:
""" Contribution statistics for a repository """
def __init__(self, name_with_owner: str):
self.name_with_owner: str = name_with_owner
self.pull_count: int = 0
self.issue_count: int = 0
self.commit_count: int = 0
self.review_count: int = 0
self.stars: int = 0
def contrib_sum(self):
return (
self.pull_count + self.issue_count + self.commit_count + self.review_count
)
def __repr__(self):
return (
f"RepoStat({self.name_with_owner}, ★{self.stars}, PRs={self.pull_count}"
f", issues={self.issue_count}, commits={self.commit_count}"
f", reviews={self.review_count})"
)
def __str__(self):
return (
f"{self.name_with_owner}: ★{self.stars}, PRs={self.pull_count}"
f", issues={self.issue_count}, commits={self.commit_count}"
f", reviews={self.review_count}"
)
class StatProcessor:
""" compute statistics for a specific data set """
def __init__(self, contributions):
self.c = contributions
self.m: Dict[str, RepositoryStat] = {}
def get_stats(self) -> Iterable[RepositoryStat]:
self.process_issues()
self.process_pulls()
self.process_reviews()
self.process_commits()
return sorted(self.m.values(), key=lambda x: x.contrib_sum(), reverse=True)
def process_issues(self):
for c in self.c:
issues = c["data"]["viewer"]["contributionsCollection"][
"issueContributions"
]["edges"]
for i in issues:
n = i["node"]["issue"]
# n["title"]
nwo = n["repository"]["nameWithOwner"]
repo = self.m.setdefault(nwo, RepositoryStat(nwo))
repo.issue_count += 1
repo.stars = n["repository"]["stargazers"]["totalCount"]
def process_pulls(self):
for c in self.c:
prs = c["data"]["viewer"]["contributionsCollection"][
"pullRequestContributions"
]["edges"]
for p in prs:
if not p: continue
n = p["node"]["pullRequest"]
# n["title"]
nwo = n["repository"]["nameWithOwner"]
repo = self.m.setdefault(nwo, RepositoryStat(nwo))
repo.pull_count += 1
repo.stars = n["repository"]["stargazers"]["totalCount"]
def process_commits(self):
for c in self.c:
commit_contrib = c["data"]["viewer"]["contributionsCollection"][
"commitContributionsByRepository"
]
for repo_contrib in commit_contrib:
contributions = repo_contrib["contributions"]["edges"]
for c in contributions:
n = c["node"]
nwo = n["repository"]["nameWithOwner"]
repo = self.m.setdefault(nwo, RepositoryStat(nwo))
total_commits = n["commitCount"]
repo.commit_count += total_commits
repo.stars = n["repository"]["stargazers"]["totalCount"]
def process_reviews(self):
for c in self.c:
reviews = c["data"]["viewer"]["contributionsCollection"][
"pullRequestReviewContributions"
]["edges"]
for r in reviews:
n = r["node"]
nwo = n["repository"]["nameWithOwner"]
repo = self.m.setdefault(nwo, RepositoryStat(nwo))
repo.review_count += 1
repo.stars = n["repository"]["stargazers"]["totalCount"] | 0.790571 | 0.173919 |
import cv2 as cv
import numpy as np
import torch
from detectron2.modeling import GeneralizedRCNN
from detectron2.layers import batched_nms
from torch.onnx import export
from train_net import DetectionCheckpointer, default_argument_parser, setup
class RCNNExporter(GeneralizedRCNN):
    """GeneralizedRCNN wrapper that exports a CenterMask-style model to ONNX.

    The network is split into three separately traced graphs (FPN backbone,
    FCOS head, mask ROI head); `forward` dispatches between them via the
    `inference_type` string so each piece can be exported on its own.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
    def preprocess_image(self, x):
        """
        Normalize, pad and batch the input images.
        """
        # NOTE(review): in-place ops mutate the caller's tensor
        x -= self.pixel_mean.view(-1, 3, 1, 1)
        x /= self.pixel_std.view(-1, 3, 1, 1)
        return x
    def forward(self, inference_type, *x):
        # dispatch on which sub-graph is being traced/exported
        if inference_type == 'fpn':
            # normalize the raw image batch and run the backbone
            inputs = self.preprocess_image(*x)
            features = self.backbone(inputs)
            return features
        elif inference_type == 'fcoshead':
            # run the FCOS head on the given FPN feature maps
            logits_pred, reg_pred, ctrness_pred, _ = self.proposal_generator.fcos_head(*x)
            strides = self.proposal_generator.strides
            # one entry per pyramid level:
            # (locations, logits, regression, centerness, stride)
            bundle = (self.precal_locations, logits_pred, reg_pred, ctrness_pred, strides)
            scores = []
            centerness = []
            detections = []
            for l, o, r, c, s in zip(*bundle):
                # regression is predicted in stride units; scale to pixels
                r *= s
                N, C, H, W = o.shape
                # flatten spatial dims: (N, H*W, C) class probabilities
                box_cls = o.permute(0, 2, 3, 1).reshape(N, -1, C).sigmoid()
                box_regression = r.view(N, 4, H, W).permute(0, 2, 3, 1).reshape(N, -1, 4)
                ctrness = c.view(N, 1, H, W).permute(0, 2, 3, 1).reshape(N, -1).sigmoid()
                # decode (left, top, right, bottom) distances from the anchor
                # locations into absolute corner boxes
                detection = torch.stack([
                    l[:, 0][None, ...] - box_regression[..., 0],
                    l[:, 1][None, ...] - box_regression[..., 1],
                    l[:, 0][None, ...] + box_regression[..., 2],
                    l[:, 1][None, ...] + box_regression[..., 3],
                ], dim=-1)
                scores.append(box_cls)
                centerness.append(ctrness)
                detections.append(detection)
            # concatenate all pyramid levels along the location dimension
            scores = torch.cat(scores, 1)
            centerness = torch.cat(centerness, 1)
            detections = torch.cat(detections, 1)
            # boxes divided by 128 — presumably normalizing by the 128x128
            # export input size used in export(); TODO confirm
            return scores, detections / 128, centerness
        elif inference_type == 'centerhead':
            # mask ROI head on pooled features; sigmoid gives per-pixel probs
            mask = self.roi_heads.mask_head(*x).sigmoid()
            return mask
    def export_fpn(self, input_shape, export_name):
        """Trace the backbone on a random batch and export it to *export_name*.

        Returns the feature dict so later stages can be traced on real shapes.
        """
        inputs = torch.randn(*input_shape)
        features = self.forward('fpn', inputs)
        export(self, ('fpn', inputs),
               export_name,
               input_names=['images'],
               output_names=list(features.keys()),
               opset_version=11)
        return features
    def export_fcoshead(self, features, export_name, input_names=None, output_names=None):
        """Export the FCOS head; *features* are the backbone outputs to trace with."""
        # cache the per-level anchor locations used inside forward('fcoshead')
        self.precal_locations = self.proposal_generator.compute_locations(features)
        proposals = self.forward('fcoshead', features)
        export(self, ('fcoshead', features),
               export_name,
               input_names=input_names,
               output_names=output_names,
               opset_version=11)
        return proposals
    def export_roihead(self, input_shape, export_name):
        """Export the mask ROI head traced on a random pooled-feature tensor."""
        inputs = torch.randn(*input_shape)
        mask = self.forward('centerhead', inputs)
        export(self, ('centerhead', inputs), export_name,
               input_names=['roipool'],
               output_names=['mask'],
               opset_version=11)
        return mask
    def export(self, cfg, export_dir='.'):
        """Export all three sub-graphs into *export_dir* as separate ONNX files."""
        # backbone traced on a fixed 1x3x128x128 input
        feat = self.export_fpn(
            (1, 3, 128, 128),
            f'{export_dir}/centermask_fpn.onnx')
        # keep only the pyramid levels the FCOS head consumes
        feat = [feat[k] for k in cfg.MODEL.FCOS.IN_FEATURES]
        scores, detections, centerness = self.export_fcoshead(
            feat,
            f'{export_dir}/centermask_fcos.onnx',
            cfg.MODEL.FCOS.IN_FEATURES,
            ['logit', 'box', 'centerness'])
        mask_inc = cfg.MODEL.ROI_MASK_HEAD.CONV_DIM
        mask_size = cfg.MODEL.ROI_MASK_HEAD.POOLER_RESOLUTION
        self.export_roihead(
            (1, mask_inc, mask_size, mask_size),
            f'{export_dir}/centermask_roi.onnx')
def main(args):
    """Build the exporter model, load pretrained weights and export ONNX files."""
    cfg = setup(args)
    model = RCNNExporter(cfg)
    # restore weights from cfg.MODEL.WEIGHTS (or resume from OUTPUT_DIR)
    DetectionCheckpointer(model, save_dir=cfg.OUTPUT_DIR).resume_or_load(
        cfg.MODEL.WEIGHTS, resume=args.resume
    )
    # export on CPU in eval mode so tracing is deterministic
    model = model.eval().to('cpu')
    model.export(cfg, cfg.OUTPUT_DIR)
if __name__ == "__main__":
args = default_argument_parser().parse_args()
main(args) | rcnn_export.py | import cv2 as cv
import numpy as np
import torch
from detectron2.modeling import GeneralizedRCNN
from detectron2.layers import batched_nms
from torch.onnx import export
from train_net import DetectionCheckpointer, default_argument_parser, setup
class RCNNExporter(GeneralizedRCNN):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def preprocess_image(self, x):
"""
Normalize, pad and batch the input images.
"""
x -= self.pixel_mean.view(-1, 3, 1, 1)
x /= self.pixel_std.view(-1, 3, 1, 1)
return x
def forward(self, inference_type, *x):
if inference_type == 'fpn':
inputs = self.preprocess_image(*x)
features = self.backbone(inputs)
return features
elif inference_type == 'fcoshead':
logits_pred, reg_pred, ctrness_pred, _ = self.proposal_generator.fcos_head(*x)
strides = self.proposal_generator.strides
bundle = (self.precal_locations, logits_pred, reg_pred, ctrness_pred, strides)
scores = []
centerness = []
detections = []
for l, o, r, c, s in zip(*bundle):
r *= s
N, C, H, W = o.shape
box_cls = o.permute(0, 2, 3, 1).reshape(N, -1, C).sigmoid()
box_regression = r.view(N, 4, H, W).permute(0, 2, 3, 1).reshape(N, -1, 4)
ctrness = c.view(N, 1, H, W).permute(0, 2, 3, 1).reshape(N, -1).sigmoid()
detection = torch.stack([
l[:, 0][None, ...] - box_regression[..., 0],
l[:, 1][None, ...] - box_regression[..., 1],
l[:, 0][None, ...] + box_regression[..., 2],
l[:, 1][None, ...] + box_regression[..., 3],
], dim=-1)
scores.append(box_cls)
centerness.append(ctrness)
detections.append(detection)
scores = torch.cat(scores, 1)
centerness = torch.cat(centerness, 1)
detections = torch.cat(detections, 1)
return scores, detections / 128, centerness
elif inference_type == 'centerhead':
mask = self.roi_heads.mask_head(*x).sigmoid()
return mask
def export_fpn(self, input_shape, export_name):
inputs = torch.randn(*input_shape)
features = self.forward('fpn', inputs)
export(self, ('fpn', inputs),
export_name,
input_names=['images'],
output_names=list(features.keys()),
opset_version=11)
return features
def export_fcoshead(self, features, export_name, input_names=None, output_names=None):
self.precal_locations = self.proposal_generator.compute_locations(features)
proposals = self.forward('fcoshead', features)
export(self, ('fcoshead', features),
export_name,
input_names=input_names,
output_names=output_names,
opset_version=11)
return proposals
def export_roihead(self, input_shape, export_name):
inputs = torch.randn(*input_shape)
mask = self.forward('centerhead', inputs)
export(self, ('centerhead', inputs), export_name,
input_names=['roipool'],
output_names=['mask'],
opset_version=11)
return mask
def export(self, cfg, export_dir='.'):
feat = self.export_fpn(
(1, 3, 128, 128),
f'{export_dir}/centermask_fpn.onnx')
feat = [feat[k] for k in cfg.MODEL.FCOS.IN_FEATURES]
scores, detections, centerness = self.export_fcoshead(
feat,
f'{export_dir}/centermask_fcos.onnx',
cfg.MODEL.FCOS.IN_FEATURES,
['logit', 'box', 'centerness'])
mask_inc = cfg.MODEL.ROI_MASK_HEAD.CONV_DIM
mask_size = cfg.MODEL.ROI_MASK_HEAD.POOLER_RESOLUTION
self.export_roihead(
(1, mask_inc, mask_size, mask_size),
f'{export_dir}/centermask_roi.onnx')
def main(args):
cfg = setup(args)
model = RCNNExporter(cfg)
DetectionCheckpointer(model, save_dir=cfg.OUTPUT_DIR).resume_or_load(
cfg.MODEL.WEIGHTS, resume=args.resume
)
model = model.eval().to('cpu')
model.export(cfg, cfg.OUTPUT_DIR)
if __name__ == "__main__":
args = default_argument_parser().parse_args()
main(args) | 0.877798 | 0.395718 |
from common_fixtures import * # NOQA
def test_register_physical_host(admin_client):
    """Registering a simulator agent yields one host bound to a physical host."""
    uri = 'sim://{}'.format(random_str())
    agent = admin_client.create_agent(uri=uri)
    agent = admin_client.wait_success(agent)
    assert agent.state == 'active'
    # host records are created asynchronously after activation; poll briefly
    # (fix: the original fetched hosts once before the loop and discarded it)
    hosts = agent.hosts()
    for _ in range(10):
        if hosts:
            break
        time.sleep(0.5)
        hosts = agent.hosts()
    assert len(hosts) == 1
    host = hosts[0]
    assert host.physicalHostId is not None
    assert host.physicalHost() is not None
def test_register_multiple_physical_host(admin_client):
    """An agent configured with 2 hosts registers both under one physical host."""
    scope = 'io.cattle.platform.agent.connection.simulator' \
        '.AgentConnectionSimulator'
    uri = 'sim://{}'.format(random_str())
    agent = admin_client.create_agent(uri=uri, data={
        scope: {
            'hosts': 2
        }
    })
    agent = admin_client.wait_success(agent)
    assert agent.state == 'active'
    # host records are created asynchronously after activation; poll briefly
    # (fix: the original fetched hosts once before the loop and discarded it)
    hosts = agent.hosts()
    for _ in range(10):
        if hosts:
            break
        time.sleep(0.5)
        hosts = agent.hosts()
    assert len(hosts) == 2
    host1 = hosts[0]
    host2 = hosts[1]
    assert host1.physicalHostId is not None
    assert host2.physicalHostId is not None
    # both simulated hosts must share the same physical host record
    assert host1.physicalHostId == host2.physicalHostId
def test_add_physical_host(admin_client):
    """With addPhysicalHost=False the host has no physical host; flipping the
    flag and reconnecting attaches one."""
    scope = 'io.cattle.platform.agent.connection.simulator' \
        '.AgentConnectionSimulator'
    uri = 'sim://{}'.format(random_str())
    agent = admin_client.create_agent(uri=uri, data={
        scope: {
            'addPhysicalHost': False
        }
    })
    agent = admin_client.wait_success(agent)
    assert agent.state == 'active'
    # host records are created asynchronously after activation; poll briefly
    # (fix: the original fetched hosts once before the loop and discarded it)
    hosts = agent.hosts()
    for _ in range(10):
        if hosts:
            break
        time.sleep(0.5)
        hosts = agent.hosts()
    assert len(hosts) == 1
    host1 = hosts[0]
    assert host1.physicalHostId is None
    # enable physical host creation and force the agent to reconnect
    agent.data[scope]['addPhysicalHost'] = True
    agent = admin_client.update(agent, {
        'data': agent.data
    })
    assert agent.data[scope]['addPhysicalHost']
    agent = admin_client.wait_success(agent.reconnect())
    assert agent.state == 'active'
    hosts = agent.hosts()
    assert len(hosts) == 1
    assert hosts[0].physicalHostId is not None
    assert hosts[0].physicalHost() is not None
def test_register_physical_host(admin_client):
uri = 'sim://{}'.format(random_str())
agent = admin_client.create_agent(uri=uri)
agent = admin_client.wait_success(agent)
assert agent.state == 'active'
hosts = agent.hosts()
for _ in range(10):
hosts = agent.hosts()
if len(hosts) == 0:
time.sleep(0.5)
else:
break
assert len(hosts) == 1
host = hosts[0]
assert host.physicalHostId is not None
assert hosts[0].physicalHost() is not None
def test_register_multiple_physical_host(admin_client):
scope = 'io.cattle.platform.agent.connection.simulator' \
'.AgentConnectionSimulator'
uri = 'sim://{}'.format(random_str())
agent = admin_client.create_agent(uri=uri, data={
scope: {
'hosts': 2
}
})
agent = admin_client.wait_success(agent)
assert agent.state == 'active'
hosts = agent.hosts()
for _ in range(10):
hosts = agent.hosts()
if len(hosts) == 0:
time.sleep(0.5)
else:
break
assert len(hosts) == 2
host1 = hosts[0]
host2 = hosts[1]
assert host1.physicalHostId is not None
assert host2.physicalHostId is not None
assert host1.physicalHostId == host2.physicalHostId
def test_add_physical_host(admin_client):
scope = 'io.cattle.platform.agent.connection.simulator' \
'.AgentConnectionSimulator'
uri = 'sim://{}'.format(random_str())
agent = admin_client.create_agent(uri=uri, data={
scope: {
'addPhysicalHost': False
}
})
agent = admin_client.wait_success(agent)
assert agent.state == 'active'
hosts = agent.hosts()
for _ in range(10):
hosts = agent.hosts()
if len(hosts) == 0:
time.sleep(0.5)
else:
break
assert len(hosts) == 1
host1 = hosts[0]
assert host1.physicalHostId is None
agent.data[scope]['addPhysicalHost'] = True
agent = admin_client.update(agent, {
'data': agent.data
})
assert agent.data[scope]['addPhysicalHost']
agent = admin_client.wait_success(agent.reconnect())
assert agent.state == 'active'
hosts = agent.hosts()
assert len(hosts) == 1
assert hosts[0].physicalHostId is not None
assert hosts[0].physicalHost() is not None | 0.484624 | 0.366845 |
from feedreader.feeds.base import (PREFERRED_LINK_TYPES, PREFERRED_CONTENT_TYPES,
Feed, Item, get_element_text, get_attribute, search_child,
get_descendant, get_descendant_text, get_descendant_datetime,
safe_strip, normalize_spaces, unescape_html)
class Atom03Feed(Feed):
    """Parser for Atom 0.3 feeds (root <feed> in the purl.org/atom/ns# namespace)."""
    __feed__ = 'Atom 0.3'
    @property
    def is_valid(self):
        """True when the wrapped element is an Atom 0.3 <feed> root."""
        # <feed version="0.3" xmlns="http://purl.org/atom/ns#">
        return self._element.tag == '{http://purl.org/atom/ns#}feed'
    @property
    def id(self):
        """Feed <id> text, stripped of surrounding whitespace."""
        return safe_strip(get_descendant_text(self._element, 'id'))
    @property
    def title(self):
        """Feed <title> text with internal whitespace normalized."""
        return normalize_spaces(get_descendant_text(self._element, 'title'))
    @property
    def link(self):
        """href of the preferred rel="alternate" <link> child, or None."""
        link = search_child(self._element, '{http://purl.org/atom/ns#}link',
                            ('rel', 'alternate', 'type', PREFERRED_LINK_TYPES))
        return safe_strip(get_attribute(link, 'href', is_url=True))
    @property
    def description(self):
        """Feed description; Atom 0.3 calls this <tagline>."""
        return get_descendant_text(self._element, 'tagline')
    @property
    def published(self):
        """Publication timestamp; Atom 0.3 calls this <issued>."""
        return get_descendant_datetime(self._element, 'issued')
    @property
    def updated(self):
        """Last-update timestamp; Atom 0.3 calls this <modified>."""
        return get_descendant_datetime(self._element, 'modified')
    @property
    def entries(self):
        """All child <entry> elements wrapped as Atom03Item objects."""
        node_name = '{http://purl.org/atom/ns#}entry'
        return [Atom03Item(item) for item in self._element.iterchildren(tag=node_name)]
class Atom03Item(Item):
    """A single <entry> element of an Atom 0.3 feed."""
    @property
    def id(self):
        """Entry <id> text, stripped of surrounding whitespace."""
        raw = get_descendant_text(self._element, 'id')
        return safe_strip(raw)
    @property
    def title(self):
        """Entry <title> with HTML entities unescaped and whitespace normalized."""
        raw = get_descendant_text(self._element, 'title')
        return normalize_spaces(unescape_html(raw))
    @property
    def link(self):
        """href of the preferred rel="alternate" <link> child, or None."""
        node = search_child(self._element, '{http://purl.org/atom/ns#}link',
                            ('rel', 'alternate', 'type', PREFERRED_LINK_TYPES))
        return safe_strip(get_attribute(node, 'href', is_url=True))
    @property
    def author_name(self):
        """Author display name from <author><name>."""
        raw = get_descendant_text(self._element, 'author', 'name')
        return normalize_spaces(raw)
    @property
    def author_email(self):
        """Author e-mail address from <author><email>."""
        raw = get_descendant_text(self._element, 'author', 'email')
        return safe_strip(raw)
    @property
    def author_link(self):
        """Author homepage URL from <author><uri>."""
        raw = get_descendant_text(self._element, 'author', 'uri', is_url=True)
        return safe_strip(raw)
    @property
    def description(self):
        """Entry body text: prefer <content>, fall back to <summary>."""
        node = search_child(self._element, '{http://purl.org/atom/ns#}content',
                            ('type', PREFERRED_CONTENT_TYPES))
        if node is None:
            node = search_child(self._element, '{http://purl.org/atom/ns#}summary',
                                ('type', PREFERRED_CONTENT_TYPES))
        return get_element_text(node)
    @property
    def published(self):
        """Publication timestamp; Atom 0.3 calls this <issued>."""
        return get_descendant_datetime(self._element, 'issued')
    @property
    def updated(self):
        """Last-update timestamp; Atom 0.3 calls this <modified>."""
        return get_descendant_datetime(self._element, 'modified')
Feed, Item, get_element_text, get_attribute, search_child,
get_descendant, get_descendant_text, get_descendant_datetime,
safe_strip, normalize_spaces, unescape_html)
class Atom03Feed(Feed):
__feed__ = 'Atom 0.3'
@property
def is_valid(self):
# <feed version="0.3" xmlns="http://purl.org/atom/ns#">
return self._element.tag == '{http://purl.org/atom/ns#}feed'
@property
def id(self):
return safe_strip(get_descendant_text(self._element, 'id'))
@property
def title(self):
return normalize_spaces(get_descendant_text(self._element, 'title'))
@property
def link(self):
link = search_child(self._element, '{http://purl.org/atom/ns#}link',
('rel', 'alternate', 'type', PREFERRED_LINK_TYPES))
return safe_strip(get_attribute(link, 'href', is_url=True))
@property
def description(self):
return get_descendant_text(self._element, 'tagline')
@property
def published(self):
return get_descendant_datetime(self._element, 'issued')
@property
def updated(self):
return get_descendant_datetime(self._element, 'modified')
@property
def entries(self):
node_name = '{http://purl.org/atom/ns#}entry'
return [Atom03Item(item) for item in self._element.iterchildren(tag=node_name)]
class Atom03Item(Item):
@property
def id(self):
return safe_strip(get_descendant_text(self._element, 'id'))
@property
def title(self):
return normalize_spaces(unescape_html(get_descendant_text(self._element, 'title')))
@property
def link(self):
link = search_child(self._element, '{http://purl.org/atom/ns#}link',
('rel', 'alternate', 'type', PREFERRED_LINK_TYPES))
return safe_strip(get_attribute(link, 'href', is_url=True))
@property
def author_name(self):
return normalize_spaces(get_descendant_text(self._element, 'author', 'name'))
@property
def author_email(self):
return safe_strip(get_descendant_text(self._element, 'author', 'email'))
@property
def author_link(self):
return safe_strip(get_descendant_text(self._element, 'author', 'uri', is_url=True))
@property
def description(self):
content = search_child(self._element, '{http://purl.org/atom/ns#}content',
('type', PREFERRED_CONTENT_TYPES))
if content is None:
content = search_child(self._element, '{http://purl.org/atom/ns#}summary',
('type', PREFERRED_CONTENT_TYPES))
return get_element_text(content)
@property
def published(self):
return get_descendant_datetime(self._element, 'issued')
@property
def updated(self):
return get_descendant_datetime(self._element, 'modified') | 0.737158 | 0.119229 |
import collections
import os.path as osp
import sys
import PIL.Image
import numpy as np
import torch
from torch.utils import data
from .transforms import ImageTransformType, FlipType, apply_transform
class CityScape(data.Dataset):
    """CityScapes semantic-segmentation dataset with crop/flip augmentation.

    Every (image, label) file pair is expanded into one sample per
    (flip_type, transform_type) combination; the train split uses all crops
    and flips while the val split only resizes.
    """
    # index in this array corresponds to the raw labelId in the gtFine PNGs
    class_names = np.array([
        'ego vehicle',
        'rectification border',
        'out of roi',
        'static',
        'dynamic',
        'ground',
        'road',
        'sidewalk',
        'parking',
        'rail track',
        'building',
        'wall',
        'fence',
        'guard rail',
        'bridge',
        'tunnel',
        'pole',
        'polegroup',
        'traffic light',
        'traffic sign',
        'vegetation',
        'terrain',
        'sky',
        'person',
        'rider',
        'car',
        'truck',
        'bus',
        'caravan',
        'trailer',
        'train',
        'motorcycle',
        'bicycle',
        'license plate'
    ])
    # per-channel BGR mean subtracted in transform()
    mean_bgr = np.array([104.00698793, 116.66876762, 122.67891434])
    train_flip_types = [FlipType.Unknown, FlipType.Vertical, FlipType.Horizontal]
    train_transform_types = [
        ImageTransformType.CenterCrop,
        ImageTransformType.BottomRightCrop,
        ImageTransformType.BottomLeftCrop,
        ImageTransformType.TopRightCrop,
        ImageTransformType.TopLeftCrop,
        ImageTransformType.Resize
    ]
    val_flip_types = [FlipType.Unknown]
    val_transform_types = [
        ImageTransformType.Resize
    ]
    # output sample size (height, width)
    final_h = 256
    final_w = 512
    def __init__(self, root, split='train', transform=False):
        """
        :param root: dataset root containing CityScapes/CityScapes
        :param split: which split __len__/__getitem__ serve ('train' or 'val')
        :param transform: when True, __getitem__ returns normalized tensors
        """
        self.root = root
        self.split = split
        self._transform = transform
        dataset_dir = osp.join(self.root, 'CityScapes/CityScapes')
        self.files = collections.defaultdict(list)
        # both splits are always indexed so self.split can be switched later
        # (fix: loop variable no longer shadows the `split` parameter)
        for file_split in ['train', 'val']:
            if file_split == 'train':
                flip_types = self.train_flip_types
                transform_types = self.train_transform_types
            else:
                flip_types = self.val_flip_types
                transform_types = self.val_transform_types
            imgsets_file = osp.join(
                dataset_dir, 'CityScapes_%s.txt' % file_split)
            # fix: context manager instead of leaking the open file handle
            with open(imgsets_file) as listing:
                for did in listing:
                    did = did.strip()
                    city = did.partition('_')[0]
                    img_file = osp.join(
                        dataset_dir,
                        "{0}/{1}/{2}/{3}_{0}.png".format(
                            "leftImg8bit", file_split, city, did))
                    lbl_file = osp.join(
                        dataset_dir,
                        "{0}/{1}/{2}/{3}_{0}_labelIds.png".format(
                            "gtFine", file_split, city, did))
                    # one dataset entry per augmentation combination
                    for flip_type in flip_types:
                        for transform_type in transform_types:
                            self.files[file_split].append({
                                'img': img_file,
                                'lbl': lbl_file,
                                'flip_type': flip_type,
                                'transform_type': transform_type
                            })
    def __len__(self):
        return len(self.files[self.split])
    def __getitem__(self, index):
        """Load, augment and (optionally) normalize sample *index*."""
        data_file = self.files[self.split][index]
        img_file = data_file['img']
        lbl_file = data_file['lbl']
        with PIL.Image.open(img_file) as img, PIL.Image.open(lbl_file) as lbl:
            try:
                new_img, new_lbl = apply_transform(
                    img, lbl, data_file['transform_type'],
                    data_file['flip_type'], self.final_h, self.final_w)
                new_img = np.array(new_img, dtype=np.uint8)
                new_lbl = np.array(new_lbl, dtype=np.int32)
                # 255 marks 'ignore' pixels; remap to -1 for the loss
                new_lbl[new_lbl == 255] = -1
            except Exception:
                # fix: the original bare except printed type(new_img) /
                # type(new_lbl), raising NameError when apply_transform itself
                # failed; log only values that are guaranteed to be bound
                print("Unexpected error:", sys.exc_info()[0])
                print(f"Current index {index} img_file: {img_file}")
                print(f"Current index {index} lbl_file: {lbl_file}")
                raise
        if self._transform:
            return self.transform(new_img, new_lbl)
        return new_img, new_lbl
    def transform(self, img, lbl):
        """Convert an (H, W, 3) RGB uint8 image and int label map to tensors."""
        img = img[:, :, ::-1]  # RGB -> BGR
        img = img.astype(np.float64)
        img -= self.mean_bgr
        img = img.transpose(2, 0, 1)  # HWC -> CHW
        img = torch.from_numpy(img).float()
        lbl = torch.from_numpy(lbl).long()
        return img, lbl
    def untransform(self, img, lbl):
        """Inverse of transform(): tensors back to displayable numpy arrays."""
        img = img.numpy()
        img = img.transpose(1, 2, 0)
        img += self.mean_bgr
        img = img.astype(np.uint8)
        img = img[:, :, ::-1]
        lbl = lbl.numpy()
        return img, lbl
import os.path as osp
import sys
import PIL.Image
import numpy as np
import torch
from torch.utils import data
from .transforms import ImageTransformType, FlipType, apply_transform
class CityScape(data.Dataset):
class_names = np.array([
'ego vehicle',
'rectification border',
'out of roi',
'static',
'dynamic',
'ground',
'road',
'sidewalk',
'parking',
'rail track',
'building',
'wall',
'fence',
'guard rail',
'bridge',
'tunnel',
'pole',
'polegroup',
'traffic light',
'traffic sign',
'vegetation',
'terrain',
'sky',
'person',
'rider',
'car',
'truck',
'bus',
'caravan',
'trailer',
'train',
'motorcycle',
'bicycle',
'license plate'
])
mean_bgr = np.array([104.00698793, 116.66876762, 122.67891434])
train_flip_types = [FlipType.Unknown, FlipType.Vertical, FlipType.Horizontal]
train_transform_types = [
ImageTransformType.CenterCrop,
ImageTransformType.BottomRightCrop,
ImageTransformType.BottomLeftCrop,
ImageTransformType.TopRightCrop,
ImageTransformType.TopLeftCrop,
ImageTransformType.Resize
]
val_flip_types = [FlipType.Unknown]
val_transform_types = [
ImageTransformType.Resize
]
final_h = 256
final_w = 512
def __init__(self, root, split='train', transform=False):
self.root = root
self.split = split
self._transform = transform
dataset_dir = osp.join(self.root, 'CityScapes/CityScapes')
self.files = collections.defaultdict(list)
for split in ['train', 'val']:
imgsets_file = osp.join(
dataset_dir, 'CityScapes_%s.txt' % split)
for did in open(imgsets_file):
did = did.strip()
city = did.partition('_')[0]
img_file = osp.join(
dataset_dir,
"{0}/{1}/{2}/{3}_{0}.png".format("leftImg8bit", split, city, did))
lbl_file = osp.join(
dataset_dir,
"{0}/{1}/{2}/{3}_{0}_labelIds.png".format("gtFine", split, city, did))
if split == 'train':
for flip_type in self.train_flip_types:
for transform_type in self.train_transform_types:
self.files[split].append({
'img': img_file,
'lbl': lbl_file,
'flip_type': flip_type,
'transform_type': transform_type
})
else:
for flip_type in self.val_flip_types:
for transform_type in self.val_transform_types:
self.files[split].append({
'img': img_file,
'lbl': lbl_file,
'flip_type': flip_type,
'transform_type': transform_type
})
def __len__(self):
return len(self.files[self.split])
def __getitem__(self, index):
data_file = self.files[self.split][index]
img_file = data_file['img']
lbl_file = data_file['lbl']
flip_type = data_file['flip_type']
transform_type = data_file['transform_type']
with PIL.Image.open(img_file) as img, PIL.Image.open(lbl_file) as lbl:
try:
new_img, new_lbl = apply_transform(img, lbl, transform_type, flip_type, self.final_h, self.final_w)
new_img = np.array(new_img, dtype=np.uint8)
new_lbl = np.array(new_lbl, dtype=np.int32)
new_lbl[new_lbl == 255] = -1
except:
print("Unexpected error:", sys.exc_info()[0])
print(f"Current index {index} img_file: {img_file} type(image)={type(new_img)}")
print(f"Current index {index} lbl_file: {lbl_file} type(lbl)={type(new_lbl)}")
raise
if self._transform:
return self.transform(new_img, new_lbl)
else:
return new_img, new_lbl
def transform(self, img, lbl):
img = img[:, :, ::-1] # RGB -> BGR
img = img.astype(np.float64)
img -= self.mean_bgr
img = img.transpose(2, 0, 1)
img = torch.from_numpy(img).float()
lbl = torch.from_numpy(lbl).long()
return img, lbl
def untransform(self, img, lbl):
img = img.numpy()
img = img.transpose(1, 2, 0)
img += self.mean_bgr
img = img.astype(np.uint8)
img = img[:, :, ::-1]
lbl = lbl.numpy()
return img, lbl | 0.343452 | 0.30399 |
from password import GMAIL_PASSWORD_KEY
ALTERNATIVE = "alternative"
ASSET = "asset"
ASSET_IDX = 0
BALANCE = "balance"
BINANCE = "Binance"
BOTH = "BOTH"
BNB = "BNB"
BNBUSDT = "BNBUSDT"
BTC = "BTC"
BTCUSDT = "BTCUSDT"
BUY = "BUY"
BYBIT = "Bybit"
CEP = "CEP__"
CEPS_PATH = "/ceps/"
COINBASE = "Coinbase"
CURRENT_SIDE = "current_side"
DEBUG = "debug"
EMITTOR = "<EMAIL>"
EMITTOR_PASSWORD = GMAIL_PASSWORD_KEY
ENTRY_PRICE = "entryPrice"
ERROR = "ERROR"
ERRORS_FILE = "errors.txt"
ERROR_MODE = "ERROR MODE"
ERROR_STRATEGY_PATH_FOLDER = "ERROR STRATEGY PATH FOLDER"
ERROR_STATISTICS_PATH_FOLDER = "ERROR STATISTICS PATH FOLDER"
ETH = "ETH"
ETHUSDT = "ETHUSDT"
FALSE = "false"
FREE = "free"
FROM = "From"
GOODTILLCANCEL = "GoodTillCancel"
HEDGE = 0
HIGH_C = 3
HISTORY_FILE_NAME = "history.csv"
INFO = "INFO"
INFO_C = 1
FUTURES = "FUTURES"
LEVERAGE = "leverage"
LONG = "LONG"
MARK_PRICE = "markPrice"
MARKET = "MARKET"
MASTER = "master"
MAX_RETRY = 20
MIN_WALLET_IN_USDT = 200
MIN_SLAVE_CHAR = 15
MEDIUM_C = 2
NA = "NA"
NO_C = 0
OFFSET_ACCOUNT_TYPE = 4
OFFSET_API_KEY = 1
OFFSET_API_SECRET_KEY = 2
OFFSET_EXCHANGE_PLATFORM = 0
OFFSET_SIDE = 3
OFFSET_SLAVE_IN_STRATEGY_FILE = 2
OFFSET_SYMBOL = 5
ONE_WAY = 1
OUT = "OUT"
PLAIN = "plain"
PORT = 465
POSITION_AMT = "positionAmt"
POSITION_SIDE = "positionSide"
PRECISION_IDX = 1
PRICE = "price"
RECEIVERS = ["<EMAIL>", "<EMAIL>"]
RESULT = 'result'
RISK = 0.80
RUN = "run"
SELL = "SELL"
SHORT = "SHORT"
SIDE = "side"
SIZE = "size"
SLAVE = "slave"
SPOT = "SPOT"
STMP_URL = "smtp.gmail.com"
STOP_MARKET = "STOP_MARKET"
SUBJECT = "Subject"
SUCCESSFUL = "successful"
SYMBOL = "symbol"
TAKE_PROFIT_MARKET = "TAKE_PROFIT_MARKET"
TIMESTAMP = "timestamp"
TO = "To"
TRUE = "true"
TXT = ".txt"
TYPE = "type"
UNDEFINED = "UNDEFINED"
UNSUCCESSFUL = "unsuccessful"
USDT = "USDT"
UTF8 = "UTF8"
WAIT_DEFAULT = 5
WALLET_BALANCE = 'wallet_balance'
WARNING = "WARNING"
WITHDRAW_AVAILABLE = "withdrawAvailable"
API_KEY = "api_key"
SIGN = "sign"
GET = "get"
POST = "post"
# A RENDRE SPECIFIQUE A LA CEP
CORRESPONDANCE_DICT = {INFO_C:"INFO", MEDIUM_C:"WARNING", HIGH_C: "ERROR"} | scripts/constants.py | from password import GMAIL_PASSWORD_KEY
ALTERNATIVE = "alternative"
ASSET = "asset"
ASSET_IDX = 0
BALANCE = "balance"
BINANCE = "Binance"
BOTH = "BOTH"
BNB = "BNB"
BNBUSDT = "BNBUSDT"
BTC = "BTC"
BTCUSDT = "BTCUSDT"
BUY = "BUY"
BYBIT = "Bybit"
CEP = "CEP__"
CEPS_PATH = "/ceps/"
COINBASE = "Coinbase"
CURRENT_SIDE = "current_side"
DEBUG = "debug"
EMITTOR = "<EMAIL>"
EMITTOR_PASSWORD = GMAIL_PASSWORD_KEY
ENTRY_PRICE = "entryPrice"
ERROR = "ERROR"
ERRORS_FILE = "errors.txt"
ERROR_MODE = "ERROR MODE"
ERROR_STRATEGY_PATH_FOLDER = "ERROR STRATEGY PATH FOLDER"
ERROR_STATISTICS_PATH_FOLDER = "ERROR STATISTICS PATH FOLDER"
ETH = "ETH"
ETHUSDT = "ETHUSDT"
FALSE = "false"
FREE = "free"
FROM = "From"
GOODTILLCANCEL = "GoodTillCancel"
HEDGE = 0
HIGH_C = 3
HISTORY_FILE_NAME = "history.csv"
INFO = "INFO"
INFO_C = 1
FUTURES = "FUTURES"
LEVERAGE = "leverage"
LONG = "LONG"
MARK_PRICE = "markPrice"
MARKET = "MARKET"
MASTER = "master"
MAX_RETRY = 20
MIN_WALLET_IN_USDT = 200
MIN_SLAVE_CHAR = 15
MEDIUM_C = 2
NA = "NA"
NO_C = 0
OFFSET_ACCOUNT_TYPE = 4
OFFSET_API_KEY = 1
OFFSET_API_SECRET_KEY = 2
OFFSET_EXCHANGE_PLATFORM = 0
OFFSET_SIDE = 3
OFFSET_SLAVE_IN_STRATEGY_FILE = 2
OFFSET_SYMBOL = 5
ONE_WAY = 1
OUT = "OUT"
PLAIN = "plain"
PORT = 465
POSITION_AMT = "positionAmt"
POSITION_SIDE = "positionSide"
PRECISION_IDX = 1
PRICE = "price"
RECEIVERS = ["<EMAIL>", "<EMAIL>"]
RESULT = 'result'
RISK = 0.80
RUN = "run"
SELL = "SELL"
SHORT = "SHORT"
SIDE = "side"
SIZE = "size"
SLAVE = "slave"
SPOT = "SPOT"
STMP_URL = "smtp.gmail.com"
STOP_MARKET = "STOP_MARKET"
SUBJECT = "Subject"
SUCCESSFUL = "successful"
SYMBOL = "symbol"
TAKE_PROFIT_MARKET = "TAKE_PROFIT_MARKET"
TIMESTAMP = "timestamp"
TO = "To"
TRUE = "true"
TXT = ".txt"
TYPE = "type"
UNDEFINED = "UNDEFINED"
UNSUCCESSFUL = "unsuccessful"
USDT = "USDT"
UTF8 = "UTF8"
WAIT_DEFAULT = 5
WALLET_BALANCE = 'wallet_balance'
WARNING = "WARNING"
WITHDRAW_AVAILABLE = "withdrawAvailable"
API_KEY = "api_key"
SIGN = "sign"
GET = "get"
POST = "post"
# A RENDRE SPECIFIQUE A LA CEP
CORRESPONDANCE_DICT = {INFO_C:"INFO", MEDIUM_C:"WARNING", HIGH_C: "ERROR"} | 0.136234 | 0.057573 |
import unittest
import os
import sys
PACKAGE_PARENT = '..'
SCRIPT_DIR = os.path.dirname(os.path.realpath(os.path.join(os.getcwd(), os.path.expanduser(__file__))))
sys.path.append(os.path.normpath(os.path.join(SCRIPT_DIR, PACKAGE_PARENT)))
from parse_display import ParseDisplayOutput
from command_generator import CommandGenerator
from settings import Settings
import tools
class TestCommendGenerator(unittest.TestCase):
def trace(self, lvl, *argv):
argv = ("TestCommendGenerator: ", ) + argv
return tools.trace(lvl, argv)
def setUp(self):
self.settings = Settings('settings.json')
self.display = ParseDisplayOutput("trace-display.output")
self.cmdgen = CommandGenerator(self.display, self.settings)
self.trace(4, 'setup done')
print(self.cmdgen.add_indv('csta'))
def test_expand(self):
self.assertListEqual(['1','2','3'], self.cmdgen.expand_names('1,2,3'))
self.assertListEqual(['1','2','3','5'], self.cmdgen.expand_names('1-3,5'))
self.assertListEqual(['SIPLP', 'RMP', 'CMP'], self.cmdgen.expand_names('usual'))
self.assertListEqual(['SIPLP', 'RMP', 'CMP', 'extra'], self.cmdgen.expand_names('usual,extra'))
self.assertListEqual(['SIPLP', 'RMP', 'CMP', 'extra'], self.cmdgen.expand_names(['usual,extra']))
self.assertListEqual(['SIPLP', 'RMP', 'CMP', 'extra'], self.cmdgen.expand_names(['usual','extra']))
def test_add_ISUS(self):
isus = self.display.get_individual('ISUS')
self.assertIsNone(isus)
isus_cmd = self.cmdgen.add_indv('ISUS', '4')
self.trace("add(ISUS) cmd ", isus_cmd)
self.assertTrue(isinstance(isus_cmd, list))
self.assertEqual(1, len(isus_cmd))
self.assertEqual(' '.join(isus_cmd[0]), '-lim 4 -unit ISUS')
def test_add_csta_gang(self):
gang_cmd = self.cmdgen.add_indv('csta')
self.assertIn('-lim 1 -unit ISUS'.split(' '), gang_cmd)
# self.assertIn('trace -lim 1 -unit CSTServer', gang_cmd)
self.assertNotIn(['-lim', '1', '-unit', 'SIPLP'], gang_cmd)
def test_set_textlevel(self):
set_cmd = self.cmdgen.set_textlevel('SIPLP', 'full')
self.trace(3, set_cmd)
def test_print_usual(self):
self.trace(3, 'Printing buffers for usual gang: SIPLP, RMP, CMP')
cmd = self.cmdgen.save_cmd(self.settings.expand_to_ids(['usual']), 'sample_')
for args, fname in cmd:
self.trace(3, " " + ", ".join(args) + " filename:" + fname)
self.assertEqual(3, len(cmd))
self.assertEqual('sample_SIPLP.log', cmd[0][1])
if __name__ == '__main__':
unittest.main() | tests/test_command_generator.py | import unittest
import os
import sys
PACKAGE_PARENT = '..'
SCRIPT_DIR = os.path.dirname(os.path.realpath(os.path.join(os.getcwd(), os.path.expanduser(__file__))))
sys.path.append(os.path.normpath(os.path.join(SCRIPT_DIR, PACKAGE_PARENT)))
from parse_display import ParseDisplayOutput
from command_generator import CommandGenerator
from settings import Settings
import tools
class TestCommendGenerator(unittest.TestCase):
def trace(self, lvl, *argv):
argv = ("TestCommendGenerator: ", ) + argv
return tools.trace(lvl, argv)
def setUp(self):
self.settings = Settings('settings.json')
self.display = ParseDisplayOutput("trace-display.output")
self.cmdgen = CommandGenerator(self.display, self.settings)
self.trace(4, 'setup done')
print(self.cmdgen.add_indv('csta'))
def test_expand(self):
self.assertListEqual(['1','2','3'], self.cmdgen.expand_names('1,2,3'))
self.assertListEqual(['1','2','3','5'], self.cmdgen.expand_names('1-3,5'))
self.assertListEqual(['SIPLP', 'RMP', 'CMP'], self.cmdgen.expand_names('usual'))
self.assertListEqual(['SIPLP', 'RMP', 'CMP', 'extra'], self.cmdgen.expand_names('usual,extra'))
self.assertListEqual(['SIPLP', 'RMP', 'CMP', 'extra'], self.cmdgen.expand_names(['usual,extra']))
self.assertListEqual(['SIPLP', 'RMP', 'CMP', 'extra'], self.cmdgen.expand_names(['usual','extra']))
def test_add_ISUS(self):
isus = self.display.get_individual('ISUS')
self.assertIsNone(isus)
isus_cmd = self.cmdgen.add_indv('ISUS', '4')
self.trace("add(ISUS) cmd ", isus_cmd)
self.assertTrue(isinstance(isus_cmd, list))
self.assertEqual(1, len(isus_cmd))
self.assertEqual(' '.join(isus_cmd[0]), '-lim 4 -unit ISUS')
def test_add_csta_gang(self):
gang_cmd = self.cmdgen.add_indv('csta')
self.assertIn('-lim 1 -unit ISUS'.split(' '), gang_cmd)
# self.assertIn('trace -lim 1 -unit CSTServer', gang_cmd)
self.assertNotIn(['-lim', '1', '-unit', 'SIPLP'], gang_cmd)
def test_set_textlevel(self):
set_cmd = self.cmdgen.set_textlevel('SIPLP', 'full')
self.trace(3, set_cmd)
def test_print_usual(self):
self.trace(3, 'Printing buffers for usual gang: SIPLP, RMP, CMP')
cmd = self.cmdgen.save_cmd(self.settings.expand_to_ids(['usual']), 'sample_')
for args, fname in cmd:
self.trace(3, " " + ", ".join(args) + " filename:" + fname)
self.assertEqual(3, len(cmd))
self.assertEqual('sample_SIPLP.log', cmd[0][1])
if __name__ == '__main__':
unittest.main() | 0.163012 | 0.254509 |
from TokenType import *
from Types import *
#begin Interpreter class
class Interpreter:
#Utility functions
#define evaluate function
def evaluate(self, node):
methodName = node.__class__.__name__ #Get method name
method = getattr(self, methodName, self.noVisit) #Get method
return method(node) #Return method's result
#end evaluate function
#define interpret function
def interpret(self, expr):
#begin try
try:
result = self.evaluate(expr)
print(result)
except Error.RTError as e: #Report any runtime errors
Error.reportRTError(e)
except RecursionError: #Catch recursion errors
Error.reportRTError(Error.RecursionError(expr.pos_start, expr.pos_end, "Maximum recursion depth exceeded"))
#end try
#end interpret function
#define noVisit function
def noVisit(self, node):
#begin raise
raise Exception(f"No visit method defined for node {node.__class__.__name__}")
#end raise
#end noVisit function
#Expressions
#define BinaryNode function
def BinaryNode(self, node): #Binary operations
left = self.evaluate(node.left) #Evaluate left and right operands
right = self.evaluate(node.right)
operator = node.operator.type #Get the operator type
result, error = None, None
#Checking operator's type and seeing if there are any matches
operations = {
PLUS: left.opAdd
}
func = operations.get(operator, None) #Get function
#begin if
if func == None: #If there is no function associated with the operator raise an error
result, error = None, Error.RTError(node.operator.pos_start, node.operator.pos_end, f"Operator '{operator}' has not been implemented")
else:
result, error = func(right)
#end if
#begin if
if error: #If error raise it
#begin raise
raise error
#end raise
#end if
return result.setPos(node.pos_start, node.pos_end)
#end BinaryNode function
#define LiteralNode function
def LiteralNode(self, node):
#begin if
if node.token.type == INT: #If int return Integer
result = Integer(int(node.token.lexeme))
elif node.token.type == FLOAT: #If float return Float
result = Float(float(node.token.lexeme))
#end if
return result.setPos(node.pos_start, node.pos_end) #Set literal's position
#end LiteralNode function
#end Interpreter class | src/Interpreter.py | from TokenType import *
from Types import *
#begin Interpreter class
class Interpreter:
#Utility functions
#define evaluate function
def evaluate(self, node):
methodName = node.__class__.__name__ #Get method name
method = getattr(self, methodName, self.noVisit) #Get method
return method(node) #Return method's result
#end evaluate function
#define interpret function
def interpret(self, expr):
#begin try
try:
result = self.evaluate(expr)
print(result)
except Error.RTError as e: #Report any runtime errors
Error.reportRTError(e)
except RecursionError: #Catch recursion errors
Error.reportRTError(Error.RecursionError(expr.pos_start, expr.pos_end, "Maximum recursion depth exceeded"))
#end try
#end interpret function
#define noVisit function
def noVisit(self, node):
#begin raise
raise Exception(f"No visit method defined for node {node.__class__.__name__}")
#end raise
#end noVisit function
#Expressions
#define BinaryNode function
def BinaryNode(self, node): #Binary operations
left = self.evaluate(node.left) #Evaluate left and right operands
right = self.evaluate(node.right)
operator = node.operator.type #Get the operator type
result, error = None, None
#Checking operator's type and seeing if there are any matches
operations = {
PLUS: left.opAdd
}
func = operations.get(operator, None) #Get function
#begin if
if func == None: #If there is no function associated with the operator raise an error
result, error = None, Error.RTError(node.operator.pos_start, node.operator.pos_end, f"Operator '{operator}' has not been implemented")
else:
result, error = func(right)
#end if
#begin if
if error: #If error raise it
#begin raise
raise error
#end raise
#end if
return result.setPos(node.pos_start, node.pos_end)
#end BinaryNode function
#define LiteralNode function
def LiteralNode(self, node):
#begin if
if node.token.type == INT: #If int return Integer
result = Integer(int(node.token.lexeme))
elif node.token.type == FLOAT: #If float return Float
result = Float(float(node.token.lexeme))
#end if
return result.setPos(node.pos_start, node.pos_end) #Set literal's position
#end LiteralNode function
#end Interpreter class | 0.532668 | 0.316343 |
import sys
aux = [0]*(999999)
#Método iterativo (Input: arreglo de denominaciones, monto a cambiar. Output: solución (cantidad mínima de monedas a usar), arreglo que almacena las monedas ocupadas para el cambio)
def bottomUpChange(denom, monto):
# Se inicializa la tabla que almacena el minimo numero de monedas para el monto i
# junto a una tabla auxiliar para almacenar la j denominación usada para el monto i
# Se define tambien la cota inicial como infinito
table = [0 for i in range (monto+1)]
tableaux = [0 for i in range (monto+1)]
cota = sys.maxsize
# Caso base (Si el monto entregado es 0)
table[0] = 0
# Se inicializan todos los valores de la tabla como infinitos (cota)
for i in range(1, monto+1):
table[i] = cota
# Calcular las minímas monedas requeridas para todos los valores desde 1 a monto
for i in range(1, monto+1):
for j in range(len(denom)):
if (i >= denom[j] and table[i- denom[j]]+1 < table[i]):
table[i] = table[i- denom[j]]+1
tableaux[i] = j
solution = table[monto]
# Excepción donde el valor encontrado es igual a la cota
if table[monto] == cota:
return -1
#Generar arreglo de monedas usadas en la solución
solution_coins = []
k = monto
while k:
solution_coins.append(denom[tableaux[k]])
k = k - denom[tableaux[k]]
return solution, solution_coins
#Método recursivo (Input: arreglo de denominaciones, monto a cambiar. Output: solución (cantidad mínima de monedas a usar), arreglo que almacena las monedas ocupadas para el cambio)
def topDownChange(denom, monto):
#Solución relajada (cota = infinito)
solution = sys.maxsize
#Si hay una moneda que complete el monto pedido (si el monto se encuentra en el arreglo de denominaciones)
if monto in denom:
#se retorna 1 (que indica que se ocupara 1 moneda) y el valor de esa moneda como arreglo
return 1, [monto]
#Este auxiliar cuando es mayor a 0, significa que el monto en el que estamos no es una denominación.
#Memoriza la solución para el monto en el que nos encontramos (Memoization)
elif aux[monto] > 0:
return aux[monto], []
else:
#Arreglo que almacenará las monedas ocupadas para el cambio (para la solución óptima)
solution_coins = []
for coin in denom:
if coin < monto:
#Recursión
n_monedas, monedas = topDownChange(denom, monto - coin)
partial_solution = 1 + n_monedas
#Si la solución parcial encontrada es mejor que la solución presente, se la reemplaza
if partial_solution < solution:
solution = partial_solution
solution_coins = monedas + [coin]
aux[monto] = solution
#Si la solución parcial encontrada no es mejor que la solución presente, no se sigue revisando esta "rama" (Branch and Bound)
else:
break
return solution, solution_coins
#Función que recibe los arreglos que almacenan las monedas ocupadas para el cambio y retorna el arreglo de cantidad de cada denominación como es pedido en la tarea
def denominator(denom, solution_coins):
sol = [0 for i in range (len(denom))]
for i in range(len(solution_coins)):
if solution_coins[i] in denom:
ind = denom.index(solution_coins[i])
sol[ind] = sol[ind]+1
return sol
def main():
#Arreglo de denominaciones de moneda hardcodeado
denominations = [365, 91, 52, 28, 13, 7, 4, 1]
#Ingrese 'n' cantidades a procesar:
n = int(input())
for i in range(n):
#Ingrese el monto a obtener:
monto = int(input())
#Método iterativo
# minCoins, denomUsadas = bottomUpChange(denominations, monto)
# solution = denominator(denominations, denomUsadas)
# print(solution)
#Método recursivo
minCoins, denomUsadas = topDownChange(denominations, monto)
solution = denominator(denominations, denomUsadas)
print(solution)
main() | coinChange.py | import sys
aux = [0]*(999999)
#Método iterativo (Input: arreglo de denominaciones, monto a cambiar. Output: solución (cantidad mínima de monedas a usar), arreglo que almacena las monedas ocupadas para el cambio)
def bottomUpChange(denom, monto):
# Se inicializa la tabla que almacena el minimo numero de monedas para el monto i
# junto a una tabla auxiliar para almacenar la j denominación usada para el monto i
# Se define tambien la cota inicial como infinito
table = [0 for i in range (monto+1)]
tableaux = [0 for i in range (monto+1)]
cota = sys.maxsize
# Caso base (Si el monto entregado es 0)
table[0] = 0
# Se inicializan todos los valores de la tabla como infinitos (cota)
for i in range(1, monto+1):
table[i] = cota
# Calcular las minímas monedas requeridas para todos los valores desde 1 a monto
for i in range(1, monto+1):
for j in range(len(denom)):
if (i >= denom[j] and table[i- denom[j]]+1 < table[i]):
table[i] = table[i- denom[j]]+1
tableaux[i] = j
solution = table[monto]
# Excepción donde el valor encontrado es igual a la cota
if table[monto] == cota:
return -1
#Generar arreglo de monedas usadas en la solución
solution_coins = []
k = monto
while k:
solution_coins.append(denom[tableaux[k]])
k = k - denom[tableaux[k]]
return solution, solution_coins
#Método recursivo (Input: arreglo de denominaciones, monto a cambiar. Output: solución (cantidad mínima de monedas a usar), arreglo que almacena las monedas ocupadas para el cambio)
def topDownChange(denom, monto):
#Solución relajada (cota = infinito)
solution = sys.maxsize
#Si hay una moneda que complete el monto pedido (si el monto se encuentra en el arreglo de denominaciones)
if monto in denom:
#se retorna 1 (que indica que se ocupara 1 moneda) y el valor de esa moneda como arreglo
return 1, [monto]
#Este auxiliar cuando es mayor a 0, significa que el monto en el que estamos no es una denominación.
#Memoriza la solución para el monto en el que nos encontramos (Memoization)
elif aux[monto] > 0:
return aux[monto], []
else:
#Arreglo que almacenará las monedas ocupadas para el cambio (para la solución óptima)
solution_coins = []
for coin in denom:
if coin < monto:
#Recursión
n_monedas, monedas = topDownChange(denom, monto - coin)
partial_solution = 1 + n_monedas
#Si la solución parcial encontrada es mejor que la solución presente, se la reemplaza
if partial_solution < solution:
solution = partial_solution
solution_coins = monedas + [coin]
aux[monto] = solution
#Si la solución parcial encontrada no es mejor que la solución presente, no se sigue revisando esta "rama" (Branch and Bound)
else:
break
return solution, solution_coins
#Función que recibe los arreglos que almacenan las monedas ocupadas para el cambio y retorna el arreglo de cantidad de cada denominación como es pedido en la tarea
def denominator(denom, solution_coins):
sol = [0 for i in range (len(denom))]
for i in range(len(solution_coins)):
if solution_coins[i] in denom:
ind = denom.index(solution_coins[i])
sol[ind] = sol[ind]+1
return sol
def main():
#Arreglo de denominaciones de moneda hardcodeado
denominations = [365, 91, 52, 28, 13, 7, 4, 1]
#Ingrese 'n' cantidades a procesar:
n = int(input())
for i in range(n):
#Ingrese el monto a obtener:
monto = int(input())
#Método iterativo
# minCoins, denomUsadas = bottomUpChange(denominations, monto)
# solution = denominator(denominations, denomUsadas)
# print(solution)
#Método recursivo
minCoins, denomUsadas = topDownChange(denominations, monto)
solution = denominator(denominations, denomUsadas)
print(solution)
main() | 0.094788 | 0.698115 |
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
class ldp_fec_vc_rec_list(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-mpls - based on the path /brocade_mpls_rpc/show-mpls-ldp-fec-vc/output/ldp-fec-vc-rec-list. Each member element of
the container is represented as a class variable - with a specific
YANG type.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__ldp_fec_peer_id','__ldp_fec_peer_lblspc','__ldp_fec_state','__ldp_fec_vc_id','__ldp_fec_peer_vc_type','__ldp_fec_peer_fec_type','__ldp_fec_ingress','__ldp_fec_egress',)
_yang_name = 'ldp-fec-vc-rec-list'
_rest_name = 'ldp-fec-vc-rec-list'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
self.__ldp_fec_state = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'current': {'value': 16384}, u'unknown': {'value': 0}, u'retained': {'value': 49152}, u'down': {'value': 32768}},), is_leaf=True, yang_name="ldp-fec-state", rest_name="ldp-fec-state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='ldp-fec-state', is_config=True)
self.__ldp_fec_vc_id = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="ldp-fec-vc-id", rest_name="ldp-fec-vc-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
self.__ldp_fec_peer_fec_type = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="ldp-fec-peer-fec-type", rest_name="ldp-fec-peer-fec-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
self.__ldp_fec_peer_vc_type = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="ldp-fec-peer-vc-type", rest_name="ldp-fec-peer-vc-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
self.__ldp_fec_ingress = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'yes': {'value': 1}, u'no': {'value': 0}},), is_leaf=True, yang_name="ldp-fec-ingress", rest_name="ldp-fec-ingress", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='yes-no', is_config=True)
self.__ldp_fec_egress = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'yes': {'value': 1}, u'no': {'value': 0}},), is_leaf=True, yang_name="ldp-fec-egress", rest_name="ldp-fec-egress", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='yes-no', is_config=True)
self.__ldp_fec_peer_lblspc = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="ldp-fec-peer-lblspc", rest_name="ldp-fec-peer-lblspc", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
self.__ldp_fec_peer_id = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="ldp-fec-peer-id", rest_name="ldp-fec-peer-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='inet:ipv4-address', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'brocade_mpls_rpc', u'show-mpls-ldp-fec-vc', u'output', u'ldp-fec-vc-rec-list']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'show-mpls-ldp-fec-vc', u'output', u'ldp-fec-vc-rec-list']
def _get_ldp_fec_peer_id(self):
"""
Getter method for ldp_fec_peer_id, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_fec_vc/output/ldp_fec_vc_rec_list/ldp_fec_peer_id (inet:ipv4-address)
YANG Description: Peer LDP ID
"""
return self.__ldp_fec_peer_id
def _set_ldp_fec_peer_id(self, v, load=False):
"""
Setter method for ldp_fec_peer_id, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_fec_vc/output/ldp_fec_vc_rec_list/ldp_fec_peer_id (inet:ipv4-address)
If this variable is read-only (config: false) in the
source YANG file, then _set_ldp_fec_peer_id is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ldp_fec_peer_id() directly.
YANG Description: Peer LDP ID
"""
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError("Cannot set keys directly when" +
" within an instantiated list")
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="ldp-fec-peer-id", rest_name="ldp-fec-peer-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='inet:ipv4-address', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ldp_fec_peer_id must be of a type compatible with inet:ipv4-address""",
'defined-type': "inet:ipv4-address",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="ldp-fec-peer-id", rest_name="ldp-fec-peer-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='inet:ipv4-address', is_config=True)""",
})
self.__ldp_fec_peer_id = t
if hasattr(self, '_set'):
self._set()
def _unset_ldp_fec_peer_id(self):
self.__ldp_fec_peer_id = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="ldp-fec-peer-id", rest_name="ldp-fec-peer-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='inet:ipv4-address', is_config=True)
def _get_ldp_fec_peer_lblspc(self):
"""
Getter method for ldp_fec_peer_lblspc, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_fec_vc/output/ldp_fec_vc_rec_list/ldp_fec_peer_lblspc (uint32)
YANG Description: Peer Label Space
"""
return self.__ldp_fec_peer_lblspc
def _set_ldp_fec_peer_lblspc(self, v, load=False):
"""
Setter method for ldp_fec_peer_lblspc, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_fec_vc/output/ldp_fec_vc_rec_list/ldp_fec_peer_lblspc (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_ldp_fec_peer_lblspc is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ldp_fec_peer_lblspc() directly.
YANG Description: Peer Label Space
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="ldp-fec-peer-lblspc", rest_name="ldp-fec-peer-lblspc", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ldp_fec_peer_lblspc must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="ldp-fec-peer-lblspc", rest_name="ldp-fec-peer-lblspc", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
})
self.__ldp_fec_peer_lblspc = t
if hasattr(self, '_set'):
self._set()
def _unset_ldp_fec_peer_lblspc(self):
self.__ldp_fec_peer_lblspc = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="ldp-fec-peer-lblspc", rest_name="ldp-fec-peer-lblspc", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
def _get_ldp_fec_state(self):
  """
  Getter method for ldp_fec_state, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_fec_vc/output/ldp_fec_vc_rec_list/ldp_fec_state (ldp-fec-state)
  YANG Description: State
  """
  return self.__ldp_fec_state
def _set_ldp_fec_state(self, v, load=False):
  """
  Setter method for ldp_fec_state, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_fec_vc/output/ldp_fec_vc_rec_list/ldp_fec_state (ldp-fec-state)
  If this variable is read-only (config: false) in the
  source YANG file, then _set_ldp_fec_state is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_ldp_fec_state() directly.
  YANG Description: State
  """
  # Unwrap a value that is already YANG-typed back to its base type so it
  # can be re-validated by the constructor below.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # Validation happens inside YANGDynClass: a value outside the
    # enumeration (current/unknown/retained/down) raises.
    t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'current': {'value': 16384}, u'unknown': {'value': 0}, u'retained': {'value': 49152}, u'down': {'value': 32768}},), is_leaf=True, yang_name="ldp-fec-state", rest_name="ldp-fec-state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='ldp-fec-state', is_config=True)
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """ldp_fec_state must be of a type compatible with ldp-fec-state""",
      'defined-type': "brocade-mpls:ldp-fec-state",
      'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'current': {'value': 16384}, u'unknown': {'value': 0}, u'retained': {'value': 49152}, u'down': {'value': 32768}},), is_leaf=True, yang_name="ldp-fec-state", rest_name="ldp-fec-state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='ldp-fec-state', is_config=True)""",
    })
  self.__ldp_fec_state = t
  # Notify the enclosing container (when one provides a _set hook) that a
  # child value changed.
  if hasattr(self, '_set'):
    self._set()
def _unset_ldp_fec_state(self):
  # Reset the leaf to a fresh, unset YANG-typed default instance.
  self.__ldp_fec_state = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'current': {'value': 16384}, u'unknown': {'value': 0}, u'retained': {'value': 49152}, u'down': {'value': 32768}},), is_leaf=True, yang_name="ldp-fec-state", rest_name="ldp-fec-state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='ldp-fec-state', is_config=True)
def _get_ldp_fec_vc_id(self):
  """
  Getter method for ldp_fec_vc_id, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_fec_vc/output/ldp_fec_vc_rec_list/ldp_fec_vc_id (uint32)
  YANG Description: VC-ID
  """
  return self.__ldp_fec_vc_id
def _set_ldp_fec_vc_id(self, v, load=False):
  """
  Setter method for ldp_fec_vc_id, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_fec_vc/output/ldp_fec_vc_rec_list/ldp_fec_vc_id (uint32)
  If this variable is read-only (config: false) in the
  source YANG file, then _set_ldp_fec_vc_id is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_ldp_fec_vc_id() directly.
  YANG Description: VC-ID
  """
  # Unwrap an already YANG-typed value so it is re-validated below.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # uint32 range validation is performed by the YANGDynClass constructor.
    t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="ldp-fec-vc-id", rest_name="ldp-fec-vc-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """ldp_fec_vc_id must be of a type compatible with uint32""",
      'defined-type': "uint32",
      'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="ldp-fec-vc-id", rest_name="ldp-fec-vc-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
    })
  self.__ldp_fec_vc_id = t
  # Propagate the change to the enclosing container, if it hooks _set.
  if hasattr(self, '_set'):
    self._set()
def _unset_ldp_fec_vc_id(self):
  # Reset the leaf to a fresh, unset YANG-typed default instance.
  self.__ldp_fec_vc_id = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="ldp-fec-vc-id", rest_name="ldp-fec-vc-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
def _get_ldp_fec_peer_vc_type(self):
  """
  Getter method for ldp_fec_peer_vc_type, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_fec_vc/output/ldp_fec_vc_rec_list/ldp_fec_peer_vc_type (uint32)
  YANG Description: VC-Type
  """
  return self.__ldp_fec_peer_vc_type
def _set_ldp_fec_peer_vc_type(self, v, load=False):
  """
  Setter method for ldp_fec_peer_vc_type, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_fec_vc/output/ldp_fec_vc_rec_list/ldp_fec_peer_vc_type (uint32)
  If this variable is read-only (config: false) in the
  source YANG file, then _set_ldp_fec_peer_vc_type is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_ldp_fec_peer_vc_type() directly.
  YANG Description: VC-Type
  """
  # Unwrap an already YANG-typed value so it is re-validated below.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # uint32 range validation is performed by the YANGDynClass constructor.
    t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="ldp-fec-peer-vc-type", rest_name="ldp-fec-peer-vc-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """ldp_fec_peer_vc_type must be of a type compatible with uint32""",
      'defined-type': "uint32",
      'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="ldp-fec-peer-vc-type", rest_name="ldp-fec-peer-vc-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
    })
  self.__ldp_fec_peer_vc_type = t
  # Propagate the change to the enclosing container, if it hooks _set.
  if hasattr(self, '_set'):
    self._set()
def _unset_ldp_fec_peer_vc_type(self):
  # Reset the leaf to a fresh, unset YANG-typed default instance.
  self.__ldp_fec_peer_vc_type = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="ldp-fec-peer-vc-type", rest_name="ldp-fec-peer-vc-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
def _get_ldp_fec_peer_fec_type(self):
  """
  Getter method for ldp_fec_peer_fec_type, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_fec_vc/output/ldp_fec_vc_rec_list/ldp_fec_peer_fec_type (uint32)
  YANG Description: FEC-Type
  """
  return self.__ldp_fec_peer_fec_type
def _set_ldp_fec_peer_fec_type(self, v, load=False):
  """
  Setter method for ldp_fec_peer_fec_type, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_fec_vc/output/ldp_fec_vc_rec_list/ldp_fec_peer_fec_type (uint32)
  If this variable is read-only (config: false) in the
  source YANG file, then _set_ldp_fec_peer_fec_type is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_ldp_fec_peer_fec_type() directly.
  YANG Description: FEC-Type
  """
  # Unwrap an already YANG-typed value so it is re-validated below.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # uint32 range validation is performed by the YANGDynClass constructor.
    t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="ldp-fec-peer-fec-type", rest_name="ldp-fec-peer-fec-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """ldp_fec_peer_fec_type must be of a type compatible with uint32""",
      'defined-type': "uint32",
      'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="ldp-fec-peer-fec-type", rest_name="ldp-fec-peer-fec-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
    })
  self.__ldp_fec_peer_fec_type = t
  # Propagate the change to the enclosing container, if it hooks _set.
  if hasattr(self, '_set'):
    self._set()
def _unset_ldp_fec_peer_fec_type(self):
  # Reset the leaf to a fresh, unset YANG-typed default instance.
  self.__ldp_fec_peer_fec_type = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="ldp-fec-peer-fec-type", rest_name="ldp-fec-peer-fec-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
def _get_ldp_fec_ingress(self):
  """
  Getter method for ldp_fec_ingress, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_fec_vc/output/ldp_fec_vc_rec_list/ldp_fec_ingress (yes-no)
  YANG Description: Ingress
  """
  return self.__ldp_fec_ingress
def _set_ldp_fec_ingress(self, v, load=False):
  """
  Setter method for ldp_fec_ingress, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_fec_vc/output/ldp_fec_vc_rec_list/ldp_fec_ingress (yes-no)
  If this variable is read-only (config: false) in the
  source YANG file, then _set_ldp_fec_ingress is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_ldp_fec_ingress() directly.
  YANG Description: Ingress
  """
  # Unwrap an already YANG-typed value so it is re-validated below.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # Value must be one of the yes-no enumeration keys ('yes'/'no').
    t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'yes': {'value': 1}, u'no': {'value': 0}},), is_leaf=True, yang_name="ldp-fec-ingress", rest_name="ldp-fec-ingress", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='yes-no', is_config=True)
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """ldp_fec_ingress must be of a type compatible with yes-no""",
      'defined-type': "brocade-mpls:yes-no",
      'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'yes': {'value': 1}, u'no': {'value': 0}},), is_leaf=True, yang_name="ldp-fec-ingress", rest_name="ldp-fec-ingress", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='yes-no', is_config=True)""",
    })
  self.__ldp_fec_ingress = t
  # Propagate the change to the enclosing container, if it hooks _set.
  if hasattr(self, '_set'):
    self._set()
def _unset_ldp_fec_ingress(self):
  # Reset the leaf to a fresh, unset YANG-typed default instance.
  self.__ldp_fec_ingress = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'yes': {'value': 1}, u'no': {'value': 0}},), is_leaf=True, yang_name="ldp-fec-ingress", rest_name="ldp-fec-ingress", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='yes-no', is_config=True)
def _get_ldp_fec_egress(self):
  """
  Getter method for ldp_fec_egress, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_fec_vc/output/ldp_fec_vc_rec_list/ldp_fec_egress (yes-no)
  YANG Description: Egress
  """
  return self.__ldp_fec_egress
def _set_ldp_fec_egress(self, v, load=False):
  """
  Setter method for ldp_fec_egress, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_fec_vc/output/ldp_fec_vc_rec_list/ldp_fec_egress (yes-no)
  If this variable is read-only (config: false) in the
  source YANG file, then _set_ldp_fec_egress is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_ldp_fec_egress() directly.
  YANG Description: Egress
  """
  # Unwrap an already YANG-typed value so it is re-validated below.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # Value must be one of the yes-no enumeration keys ('yes'/'no').
    t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'yes': {'value': 1}, u'no': {'value': 0}},), is_leaf=True, yang_name="ldp-fec-egress", rest_name="ldp-fec-egress", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='yes-no', is_config=True)
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """ldp_fec_egress must be of a type compatible with yes-no""",
      'defined-type': "brocade-mpls:yes-no",
      'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'yes': {'value': 1}, u'no': {'value': 0}},), is_leaf=True, yang_name="ldp-fec-egress", rest_name="ldp-fec-egress", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='yes-no', is_config=True)""",
    })
  self.__ldp_fec_egress = t
  # Propagate the change to the enclosing container, if it hooks _set.
  if hasattr(self, '_set'):
    self._set()
def _unset_ldp_fec_egress(self):
  # Reset the leaf to a fresh, unset YANG-typed default instance.
  self.__ldp_fec_egress = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'yes': {'value': 1}, u'no': {'value': 0}},), is_leaf=True, yang_name="ldp-fec-egress", rest_name="ldp-fec-egress", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='yes-no', is_config=True)
# Public read/write properties exposing each YANG leaf; writes funnel through
# the validating _set_* methods above. __builtin__.property is used because
# 'property' may be shadowed in generated scopes.
ldp_fec_peer_id = __builtin__.property(_get_ldp_fec_peer_id, _set_ldp_fec_peer_id)
ldp_fec_peer_lblspc = __builtin__.property(_get_ldp_fec_peer_lblspc, _set_ldp_fec_peer_lblspc)
ldp_fec_state = __builtin__.property(_get_ldp_fec_state, _set_ldp_fec_state)
ldp_fec_vc_id = __builtin__.property(_get_ldp_fec_vc_id, _set_ldp_fec_vc_id)
ldp_fec_peer_vc_type = __builtin__.property(_get_ldp_fec_peer_vc_type, _set_ldp_fec_peer_vc_type)
ldp_fec_peer_fec_type = __builtin__.property(_get_ldp_fec_peer_fec_type, _set_ldp_fec_peer_fec_type)
ldp_fec_ingress = __builtin__.property(_get_ldp_fec_ingress, _set_ldp_fec_ingress)
ldp_fec_egress = __builtin__.property(_get_ldp_fec_egress, _set_ldp_fec_egress)
# Element registry used by PybindBase (and the copy constructor) to iterate
# over every member of this container.
_pyangbind_elements = {'ldp_fec_peer_id': ldp_fec_peer_id, 'ldp_fec_peer_lblspc': ldp_fec_peer_lblspc, 'ldp_fec_state': ldp_fec_state, 'ldp_fec_vc_id': ldp_fec_vc_id, 'ldp_fec_peer_vc_type': ldp_fec_peer_vc_type, 'ldp_fec_peer_fec_type': ldp_fec_peer_fec_type, 'ldp_fec_ingress': ldp_fec_ingress, 'ldp_fec_egress': ldp_fec_egress, }
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
class ldp_fec_vc_rec_list(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module brocade-mpls - based on the path /brocade_mpls_rpc/show-mpls-ldp-fec-vc/output/ldp-fec-vc-rec-list. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.
  """
  # __slots__ prevents accidental attribute creation on these generated
  # containers and reduces per-instance memory. The name-mangled entries
  # match the private leaf storage used by the accessor methods.
  __slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__ldp_fec_peer_id','__ldp_fec_peer_lblspc','__ldp_fec_state','__ldp_fec_vc_id','__ldp_fec_peer_vc_type','__ldp_fec_peer_fec_type','__ldp_fec_ingress','__ldp_fec_egress',)
  _yang_name = 'ldp-fec-vc-rec-list'
  _rest_name = 'ldp-fec-vc-rec-list'
  _pybind_generated_by = 'container'
  def __init__(self, *args, **kwargs):
    # Resolve the XPath helper: an explicit keyword wins (False disables),
    # otherwise inherit from the parent node, otherwise disabled.
    path_helper_ = kwargs.pop("path_helper", None)
    if path_helper_ is False:
      self._path_helper = False
    elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
      self._path_helper = path_helper_
    elif hasattr(self, "_parent"):
      path_helper_ = getattr(self._parent, "_path_helper", False)
      self._path_helper = path_helper_
    else:
      self._path_helper = False
    # Resolve extension methods the same way: explicit, inherited, or off.
    extmethods = kwargs.pop("extmethods", None)
    if extmethods is False:
      self._extmethods = False
    elif extmethods is not None and isinstance(extmethods, dict):
      self._extmethods = extmethods
    elif hasattr(self, "_parent"):
      extmethods = getattr(self._parent, "_extmethods", None)
      self._extmethods = extmethods
    else:
      self._extmethods = False
    # Create default (unset) YANG-typed instances for every leaf.
    self.__ldp_fec_state = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'current': {'value': 16384}, u'unknown': {'value': 0}, u'retained': {'value': 49152}, u'down': {'value': 32768}},), is_leaf=True, yang_name="ldp-fec-state", rest_name="ldp-fec-state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='ldp-fec-state', is_config=True)
    self.__ldp_fec_vc_id = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="ldp-fec-vc-id", rest_name="ldp-fec-vc-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    self.__ldp_fec_peer_fec_type = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="ldp-fec-peer-fec-type", rest_name="ldp-fec-peer-fec-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    self.__ldp_fec_peer_vc_type = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="ldp-fec-peer-vc-type", rest_name="ldp-fec-peer-vc-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    self.__ldp_fec_ingress = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'yes': {'value': 1}, u'no': {'value': 0}},), is_leaf=True, yang_name="ldp-fec-ingress", rest_name="ldp-fec-ingress", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='yes-no', is_config=True)
    self.__ldp_fec_egress = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'yes': {'value': 1}, u'no': {'value': 0}},), is_leaf=True, yang_name="ldp-fec-egress", rest_name="ldp-fec-egress", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='yes-no', is_config=True)
    self.__ldp_fec_peer_lblspc = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="ldp-fec-peer-lblspc", rest_name="ldp-fec-peer-lblspc", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    self.__ldp_fec_peer_id = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="ldp-fec-peer-id", rest_name="ldp-fec-peer-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='inet:ipv4-address', is_config=True)
    load = kwargs.pop("load", None)
    # Copy-constructor support: a single positional argument must expose
    # every element of this container; changed values are copied across via
    # the validating _set_* methods (honouring the 'load' flag when given).
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        # Only copy elements that differ from their defaults.
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'brocade_mpls_rpc', u'show-mpls-ldp-fec-vc', u'output', u'ldp-fec-vc-rec-list']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'show-mpls-ldp-fec-vc', u'output', u'ldp-fec-vc-rec-list']
  def _get_ldp_fec_peer_id(self):
    """
    Getter method for ldp_fec_peer_id, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_fec_vc/output/ldp_fec_vc_rec_list/ldp_fec_peer_id (inet:ipv4-address)
    YANG Description: Peer LDP ID
    """
    return self.__ldp_fec_peer_id
  def _set_ldp_fec_peer_id(self, v, load=False):
    """
    Setter method for ldp_fec_peer_id, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_fec_vc/output/ldp_fec_vc_rec_list/ldp_fec_peer_id (inet:ipv4-address)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_ldp_fec_peer_id is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_ldp_fec_peer_id() directly.
    YANG Description: Peer LDP ID
    """
    # This leaf is a list key (is_keyval=True): once the entry is attached
    # to a parent list, the key may only be changed via load, not directly.
    parent = getattr(self, "_parent", None)
    if parent is not None and load is False:
      raise AttributeError("Cannot set keys directly when" +
                           " within an instantiated list")
    # Unwrap an already YANG-typed value so it is re-validated below.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # The IPv4 dotted-quad pattern is enforced by RestrictedClassType.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="ldp-fec-peer-id", rest_name="ldp-fec-peer-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='inet:ipv4-address', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """ldp_fec_peer_id must be of a type compatible with inet:ipv4-address""",
        'defined-type': "inet:ipv4-address",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="ldp-fec-peer-id", rest_name="ldp-fec-peer-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='inet:ipv4-address', is_config=True)""",
      })
    self.__ldp_fec_peer_id = t
    # Propagate the change to the enclosing container, if it hooks _set.
    if hasattr(self, '_set'):
      self._set()
  def _unset_ldp_fec_peer_id(self):
    # Reset the leaf to a fresh, unset YANG-typed default instance.
    self.__ldp_fec_peer_id = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="ldp-fec-peer-id", rest_name="ldp-fec-peer-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='inet:ipv4-address', is_config=True)
  def _get_ldp_fec_peer_lblspc(self):
    """
    Getter method for ldp_fec_peer_lblspc, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_fec_vc/output/ldp_fec_vc_rec_list/ldp_fec_peer_lblspc (uint32)
    YANG Description: Peer Label Space
    """
    return self.__ldp_fec_peer_lblspc
  def _set_ldp_fec_peer_lblspc(self, v, load=False):
    """
    Setter method for ldp_fec_peer_lblspc, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_fec_vc/output/ldp_fec_vc_rec_list/ldp_fec_peer_lblspc (uint32)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_ldp_fec_peer_lblspc is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_ldp_fec_peer_lblspc() directly.
    YANG Description: Peer Label Space
    """
    # Unwrap an already YANG-typed value so it is re-validated below.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # uint32 range validation is performed by the YANGDynClass constructor.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="ldp-fec-peer-lblspc", rest_name="ldp-fec-peer-lblspc", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """ldp_fec_peer_lblspc must be of a type compatible with uint32""",
        'defined-type': "uint32",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="ldp-fec-peer-lblspc", rest_name="ldp-fec-peer-lblspc", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
      })
    self.__ldp_fec_peer_lblspc = t
    # Propagate the change to the enclosing container, if it hooks _set.
    if hasattr(self, '_set'):
      self._set()
  def _unset_ldp_fec_peer_lblspc(self):
    # Reset the leaf to a fresh, unset YANG-typed default instance.
    self.__ldp_fec_peer_lblspc = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="ldp-fec-peer-lblspc", rest_name="ldp-fec-peer-lblspc", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
  def _get_ldp_fec_state(self):
    """
    Getter method for ldp_fec_state, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_fec_vc/output/ldp_fec_vc_rec_list/ldp_fec_state (ldp-fec-state)
    YANG Description: State
    """
    return self.__ldp_fec_state
  def _set_ldp_fec_state(self, v, load=False):
    """
    Setter method for ldp_fec_state, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_fec_vc/output/ldp_fec_vc_rec_list/ldp_fec_state (ldp-fec-state)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_ldp_fec_state is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_ldp_fec_state() directly.
    YANG Description: State
    """
    # Unwrap an already YANG-typed value so it is re-validated below.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Value must be one of the ldp-fec-state enumeration keys
      # (current/unknown/retained/down).
      t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'current': {'value': 16384}, u'unknown': {'value': 0}, u'retained': {'value': 49152}, u'down': {'value': 32768}},), is_leaf=True, yang_name="ldp-fec-state", rest_name="ldp-fec-state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='ldp-fec-state', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """ldp_fec_state must be of a type compatible with ldp-fec-state""",
        'defined-type': "brocade-mpls:ldp-fec-state",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'current': {'value': 16384}, u'unknown': {'value': 0}, u'retained': {'value': 49152}, u'down': {'value': 32768}},), is_leaf=True, yang_name="ldp-fec-state", rest_name="ldp-fec-state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='ldp-fec-state', is_config=True)""",
      })
    self.__ldp_fec_state = t
    # Propagate the change to the enclosing container, if it hooks _set.
    if hasattr(self, '_set'):
      self._set()
  def _unset_ldp_fec_state(self):
    # Reset the leaf to a fresh, unset YANG-typed default instance.
    self.__ldp_fec_state = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'current': {'value': 16384}, u'unknown': {'value': 0}, u'retained': {'value': 49152}, u'down': {'value': 32768}},), is_leaf=True, yang_name="ldp-fec-state", rest_name="ldp-fec-state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='ldp-fec-state', is_config=True)
  def _get_ldp_fec_vc_id(self):
    """
    Getter method for ldp_fec_vc_id, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_fec_vc/output/ldp_fec_vc_rec_list/ldp_fec_vc_id (uint32)
    YANG Description: VC-ID
    """
    return self.__ldp_fec_vc_id
  def _set_ldp_fec_vc_id(self, v, load=False):
    """
    Setter method for ldp_fec_vc_id, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_fec_vc/output/ldp_fec_vc_rec_list/ldp_fec_vc_id (uint32)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_ldp_fec_vc_id is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_ldp_fec_vc_id() directly.
    YANG Description: VC-ID
    """
    # Unwrap an already YANG-typed value so it is re-validated below.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # uint32 range validation is performed by the YANGDynClass constructor.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="ldp-fec-vc-id", rest_name="ldp-fec-vc-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """ldp_fec_vc_id must be of a type compatible with uint32""",
        'defined-type': "uint32",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="ldp-fec-vc-id", rest_name="ldp-fec-vc-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
      })
    self.__ldp_fec_vc_id = t
    # Propagate the change to the enclosing container, if it hooks _set.
    if hasattr(self, '_set'):
      self._set()
  def _unset_ldp_fec_vc_id(self):
    # Reset the leaf to a fresh, unset YANG-typed default instance.
    self.__ldp_fec_vc_id = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="ldp-fec-vc-id", rest_name="ldp-fec-vc-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
def _get_ldp_fec_peer_vc_type(self):
"""
Getter method for ldp_fec_peer_vc_type, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_fec_vc/output/ldp_fec_vc_rec_list/ldp_fec_peer_vc_type (uint32)
YANG Description: VC-Type
"""
return self.__ldp_fec_peer_vc_type
def _set_ldp_fec_peer_vc_type(self, v, load=False):
"""
Setter method for ldp_fec_peer_vc_type, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_fec_vc/output/ldp_fec_vc_rec_list/ldp_fec_peer_vc_type (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_ldp_fec_peer_vc_type is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ldp_fec_peer_vc_type() directly.
YANG Description: VC-Type
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="ldp-fec-peer-vc-type", rest_name="ldp-fec-peer-vc-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ldp_fec_peer_vc_type must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="ldp-fec-peer-vc-type", rest_name="ldp-fec-peer-vc-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
})
self.__ldp_fec_peer_vc_type = t
if hasattr(self, '_set'):
self._set()
def _unset_ldp_fec_peer_vc_type(self):
self.__ldp_fec_peer_vc_type = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="ldp-fec-peer-vc-type", rest_name="ldp-fec-peer-vc-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
def _get_ldp_fec_peer_fec_type(self):
"""
Getter method for ldp_fec_peer_fec_type, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_fec_vc/output/ldp_fec_vc_rec_list/ldp_fec_peer_fec_type (uint32)
YANG Description: FEC-Type
"""
return self.__ldp_fec_peer_fec_type
def _set_ldp_fec_peer_fec_type(self, v, load=False):
"""
Setter method for ldp_fec_peer_fec_type, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_fec_vc/output/ldp_fec_vc_rec_list/ldp_fec_peer_fec_type (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_ldp_fec_peer_fec_type is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ldp_fec_peer_fec_type() directly.
YANG Description: FEC-Type
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="ldp-fec-peer-fec-type", rest_name="ldp-fec-peer-fec-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ldp_fec_peer_fec_type must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="ldp-fec-peer-fec-type", rest_name="ldp-fec-peer-fec-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
})
self.__ldp_fec_peer_fec_type = t
if hasattr(self, '_set'):
self._set()
def _unset_ldp_fec_peer_fec_type(self):
self.__ldp_fec_peer_fec_type = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="ldp-fec-peer-fec-type", rest_name="ldp-fec-peer-fec-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
def _get_ldp_fec_ingress(self):
"""
Getter method for ldp_fec_ingress, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_fec_vc/output/ldp_fec_vc_rec_list/ldp_fec_ingress (yes-no)
YANG Description: Ingress
"""
return self.__ldp_fec_ingress
def _set_ldp_fec_ingress(self, v, load=False):
"""
Setter method for ldp_fec_ingress, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_fec_vc/output/ldp_fec_vc_rec_list/ldp_fec_ingress (yes-no)
If this variable is read-only (config: false) in the
source YANG file, then _set_ldp_fec_ingress is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ldp_fec_ingress() directly.
YANG Description: Ingress
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'yes': {'value': 1}, u'no': {'value': 0}},), is_leaf=True, yang_name="ldp-fec-ingress", rest_name="ldp-fec-ingress", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='yes-no', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ldp_fec_ingress must be of a type compatible with yes-no""",
'defined-type': "brocade-mpls:yes-no",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'yes': {'value': 1}, u'no': {'value': 0}},), is_leaf=True, yang_name="ldp-fec-ingress", rest_name="ldp-fec-ingress", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='yes-no', is_config=True)""",
})
self.__ldp_fec_ingress = t
if hasattr(self, '_set'):
self._set()
def _unset_ldp_fec_ingress(self):
self.__ldp_fec_ingress = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'yes': {'value': 1}, u'no': {'value': 0}},), is_leaf=True, yang_name="ldp-fec-ingress", rest_name="ldp-fec-ingress", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='yes-no', is_config=True)
def _get_ldp_fec_egress(self):
"""
Getter method for ldp_fec_egress, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_fec_vc/output/ldp_fec_vc_rec_list/ldp_fec_egress (yes-no)
YANG Description: Egress
"""
return self.__ldp_fec_egress
def _set_ldp_fec_egress(self, v, load=False):
"""
Setter method for ldp_fec_egress, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_fec_vc/output/ldp_fec_vc_rec_list/ldp_fec_egress (yes-no)
If this variable is read-only (config: false) in the
source YANG file, then _set_ldp_fec_egress is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ldp_fec_egress() directly.
YANG Description: Egress
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'yes': {'value': 1}, u'no': {'value': 0}},), is_leaf=True, yang_name="ldp-fec-egress", rest_name="ldp-fec-egress", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='yes-no', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ldp_fec_egress must be of a type compatible with yes-no""",
'defined-type': "brocade-mpls:yes-no",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'yes': {'value': 1}, u'no': {'value': 0}},), is_leaf=True, yang_name="ldp-fec-egress", rest_name="ldp-fec-egress", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='yes-no', is_config=True)""",
})
self.__ldp_fec_egress = t
if hasattr(self, '_set'):
self._set()
def _unset_ldp_fec_egress(self):
self.__ldp_fec_egress = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'yes': {'value': 1}, u'no': {'value': 0}},), is_leaf=True, yang_name="ldp-fec-egress", rest_name="ldp-fec-egress", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='yes-no', is_config=True)
ldp_fec_peer_id = __builtin__.property(_get_ldp_fec_peer_id, _set_ldp_fec_peer_id)
ldp_fec_peer_lblspc = __builtin__.property(_get_ldp_fec_peer_lblspc, _set_ldp_fec_peer_lblspc)
ldp_fec_state = __builtin__.property(_get_ldp_fec_state, _set_ldp_fec_state)
ldp_fec_vc_id = __builtin__.property(_get_ldp_fec_vc_id, _set_ldp_fec_vc_id)
ldp_fec_peer_vc_type = __builtin__.property(_get_ldp_fec_peer_vc_type, _set_ldp_fec_peer_vc_type)
ldp_fec_peer_fec_type = __builtin__.property(_get_ldp_fec_peer_fec_type, _set_ldp_fec_peer_fec_type)
ldp_fec_ingress = __builtin__.property(_get_ldp_fec_ingress, _set_ldp_fec_ingress)
ldp_fec_egress = __builtin__.property(_get_ldp_fec_egress, _set_ldp_fec_egress)
_pyangbind_elements = {'ldp_fec_peer_id': ldp_fec_peer_id, 'ldp_fec_peer_lblspc': ldp_fec_peer_lblspc, 'ldp_fec_state': ldp_fec_state, 'ldp_fec_vc_id': ldp_fec_vc_id, 'ldp_fec_peer_vc_type': ldp_fec_peer_vc_type, 'ldp_fec_peer_fec_type': ldp_fec_peer_fec_type, 'ldp_fec_ingress': ldp_fec_ingress, 'ldp_fec_egress': ldp_fec_egress, } | 0.555435 | 0.055669 |
from django.db import models
# Create your models here.
class Location(models.Model):
location_name = models.CharField(max_length = 25)
def __str__(self):
return self.location_name
def save_location(self):
self.save()
def delete_location(location_id):
Location.objects.filter(id = location_id).delete()
def update_location(location_id, location):
Location.objects.filter(id = location_id).update(location_name = location)
class Category(models.Model):
category_name = models.CharField(max_length = 50)
def __str__(self):
return self.category_name
def save_category(self):
self.save()
def delete_category(category_id):
Category.objects.filter(id = category_id).delete()
def update_category(category_id, category):
Category.objects.filter(id = category_id).update(category_name = category)
class Photographer(models.Model):
names = models.CharField(max_length = 50)
email = models.EmailField(blank = True)
ig = models.CharField(max_length = 20, blank = True)
phone_number = models.CharField(max_length = 10,blank =True)
def __str__(self):
return self.names
def save_photographer(self):
self.save()
def delete_photographer(photographer_id):
Photographer.objects.filter(id = photographer_id).delete()
class Image(models.Model):
image_path = models.ImageField(upload_to = 'images/')
name = models.CharField(max_length = 50)
description = models.TextField(blank = True)
location = models.ForeignKey(Location, blank=True)
category = models.ForeignKey(Category, blank=True)
photographer = models.ForeignKey(Photographer)
def __str__(self):
return self.name
def save_image(self):
self.save()
def delete_image(image_id):
Image.objects.filter(id = image_id).delete()
def update_image(image_id, path):
Image.objects.filter(id = image_id).update(image_path = path)
def get_image_by_id(image_id):
image = Image.objects.get(pk = image_id)
return image
@classmethod
def search_image(cls, search_category):
images = cls.objects.filter(category__category_name__icontains=search_category)
return images
@classmethod
def filter_by_location(cls):
images = cls.objects.order_by('location')
return images
class Meta:
ordering = ['name'] | gallery/models.py | from django.db import models
# Create your models here.
class Location(models.Model):
location_name = models.CharField(max_length = 25)
def __str__(self):
return self.location_name
def save_location(self):
self.save()
def delete_location(location_id):
Location.objects.filter(id = location_id).delete()
def update_location(location_id, location):
Location.objects.filter(id = location_id).update(location_name = location)
class Category(models.Model):
category_name = models.CharField(max_length = 50)
def __str__(self):
return self.category_name
def save_category(self):
self.save()
def delete_category(category_id):
Category.objects.filter(id = category_id).delete()
def update_category(category_id, category):
Category.objects.filter(id = category_id).update(category_name = category)
class Photographer(models.Model):
names = models.CharField(max_length = 50)
email = models.EmailField(blank = True)
ig = models.CharField(max_length = 20, blank = True)
phone_number = models.CharField(max_length = 10,blank =True)
def __str__(self):
return self.names
def save_photographer(self):
self.save()
def delete_photographer(photographer_id):
Photographer.objects.filter(id = photographer_id).delete()
class Image(models.Model):
image_path = models.ImageField(upload_to = 'images/')
name = models.CharField(max_length = 50)
description = models.TextField(blank = True)
location = models.ForeignKey(Location, blank=True)
category = models.ForeignKey(Category, blank=True)
photographer = models.ForeignKey(Photographer)
def __str__(self):
return self.name
def save_image(self):
self.save()
def delete_image(image_id):
Image.objects.filter(id = image_id).delete()
def update_image(image_id, path):
Image.objects.filter(id = image_id).update(image_path = path)
def get_image_by_id(image_id):
image = Image.objects.get(pk = image_id)
return image
@classmethod
def search_image(cls, search_category):
images = cls.objects.filter(category__category_name__icontains=search_category)
return images
@classmethod
def filter_by_location(cls):
images = cls.objects.order_by('location')
return images
class Meta:
ordering = ['name'] | 0.510496 | 0.223165 |
import pygame as game
import time
import random
game.init()
color_white = (255, 255, 255)
color_black = (0, 0, 0)
color_red = (255, 0, 0)
green = (0, 155, 0)
display_width = 800
display_height = 600
DisplayScreen = game.display.set_mode((display_width, display_height))
game.display.set_caption('')
image = game.image.load('snakehead.png')
appleimg = game.image.load('apple.png')
objectClock = game.time.Clock()
Width_Apple = 10
pixel_size = 10
FPS = 15
arrow_key = "right"
font_small = game.font.SysFont("comicsansms", 25)
font_medium = game.font.SysFont("comicsansms", 50)
font_large = game.font.SysFont("comicsansms", 80)
def score(score):
text = font_small.render("Score: " + str(score), True, color_black)
DisplayScreen.blit(text, [0, 0])
def randAppleGen():
XpositionApple = round(random.randrange(0, display_width - Width_Apple)) # /10.0)*10.0
YpositionApple = round(random.randrange(0, display_height - Width_Apple)) # /10.0)*10.0
return XpositionApple, YpositionApple
def intro_for_game():
intro_screen = True
while intro_screen:
for eachEvent in game.event.get():
if eachEvent.type == game.QUIT:
game.quit()
quit()
if eachEvent.type == game.KEYDOWN:
if eachEvent.key == game.K_c:
intro_screen = False
if eachEvent.key == game.K_q:
game.quit()
quit()
DisplayScreen.fill(color_white)
display_ScreenMessage("Welcome to drawSnake",
green,
-100,
"large")
display_ScreenMessage("",
color_black,
-30)
display_ScreenMessage("",
color_black,
10)
display_ScreenMessage("Made by Python Programmers",
color_black,
50)
display_ScreenMessage("Press C to play or Q to quit.",
color_red,
180)
game.display.update()
objectClock.tick(15)
def drawSnake(pixel_size, snakeArray):
if arrow_key == "right":
head_of_snake = game.transform.rotate(image, 270)
if arrow_key == "left":
head_of_snake = game.transform.rotate(image, 90)
if arrow_key == "up":
head_of_snake = image
if arrow_key == "down":
head_of_snake = game.transform.rotate(image, 180)
DisplayScreen.blit(head_of_snake, (snakeArray[-1][0], snakeArray[-1][1]))
for eachSegment in snakeArray[:-1]:
game.draw.rect(DisplayScreen, green, [eachSegment[0], eachSegment[1], pixel_size, pixel_size])
def objects_text(sample_text, sample_color, sample_size):
if sample_size == "small":
surface_for_text = font_small.render(sample_text, True, sample_color)
elif sample_size == "medium":
surface_for_text = font_medium.render(sample_text, True, sample_color)
elif sample_size == "large":
surface_for_text = font_large.render(sample_text, True, sample_color)
return surface_for_text, surface_for_text.get_rect()
def display_ScreenMessage(message, font_color, yDisplace=0, font_size="small"):
textSurface, textRectShape = objects_text(message, font_color, font_size)
textRectShape.center = (display_width / 2), (display_height / 2) + yDisplace
DisplayScreen.blit(textSurface, textRectShape)
def MainLoopForGame():
global arrow_key
arrow_key = 'right'
gameOver = False
gameFinish = False
change_x = display_width / 2
change_y = display_height / 2
lead_x_change = 10
lead_y_change = 0
snakeArray = []
snakeLength = 1
XpositionApple, YpositionApple = randAppleGen()
while not gameOver:
while gameFinish == True:
DisplayScreen.fill(color_white)
display_ScreenMessage("Game over",
color_red,
yDisplace=-50,
font_size="large")
display_ScreenMessage("Press C to play again or Q to quit",
color_red,
50,
font_size="medium")
game.display.update()
for anyEvent in game.event.get():
if anyEvent.type == game.QUIT:
gameFinish = False
gameOver = True
if anyEvent.type == game.KEYDOWN:
if anyEvent.key == game.K_q:
gameOver = True
gameFinish = False
if anyEvent.key == game.K_c:
MainLoopForGame()
for anyEvent in game.event.get():
if anyEvent.type == game.QUIT:
gameOver = True
if anyEvent.type == game.KEYDOWN:
if anyEvent.key == game.K_LEFT:
arrow_key = "left"
lead_x_change = -pixel_size
lead_y_change = 0
elif anyEvent.key == game.K_RIGHT:
arrow_key = "right"
lead_x_change = pixel_size
lead_y_change = 0
elif anyEvent.key == game.K_UP:
arrow_key = "up"
lead_y_change = -pixel_size
lead_x_change = 0
elif anyEvent.key == game.K_DOWN:
arrow_key = "down"
lead_y_change = pixel_size
lead_x_change = 0
if change_x >= display_width or change_x < 0 or change_y >= display_height or change_y < 0:
gameFinish = True
change_x += lead_x_change
change_y += lead_y_change
DisplayScreen.fill(color_white)
# game.draw.rect(DisplayScreen, color_red, [XpositionApple, YpositionApple, Width_Apple, Width_Apple])
DisplayScreen.blit(appleimg, (XpositionApple, YpositionApple))
head_of_Snake = []
head_of_Snake.append(change_x)
head_of_Snake.append(change_y)
snakeArray.append(head_of_Snake)
if len(snakeArray) > snakeLength:
del snakeArray[0]
for eachPart in snakeArray[:-1]:
if eachPart == head_of_Snake:
gameFinish = True
drawSnake(pixel_size, snakeArray)
score(snakeLength - 1)
game.display.update()
if change_x > XpositionApple and change_x < XpositionApple + Width_Apple or change_x + pixel_size > XpositionApple and change_x + pixel_size < XpositionApple + Width_Apple:
if change_y > YpositionApple and change_y < YpositionApple + Width_Apple:
XpositionApple, YpositionApple = randAppleGen()
snakeLength += 1
elif change_y + pixel_size > YpositionApple and change_y + pixel_size < YpositionApple + Width_Apple:
XpositionApple, YpositionApple = randAppleGen()
snakeLength += 1
objectClock.tick(FPS)
game.quit()
quit()
intro_for_game()
MainLoopForGame() | Chapter11/SnakeGameFinal.py | import pygame as game
import time
import random
game.init()
color_white = (255, 255, 255)
color_black = (0, 0, 0)
color_red = (255, 0, 0)
green = (0, 155, 0)
display_width = 800
display_height = 600
DisplayScreen = game.display.set_mode((display_width, display_height))
game.display.set_caption('')
image = game.image.load('snakehead.png')
appleimg = game.image.load('apple.png')
objectClock = game.time.Clock()
Width_Apple = 10
pixel_size = 10
FPS = 15
arrow_key = "right"
font_small = game.font.SysFont("comicsansms", 25)
font_medium = game.font.SysFont("comicsansms", 50)
font_large = game.font.SysFont("comicsansms", 80)
def score(score):
text = font_small.render("Score: " + str(score), True, color_black)
DisplayScreen.blit(text, [0, 0])
def randAppleGen():
XpositionApple = round(random.randrange(0, display_width - Width_Apple)) # /10.0)*10.0
YpositionApple = round(random.randrange(0, display_height - Width_Apple)) # /10.0)*10.0
return XpositionApple, YpositionApple
def intro_for_game():
intro_screen = True
while intro_screen:
for eachEvent in game.event.get():
if eachEvent.type == game.QUIT:
game.quit()
quit()
if eachEvent.type == game.KEYDOWN:
if eachEvent.key == game.K_c:
intro_screen = False
if eachEvent.key == game.K_q:
game.quit()
quit()
DisplayScreen.fill(color_white)
display_ScreenMessage("Welcome to drawSnake",
green,
-100,
"large")
display_ScreenMessage("",
color_black,
-30)
display_ScreenMessage("",
color_black,
10)
display_ScreenMessage("Made by Python Programmers",
color_black,
50)
display_ScreenMessage("Press C to play or Q to quit.",
color_red,
180)
game.display.update()
objectClock.tick(15)
def drawSnake(pixel_size, snakeArray):
if arrow_key == "right":
head_of_snake = game.transform.rotate(image, 270)
if arrow_key == "left":
head_of_snake = game.transform.rotate(image, 90)
if arrow_key == "up":
head_of_snake = image
if arrow_key == "down":
head_of_snake = game.transform.rotate(image, 180)
DisplayScreen.blit(head_of_snake, (snakeArray[-1][0], snakeArray[-1][1]))
for eachSegment in snakeArray[:-1]:
game.draw.rect(DisplayScreen, green, [eachSegment[0], eachSegment[1], pixel_size, pixel_size])
def objects_text(sample_text, sample_color, sample_size):
if sample_size == "small":
surface_for_text = font_small.render(sample_text, True, sample_color)
elif sample_size == "medium":
surface_for_text = font_medium.render(sample_text, True, sample_color)
elif sample_size == "large":
surface_for_text = font_large.render(sample_text, True, sample_color)
return surface_for_text, surface_for_text.get_rect()
def display_ScreenMessage(message, font_color, yDisplace=0, font_size="small"):
textSurface, textRectShape = objects_text(message, font_color, font_size)
textRectShape.center = (display_width / 2), (display_height / 2) + yDisplace
DisplayScreen.blit(textSurface, textRectShape)
def MainLoopForGame():
global arrow_key
arrow_key = 'right'
gameOver = False
gameFinish = False
change_x = display_width / 2
change_y = display_height / 2
lead_x_change = 10
lead_y_change = 0
snakeArray = []
snakeLength = 1
XpositionApple, YpositionApple = randAppleGen()
while not gameOver:
while gameFinish == True:
DisplayScreen.fill(color_white)
display_ScreenMessage("Game over",
color_red,
yDisplace=-50,
font_size="large")
display_ScreenMessage("Press C to play again or Q to quit",
color_red,
50,
font_size="medium")
game.display.update()
for anyEvent in game.event.get():
if anyEvent.type == game.QUIT:
gameFinish = False
gameOver = True
if anyEvent.type == game.KEYDOWN:
if anyEvent.key == game.K_q:
gameOver = True
gameFinish = False
if anyEvent.key == game.K_c:
MainLoopForGame()
for anyEvent in game.event.get():
if anyEvent.type == game.QUIT:
gameOver = True
if anyEvent.type == game.KEYDOWN:
if anyEvent.key == game.K_LEFT:
arrow_key = "left"
lead_x_change = -pixel_size
lead_y_change = 0
elif anyEvent.key == game.K_RIGHT:
arrow_key = "right"
lead_x_change = pixel_size
lead_y_change = 0
elif anyEvent.key == game.K_UP:
arrow_key = "up"
lead_y_change = -pixel_size
lead_x_change = 0
elif anyEvent.key == game.K_DOWN:
arrow_key = "down"
lead_y_change = pixel_size
lead_x_change = 0
if change_x >= display_width or change_x < 0 or change_y >= display_height or change_y < 0:
gameFinish = True
change_x += lead_x_change
change_y += lead_y_change
DisplayScreen.fill(color_white)
# game.draw.rect(DisplayScreen, color_red, [XpositionApple, YpositionApple, Width_Apple, Width_Apple])
DisplayScreen.blit(appleimg, (XpositionApple, YpositionApple))
head_of_Snake = []
head_of_Snake.append(change_x)
head_of_Snake.append(change_y)
snakeArray.append(head_of_Snake)
if len(snakeArray) > snakeLength:
del snakeArray[0]
for eachPart in snakeArray[:-1]:
if eachPart == head_of_Snake:
gameFinish = True
drawSnake(pixel_size, snakeArray)
score(snakeLength - 1)
game.display.update()
if change_x > XpositionApple and change_x < XpositionApple + Width_Apple or change_x + pixel_size > XpositionApple and change_x + pixel_size < XpositionApple + Width_Apple:
if change_y > YpositionApple and change_y < YpositionApple + Width_Apple:
XpositionApple, YpositionApple = randAppleGen()
snakeLength += 1
elif change_y + pixel_size > YpositionApple and change_y + pixel_size < YpositionApple + Width_Apple:
XpositionApple, YpositionApple = randAppleGen()
snakeLength += 1
objectClock.tick(FPS)
game.quit()
quit()
intro_for_game()
MainLoopForGame() | 0.330147 | 0.222954 |
import sys
import ds_format as ds
import os
import numpy as np
from alcf.models import META
from alcf import misc
import aquarius_time as aq
VARS = [
'gh',
't',
'ciwc',
'clw',
'tcc',
'sp',
]
VARS_AUX = [
'level',
'time',
'latitude',
'longitude',
]
TRANS = {
'gh': 'zfull',
'latitude': 'lat',
'longitude': 'lon',
'level': 'pfull',
't': 'ta',
'ciwc': 'cli',
'clw': 'clw',
'tcc': 'cl',
'time': 'time',
'sp': 'ps',
}
def read(dirname, track, warnings=[], step=6./24.):
d_ll = ds.read(os.path.join(dirname, 'LL125.nc'), [
'latitude',
'longitude',
'z'
])
lat_ll = d_ll['latitude']
lon_ll = d_ll['longitude']
orog_ll = d_ll['z'][0,:,:]/9.80665
dd_idx = ds.readdir(dirname,
variables=['time', 'latitude', 'longitude'],
jd=True,
full=True,
warnings=warnings,
)
start_time = track['time'][0]
end_time = track['time'][-1]
d_out = {}
for var in VARS:
dd = []
var2 = TRANS[var]
for d_idx in dd_idx:
if var not in d_idx['.']:
continue
time = d_idx['time']
lat = d_idx['latitude']
lon = d_idx['longitude']
filename = d_idx['filename']
ii = np.nonzero(
(time >= start_time - step*0.5) &
(time < end_time + step*0.5)
)[0]
for i in ii:
t = time[i]
i2 = np.argmin(np.abs(track['time'] - time[i]))
lat0 = track['lat'][i2]
lon0 = track['lon'][i2]
j = np.argmin(np.abs(lat - lat0))
k = np.argmin(np.abs(lon - lon0))
j_ll = np.argmin(np.abs(lat_ll - lat0))
k_ll = np.argmin(np.abs(lon_ll - lon0))
d = ds.read(filename, VARS_AUX + [var],
sel={
'time': [i],
'latitude': j,
'longitude': k,
},
jd=True,
)
for a, b in TRANS.items():
if a in d.keys():
ds.rename(d, a, b)
d['lat'] = np.array([d['lat']])
d['lon'] = np.array([d['lon']])
d['orog'] = np.array([orog_ll[j_ll,k_ll]])
d['.']['lat']['.dims'] = ['time']
d['.']['lon']['.dims'] = ['time']
d['.']['orog'] = {'.dims': ['time']}
if 'pfull' in ds.get_vars(d):
d['pfull'] = d['pfull'].reshape([1, len(d['pfull'])])
d['.']['pfull']['.dims'] = ['time', 'pfull']
d['pfull'] = d['pfull'][:,::-1]
d[var2] = d[var2][:,::-1]
ds.select(d, {'pfull': np.arange(27)})
dd.append(d)
d = ds.op.merge(dd, 'time')
for var_aux in VARS_AUX:
if TRANS[var_aux] in ds.get_vars(d_out) \
and TRANS[var_aux] in ds.get_vars(d) \
and not np.all(d_out[TRANS[var_aux]] == d[TRANS[var_aux]]):
raise ValueError('%s: Field differs between input files' % TRANS[var_aux])
d_out.update(d)
d_out['pfull'] = d_out['pfull']*1e2
if 'time' in d_out:
d_out['time_bnds'] = misc.time_bnds(d_out['time'], step, start_time, end_time)
d_out['time'] = np.mean(d_out['time_bnds'], axis=1)
d_out['.'] = META
return d_out | alcf/models/jra55.py | import sys
import ds_format as ds
import os
import numpy as np
from alcf.models import META
from alcf import misc
import aquarius_time as aq
VARS = [
'gh',
't',
'ciwc',
'clw',
'tcc',
'sp',
]
VARS_AUX = [
'level',
'time',
'latitude',
'longitude',
]
TRANS = {
'gh': 'zfull',
'latitude': 'lat',
'longitude': 'lon',
'level': 'pfull',
't': 'ta',
'ciwc': 'cli',
'clw': 'clw',
'tcc': 'cl',
'time': 'time',
'sp': 'ps',
}
def read(dirname, track, warnings=[], step=6./24.):
d_ll = ds.read(os.path.join(dirname, 'LL125.nc'), [
'latitude',
'longitude',
'z'
])
lat_ll = d_ll['latitude']
lon_ll = d_ll['longitude']
orog_ll = d_ll['z'][0,:,:]/9.80665
dd_idx = ds.readdir(dirname,
variables=['time', 'latitude', 'longitude'],
jd=True,
full=True,
warnings=warnings,
)
start_time = track['time'][0]
end_time = track['time'][-1]
d_out = {}
for var in VARS:
dd = []
var2 = TRANS[var]
for d_idx in dd_idx:
if var not in d_idx['.']:
continue
time = d_idx['time']
lat = d_idx['latitude']
lon = d_idx['longitude']
filename = d_idx['filename']
ii = np.nonzero(
(time >= start_time - step*0.5) &
(time < end_time + step*0.5)
)[0]
for i in ii:
t = time[i]
i2 = np.argmin(np.abs(track['time'] - time[i]))
lat0 = track['lat'][i2]
lon0 = track['lon'][i2]
j = np.argmin(np.abs(lat - lat0))
k = np.argmin(np.abs(lon - lon0))
j_ll = np.argmin(np.abs(lat_ll - lat0))
k_ll = np.argmin(np.abs(lon_ll - lon0))
d = ds.read(filename, VARS_AUX + [var],
sel={
'time': [i],
'latitude': j,
'longitude': k,
},
jd=True,
)
for a, b in TRANS.items():
if a in d.keys():
ds.rename(d, a, b)
d['lat'] = np.array([d['lat']])
d['lon'] = np.array([d['lon']])
d['orog'] = np.array([orog_ll[j_ll,k_ll]])
d['.']['lat']['.dims'] = ['time']
d['.']['lon']['.dims'] = ['time']
d['.']['orog'] = {'.dims': ['time']}
if 'pfull' in ds.get_vars(d):
d['pfull'] = d['pfull'].reshape([1, len(d['pfull'])])
d['.']['pfull']['.dims'] = ['time', 'pfull']
d['pfull'] = d['pfull'][:,::-1]
d[var2] = d[var2][:,::-1]
ds.select(d, {'pfull': np.arange(27)})
dd.append(d)
d = ds.op.merge(dd, 'time')
for var_aux in VARS_AUX:
if TRANS[var_aux] in ds.get_vars(d_out) \
and TRANS[var_aux] in ds.get_vars(d) \
and not np.all(d_out[TRANS[var_aux]] == d[TRANS[var_aux]]):
raise ValueError('%s: Field differs between input files' % TRANS[var_aux])
d_out.update(d)
d_out['pfull'] = d_out['pfull']*1e2
if 'time' in d_out:
d_out['time_bnds'] = misc.time_bnds(d_out['time'], step, start_time, end_time)
d_out['time'] = np.mean(d_out['time_bnds'], axis=1)
d_out['.'] = META
return d_out | 0.094469 | 0.213152 |
import json
from iamheadless_projects.lookups.pagination import ALLOWED_FORMATS
from .. import utils
from ..pydantic_models import ItemSchema, NestedItemSchema
from .index_filters import filter_by_lookup_indexes
def retrieve_item(
item_id,
lookup_field='id',
format='queryset',
item_pydantic_model=None,
nested_item_pydantic_model=None,
):
# --
Item = utils.get_item_model()
ItemRelation = utils.get_item_relation_model()
# --
if item_pydantic_model is None:
item_pydantic_model = ItemSchema
if nested_item_pydantic_model is None:
nested_item_pydantic_model = NestedItemSchema
if format not in ALLOWED_FORMATS:
raise ValueError(f'format "{format}" is not supported')
# --
if lookup_field != 'id':
# XXXX fix this
# .get()
queryset = Item.objects.all().prefetch_related('parents')
queryset = filter_by_lookup_indexes(queryset, lookup_field)
instance = queryset.first()
else:
# XXXX fix this
# .get()
try:
instance = Item.objects.get(id=item_id)
except Item.DoesNotExist:
instance = None
# --
if instance is None:
return None
if format in ['dict', 'json']:
parent_relations = ItemRelation.objects.filter(child=instance).distinct()
pydantic_model = item_pydantic_model.from_django(instance)
dict_value = pydantic_model.dict()
if format == 'dict':
dict_value['parents'] = {}
for x in parent_relations:
if x.status not in dict_value['parents'].keys():
dict_value['parents'][x.status] = []
parent = nested_item_pydantic_model.from_django(x.parent)
dict_value['parents'][x.status].append(parent.dict())
return dict_value
json_value = pydantic_model.json()
dict_value = json.loads(json_value)
for x in parent_relations:
if x.status not in dict_value['parents'].keys():
dict_value['parents'][x.status] = []
parent = nested_item_pydantic_model.from_django(x.parent)
dict_value['parents'][x.status].append(json.loads(parent.json()))
return json.dumps(dict_value)
return instance | iamheadless_publisher/lookups/item_retrieve.py | import json
from iamheadless_projects.lookups.pagination import ALLOWED_FORMATS
from .. import utils
from ..pydantic_models import ItemSchema, NestedItemSchema
from .index_filters import filter_by_lookup_indexes
def retrieve_item(
        item_id,
        lookup_field='id',
        format='queryset',
        item_pydantic_model=None,
        nested_item_pydantic_model=None,
        ):
    """
    Fetch one item and return it as a model instance, dict, or JSON
    string according to ``format``; returns None when nothing matches.

    Raises ValueError when ``format`` is not in ALLOWED_FORMATS.
    """
    Item = utils.get_item_model()
    ItemRelation = utils.get_item_relation_model()

    if item_pydantic_model is None:
        item_pydantic_model = ItemSchema
    if nested_item_pydantic_model is None:
        nested_item_pydantic_model = NestedItemSchema

    if format not in ALLOWED_FORMATS:
        raise ValueError(f'format "{format}" is not supported')

    if lookup_field == 'id':
        # XXXX fix this
        # .get()
        try:
            instance = Item.objects.get(id=item_id)
        except Item.DoesNotExist:
            instance = None
    else:
        # XXXX fix this
        # .get()
        indexed = filter_by_lookup_indexes(
            Item.objects.all().prefetch_related('parents'),
            lookup_field,
        )
        instance = indexed.first()

    if instance is None:
        return None
    if format not in ['dict', 'json']:
        return instance

    relations = ItemRelation.objects.filter(child=instance).distinct()
    serialized = item_pydantic_model.from_django(instance)

    if format == 'dict':
        payload = serialized.dict()
        payload['parents'] = {}
        for relation in relations:
            nested = nested_item_pydantic_model.from_django(relation.parent)
            payload['parents'].setdefault(relation.status, [])
            payload['parents'][relation.status].append(nested.dict())
        return payload

    payload = json.loads(serialized.json())
    for relation in relations:
        nested = nested_item_pydantic_model.from_django(relation.parent)
        payload['parents'].setdefault(relation.status, [])
        payload['parents'][relation.status].append(json.loads(nested.json()))
    return json.dumps(payload)
import cvxpy.utilities as u
import cvxpy.lin_ops.lin_utils as lu
from cvxpy.expressions.constants.parameter import Parameter
from cvxpy.atoms.elementwise.elementwise import Elementwise
from cvxpy.atoms.elementwise.abs import abs
from cvxpy.atoms.elementwise.square import square
import numpy as np
class huber(Elementwise):
    """The Huber function

        Huber(x, M) = 2M|x|-M^2 for |x| >= |M|
                      |x|^2     for |x| <= |M|

    M defaults to 1.

    Parameters
    ----------
    x : Expression
        A CVXPY expression.
    M : int/float or Parameter
        Threshold separating the quadratic and linear regions.
    """
    def __init__(self, x, M=1):
        self.M = self.cast_to_const(M)
        super(huber, self).__init__(x)

    @Elementwise.numpy_numeric
    def numeric(self, values):
        """Returns the huber function applied elementwise to x.
        """
        x = values[0]
        M = self.M.value
        abs_x = np.abs(x)
        # Vectorized piecewise evaluation: same values and same <=
        # boundary as the original per-element double loop, computed in
        # one C-level pass instead of O(rows*cols) Python iterations.
        output = np.where(abs_x <= M, np.square(x), 2*M*abs_x - M**2)
        # Match the original's float64 ndarray output (np.zeros buffer).
        return np.asarray(output, dtype=np.float64)

    def sign_from_args(self):
        """Always positive.
        """
        return u.Sign.POSITIVE

    def func_curvature(self):
        """Default curvature.
        """
        return u.Curvature.CONVEX

    def monotonicity(self):
        """Increasing for positive arg, decreasing for negative.
        """
        return [u.monotonicity.SIGNED]

    def get_data(self):
        """Returns the parameter M.
        """
        return self.M

    def validate_arguments(self):
        """Checks that M >= 0 and is a scalar constant.

        Raises
        ------
        ValueError
            If M is negative, non-constant, or non-scalar.
        """
        if not (self.M.is_positive() and self.M.is_constant()
                and self.M.is_scalar()):
            raise ValueError("M must be a non-negative scalar constant.")

    @staticmethod
    def graph_implementation(arg_objs, size, data=None):
        """Reduces the atom to an affine expression and list of constraints.

        minimize n^2 + 2M|s|
        subject to s + n = x

        Parameters
        ----------
        arg_objs : list
            LinExpr for each argument.
        size : tuple
            The size of the resulting expression.
        data :
            Additional data required by the atom (here, M).

        Returns
        -------
        tuple
            (LinOp for objective, list of constraints)
        """
        M = data
        x = arg_objs[0]
        n = lu.create_var(size)
        s = lu.create_var(size)
        two = lu.create_const(2, (1, 1))
        if isinstance(M, Parameter):
            M = lu.create_param(M, (1, 1))
        else:  # M is constant.
            M = lu.create_const(M.value, (1, 1))
        # n**2 + 2*M*|s|
        n2, constr_sq = square.graph_implementation([n], size)
        abs_s, constr_abs = abs.graph_implementation([s], size)
        M_abs_s = lu.mul_expr(M, abs_s, size)
        obj = lu.sum_expr([n2, lu.mul_expr(two, M_abs_s, size)])
        # x == s + n
        constraints = constr_sq + constr_abs
        constraints.append(lu.create_eq(x, lu.sum_expr([n, s])))
        return (obj, constraints)
import cvxpy.lin_ops.lin_utils as lu
from cvxpy.expressions.constants.parameter import Parameter
from cvxpy.atoms.elementwise.elementwise import Elementwise
from cvxpy.atoms.elementwise.abs import abs
from cvxpy.atoms.elementwise.square import square
import numpy as np
class huber(Elementwise):
    """Elementwise Huber penalty.

        Huber(x, M) = |x|^2        for |x| <= |M|
                      2M|x| - M^2  for |x| >= |M|

    M defaults to 1.

    Parameters
    ----------
    x : Expression
        A CVXPY expression.
    M : int/float or Parameter
    """
    def __init__(self, x, M=1):
        self.M = self.cast_to_const(M)
        super(huber, self).__init__(x)

    @Elementwise.numpy_numeric
    def numeric(self, values):
        """Evaluate the Huber function elementwise on x.
        """
        data = values[0]
        threshold = self.M.value
        result = np.zeros(data.shape)
        for position, entry in np.ndenumerate(data):
            magnitude = np.abs(entry)
            if magnitude <= threshold:
                result[position] = np.square(entry)
            else:
                result[position] = 2*threshold*magnitude - threshold**2
        return result

    def sign_from_args(self):
        """The result is always positive.
        """
        return u.Sign.POSITIVE

    def func_curvature(self):
        """The atom's default curvature.
        """
        return u.Curvature.CONVEX

    def monotonicity(self):
        """Increasing for a positive argument, decreasing for a negative one.
        """
        return [u.monotonicity.SIGNED]

    def get_data(self):
        """The threshold parameter M.
        """
        return self.M

    def validate_arguments(self):
        """Verify that M is a non-negative scalar constant.
        """
        M = self.M
        M_is_valid = M.is_positive() and M.is_constant() and M.is_scalar()
        if not M_is_valid:
            raise ValueError("M must be a non-negative scalar constant.")

    @staticmethod
    def graph_implementation(arg_objs, size, data=None):
        """Reduce the atom to an affine expression plus constraints.

        minimize n^2 + 2M|s|
        subject to s + n = x

        Parameters
        ----------
        arg_objs : list
            LinExpr for each argument.
        size : tuple
            The size of the resulting expression.
        data :
            Additional data required by the atom.

        Returns
        -------
        tuple
            (LinOp for objective, list of constraints)
        """
        M = data
        x = arg_objs[0]
        n = lu.create_var(size)
        s = lu.create_var(size)
        two = lu.create_const(2, (1, 1))
        if isinstance(M, Parameter):
            M_lin = lu.create_param(M, (1, 1))
        else:
            # M is a plain constant expression.
            M_lin = lu.create_const(M.value, (1, 1))
        # Objective: n**2 + 2*M*|s|
        squared_n, square_constraints = square.graph_implementation([n], size)
        abs_of_s, abs_constraints = abs.graph_implementation([s], size)
        weighted_abs = lu.mul_expr(M_lin, abs_of_s, size)
        objective = lu.sum_expr(
            [squared_n, lu.mul_expr(two, weighted_abs, size)])
        # Constraint: x == s + n
        all_constraints = square_constraints + abs_constraints
        all_constraints.append(lu.create_eq(x, lu.sum_expr([n, s])))
        return (objective, all_constraints)
import json
# Building identifiers; index-aligned with `requirements` and `levels`.
buildings = ["HEADQUARTER", "BARRACKS", "STABLE", "WORKSHOP", "ACADEMY", "SMITHY", "RALLY_POINT", "STATUE", "MARKET",
"TIMBER_CAMP", "CLAY_PIT", "IRON_MINE", "FARM", "WAREHOUSE", "HIDING_PLACE", "WALL"]
# Minimum building levels required before each building can be
# constructed; index-aligned with `buildings` (empty dict = none).
requirements = [
{},
{"HEADQUARTER": 3},
{"HEADQUARTER": 10, "BARRACKS": 5, "SMITHY": 5},
{"HEADQUARTER": 10, "SMITHY": 10},
{"HEADQUARTER": 20, "SMITHY": 20, "MARKET": 10},
{"HEADQUARTER": 5, "BARRACKS": 1},
{},
{},
{"HEADQUARTER": 3, "WAREHOUSE": 2},
{},
{},
{},
{},
{},
{},
{"BARRACKS": 1}
]
levels = [
[
[[90, 80, 70, 5, 5], 0.95],
[[113, 102, 88, 1, 6], 0.91],
[[143, 130, 111, 1, 7], 0.86],
[[180, 166, 140, 1, 8], 0.82],
[[227, 211, 176, 1, 9], 0.78],
[[286, 270, 222, 2, 11], 0.75],
[[360, 344, 280, 2, 13], 0.71],
[[454, 438, 353, 2, 15], 0.68],
[[572, 559, 445, 3, 18], 0.64],
[[720, 712, 560, 3, 21], 0.61],
[[908, 908, 706, 3, 24], 0.58],
[[1144, 1158, 890, 4, 28], 0.56],
[[1441, 1476, 1121, 5, 33], 0.53],
[[1816, 1882, 1412, 5, 38], 0.51],
[[2288, 2400, 1779, 7, 45], 0.48],
[[2883, 3060, 2242, 8, 53], 0.46],
[[3632, 3902, 2825, 9, 62], 0.44],
[[4577, 4975, 3560, 10, 72], 0.42],
[[5767, 6343, 4485, 12, 84], 0.40],
[[7266, 8087, 5651, 15, 99], 0.38],
[[9155, 10311, 7120, 17, 116], 0.36],
[[11535, 13146, 8972, 19, 135], 0.34],
[[14534, 16762, 11304, 23, 158], 0.33],
[[18313, 21371, 14244, 27, 185], 0.31],
[[23075, 27248, 17947, 31, 216], 0.30],
[[29074, 34741, 22613, 37, 253], 0.28],
[[36633, 44295, 28493, 43, 296], 0.27],
[[46158, 56476, 35901, 51, 347], 0.26],
[[58159, 72007, 45235, 59, 406], 0.24],
[[73280, 91809, 56996, 69, 475], 0.23]
],
[
[[200, 170, 90, 7, 7], 0.63],
[[252, 218, 113, 1, 8], 0.59],
[[318, 279, 143, 2, 10], 0.56],
[[400, 357, 180, 1, 11], 0.53],
[[504, 456, 227, 2, 13], 0.50],
[[635, 584, 286, 2, 15], 0.47],
[[800, 748, 360, 3, 18], 0.44],
[[1008, 957, 454, 3, 21], 0.42],
[[1271, 1225, 572, 4, 25], 0.39],
[[1601, 1568, 720, 4, 29], 0.37],
[[2017, 2007, 908, 5, 34], 0.35],
[[2542, 2569, 1144, 5, 39], 0.33],
[[3202, 3288, 1441, 7, 46], 0.31],
[[4035, 4209, 1816, 8, 54], 0.29],
[[5084, 5388, 2288, 9, 63], 0.28],
[[6406, 6896, 2883, 11, 74], 0.26],
[[8072, 8827, 3632, 12, 86], 0.25],
[[10170, 11298, 4577, 15, 101], 0.23],
[[12814, 14462, 5767, 17, 118], 0.22],
[[16146, 18511, 7266, 20, 138], 0.21],
[[20344, 23695, 9155, 24, 162], 0.20],
[[25634, 30329, 11535, 27, 189], 0.19],
[[32298, 38821, 14534, 32, 221], 0.17],
[[40696, 49691, 18313, 38, 259], 0.16],
[[51277, 63605, 23075, 44, 303], 0.15]
],
[
[[270, 240, 260, 8, 8], 0.63],
[[340, 307, 328, 1, 9], 0.59],
[[429, 393, 413, 2, 11], 0.56],
[[540, 503, 520, 2, 13], 0.53],
[[681, 644, 655, 2, 15], 0.5],
[[857, 825, 826, 3, 18], 0.47],
[[1080, 1056, 1040, 3, 21], 0.44],
[[1361, 1351, 1311, 3, 24], 0.42],
[[1715, 1729, 1652, 4, 28], 0.39],
[[2161, 2214, 2081, 5, 33], 0.37],
[[2723, 2833, 2622, 5, 38], 0.35],
[[3431, 3627, 3304, 7, 45], 0.33],
[[4323, 4642, 4163, 8, 53], 0.31],
[[5447, 5942, 5246, 9, 62], 0.29],
[[6864, 7606, 6609, 10, 72], 0.28],
[[8648, 9736, 8328, 12, 84], 0.26],
[[10897, 12462, 10493, 15, 99], 0.25],
[[13730, 15951, 13221, 16, 115], 0.23],
[[17300, 20417, 16659, 20, 135], 0.22],
[[21797, 26134, 20990, 23, 158], 0.21]
],
[
[[300, 240, 260, 8, 8], 0.63],
[[378, 307, 328, 1, 9], 0.59],
[[476, 393, 413, 2, 11], 0.56],
[[600, 503, 520, 2, 13], 0.53],
[[756, 644, 655, 2, 15], 0.5],
[[953, 825, 826, 3, 18], 0.47],
[[1200, 1056, 1040, 3, 21], 0.44],
[[1513, 1351, 1311, 3, 24], 0.42],
[[1906, 1729, 1652, 4, 28], 0.39],
[[2401, 2214, 2081, 5, 33], 0.37],
[[3026, 2833, 2622, 5, 38], 0.35],
[[3812, 3627, 3304, 7, 45], 0.33],
[[4804, 4642, 4163, 8, 53], 0.31],
[[6053, 5942, 5246, 9, 62], 0.29],
[[7626, 7606, 6609, 10, 72], 0.28]
],
[
[[15000, 25000, 10000, 80, 80], 0.63],
[[30000, 50000, 20000, 14, 94], 0.59],
[[60000, 100000, 40000, 16, 110], 0.56]
],
[
[[220, 180, 240, 20, 20], 0.91],
[[277, 230, 302, 3, 23], 0.83],
[[349, 293, 381, 4, 27], 0.75],
[[440, 373, 480, 5, 32], 0.68],
[[555, 476, 605, 5, 37], 0.62],
[[699, 606, 762, 7, 44], 0.56],
[[880, 773, 960, 7, 51], 0.51],
[[1109, 986, 1210, 9, 60], 0.47],
[[1398, 1257, 1525, 10, 70], 0.42],
[[1761, 1603, 1921, 12, 82], 0.39],
[[2219, 2043, 2421, 14, 96], 0.35],
[[2796, 2605, 3050, 16, 112], 0.32],
[[3523, 3322, 3843, 20, 132], 0.29],
[[4439, 4236, 4842, 22, 154], 0.26],
[[5593, 5400, 6101, 26, 180], 0.24],
[[7047, 6885, 7687, 31, 211], 0.22],
[[8879, 8779, 9686, 36, 247], 0.2],
[[11187, 11193, 12204, 42, 289], 0.18],
[[14096, 14271, 15377, 49, 338], 0.16],
[[17761, 18196, 19375, 57, 395], 0.15]
],
[
[[10, 40, 30, 0, 0], 1.0]
],
[
[[220, 220, 220, 10, 10], 1.0]
],
[
[[100, 100, 100, 20, 20], 1],
[[126, 128, 126, 3, 23], 2],
[[159, 163, 159, 4, 27], 3],
[[200, 207, 200, 5, 32], 4],
[[252, 264, 252, 5, 37], 5],
[[318, 337, 318, 7, 44], 6],
[[400, 430, 400, 7, 51], 7],
[[504, 548, 504, 9, 60], 8],
[[635, 698, 635, 10, 70], 9],
[[800, 890, 800, 12, 82], 10],
[[1009, 1135, 1009, 14, 96], 11],
[[1271, 1447, 1271, 16, 112], 14],
[[1601, 1846, 1601, 20, 132], 19],
[[2018, 2353, 2018, 22, 154], 26],
[[2542, 3000, 2542, 26, 180], 35],
[[3203, 3825, 3203, 31, 211], 46],
[[4036, 4877, 4036, 36, 247], 59],
[[5085, 6218, 5085, 42, 289], 74],
[[6407, 7928, 6407, 49, 338], 91],
[[8073, 10109, 8073, 57, 395], 110],
[[10172, 12889, 10172, 67, 462], 131],
[[12817, 16433, 12817, 79, 541], 154],
[[16149, 20952, 16149, 92, 633], 179],
[[20348, 26714, 20348, 107, 740], 206],
[[25639, 34060, 25639, 126, 866], 235]
],
[
[[50, 60, 40, 5, 5], 30],
[[63, 77, 50, 1, 6], 35],
[[78, 98, 62, 1, 7], 41],
[[98, 124, 77, 1, 8], 47],
[[122, 159, 96, 1, 9], 55],
[[153, 202, 120, 1, 10], 64],
[[191, 258, 149, 2, 12], 74],
[[238, 329, 185, 2, 14], 86],
[[298, 419, 231, 2, 16], 100],
[[373, 534, 287, 2, 18], 117],
[[466, 681, 358, 3, 21], 136],
[[582, 868, 446, 3, 24], 158],
[[728, 1107, 555, 4, 28], 184],
[[909, 1412, 691, 5, 33], 214],
[[1137, 1800, 860, 5, 38], 249],
[[1421, 2295, 1071, 5, 43], 289],
[[1776, 2926, 1333, 7, 50], 337],
[[2220, 3731, 1659, 8, 58], 391],
[[2776, 4757, 2066, 9, 67], 455],
[[3469, 6065, 2572, 10, 77], 530],
[[4337, 7733, 3202, 12, 89], 616],
[[5421, 9860, 3987, 14, 103], 717],
[[6776, 12571, 4963, 16, 119], 833],
[[8470, 16028, 6180, 19, 138], 969],
[[10588, 20436, 7694, 21, 159], 1127],
[[13235, 26056, 9578, 24, 183], 1311],
[[16544, 33221, 11925, 29, 212], 1525],
[[20680, 42357, 14847, 33, 245], 1774],
[[25849, 54005, 18484, 38, 283], 2063],
[[32312, 68857, 23013, 43, 326], 2400]
],
[
[[65, 50, 40, 10, 10], 30],
[[83, 63, 50, 1, 11], 35],
[[105, 80, 62, 2, 13], 41],
[[133, 101, 76, 2, 15], 47],
[[169, 128, 95, 2, 17], 55],
[[215, 162, 117, 2, 19], 64],
[[273, 205, 145, 3, 22], 74],
[[346, 259, 180, 3, 25], 86],
[[440, 328, 224, 4, 29], 100],
[[559, 415, 277, 4, 33], 117],
[[709, 525, 344, 4, 37], 136],
[[901, 664, 426, 5, 42], 158],
[[1144, 840, 529, 6, 48], 184],
[[1453, 1062, 655, 7, 55], 214],
[[1846, 1343, 813, 8, 63], 249],
[[2344, 1700, 1008, 8, 71], 289],
[[2977, 2150, 1250, 10, 81], 337],
[[3781, 2720, 1550, 12, 93], 391],
[[4802, 3440, 1922, 13, 106], 455],
[[6098, 4352, 2383, 15, 121], 530],
[[7744, 5505, 2955, 16, 137], 616],
[[9835, 6964, 3664, 20, 157], 717],
[[12491, 8810, 4543, 22, 179], 833],
[[15863, 11144, 5633, 25, 204], 969],
[[20147, 14098, 6985, 28, 232], 1127],
[[25586, 17833, 8662, 33, 265], 1311],
[[32495, 22559, 10740, 37, 302], 1525],
[[41268, 28537, 13318, 42, 344], 1774],
[[52410, 36100, 16515, 48, 392], 2063],
[[66561, 45666, 20478, 55, 447], 2400]
],
[
[[75, 65, 70, 10, 10], 30],
[[94, 83, 87, 2, 12], 35],
[[118, 106, 108, 2, 14], 41],
[[147, 135, 133, 2, 16], 47],
[[184, 172, 165, 3, 19], 55],
[[231, 219, 205, 3, 22], 64],
[[289, 279, 254, 4, 26], 74],
[[362, 356, 316, 4, 30], 86],
[[453, 454, 391, 5, 35], 100],
[[567, 579, 485, 6, 41], 117],
[[710, 738, 602, 7, 48], 136],
[[889, 941, 746, 8, 56], 158],
[[1113, 1200, 925, 10, 66], 184],
[[1393, 1529, 1147, 11, 77], 214],
[[1744, 1950, 1422, 13, 90], 249],
[[2183, 2486, 1764, 15, 105], 289],
[[2734, 3170, 2187, 18, 123], 337],
[[3422, 4042, 2712, 21, 144], 391],
[[4285, 5153, 3363, 25, 169], 455],
[[5365, 6571, 4170, 28, 197], 530],
[[6717, 8378, 5170, 34, 231], 616],
[[8409, 10681, 6411, 39, 270], 717],
[[10528, 13619, 7950, 46, 316], 833],
[[13181, 17364, 9858, 54, 370], 969],
[[16503, 22139, 12224, 63, 433], 1127],
[[20662, 28227, 15158, 74, 507], 1311],
[[25869, 35990, 18796, 86, 593], 1525],
[[32388, 45887, 23307, 100, 693], 1774],
[[40549, 58506, 28900, 118, 811], 2063],
[[50768, 74595, 35837, 138, 949], 2400]
],
[
[[45, 40, 30, 0, 0], 240],
[[59, 53, 39, 0, 0], 281],
[[76, 70, 50, 0, 0], 329],
[[99, 92, 64, 0, 0], 386],
[[129, 121, 83, 0, 0], 452],
[[167, 160, 107, 0, 0], 530],
[[217, 212, 138, 0, 0], 622],
[[282, 279, 178, 0, 0], 729],
[[367, 369, 230, 0, 0], 854],
[[477, 487, 297, 0, 0], 1002],
[[620, 642, 383, 0, 0], 1174],
[[806, 848, 494, 0, 0], 1376],
[[1048, 1119, 637, 0, 0], 1613],
[[1363, 1477, 822, 0, 0], 1891],
[[1772, 1950, 1060, 0, 0], 2216],
[[2303, 2574, 1368, 0, 0], 2598],
[[2994, 3398, 1764, 0, 0], 3045],
[[3893, 4486, 2276, 0, 0], 3569],
[[5060, 5921, 2936, 0, 0], 4183],
[[6579, 7816, 3787, 0, 0], 4904],
[[8552, 10317, 4886, 0, 0], 5748],
[[11118, 13618, 6302, 0, 0], 6737],
[[14453, 17976, 8130, 0, 0], 7896],
[[18789, 23728, 10488, 0, 0], 9255],
[[24426, 31321, 13529, 0, 0], 10848],
[[31754, 41344, 17453, 0, 0], 12715],
[[41280, 54574, 22514, 0, 0], 14904],
[[53664, 72037, 29043, 0, 0], 17469],
[[69763, 95089, 37466, 0, 0], 20476],
[[90692, 125517, 48331, 0, 0], 24000]
],
[
[[60, 50, 40, 0, 0], 1000],
[[76, 64, 50, 0, 0], 1229],
[[96, 81, 62, 0, 0], 1512],
[[121, 102, 77, 0, 0], 1859],
[[154, 130, 96, 0, 0], 2285],
[[194, 165, 120, 0, 0], 2810],
[[246, 210, 149, 0, 0], 3454],
[[311, 266, 185, 0, 0], 4247],
[[393, 338, 231, 0, 0], 5222],
[[498, 430, 287, 0, 0], 6420],
[[630, 546, 358, 0, 0], 7893],
[[796, 693, 446, 0, 0], 9705],
[[1007, 880, 555, 0, 0], 11932],
[[1274, 1118, 691, 0, 0], 14670],
[[1612, 1420, 860, 0, 0], 18037],
[[2039, 1803, 1071, 0, 0], 22177],
[[2580, 2290, 1333, 0, 0], 27266],
[[3264, 2908, 1659, 0, 0], 33523],
[[4128, 3693, 2066, 0, 0], 41217],
[[5222, 4691, 2572, 0, 0], 50675],
[[6606, 5957, 3202, 0, 0], 62305],
[[8357, 7566, 3987, 0, 0], 76604],
[[10572, 9608, 4963, 0, 0], 94184],
[[13373, 12203, 6180, 0, 0], 115798],
[[16917, 15497, 7694, 0, 0], 142373],
[[21400, 19682, 9578, 0, 0], 175047],
[[27071, 24996, 11925, 0, 0], 215219],
[[34245, 31745, 14847, 0, 0], 264611],
[[43320, 40316, 18484, 0, 0], 325337],
[[54799, 51201, 23013, 0, 0], 400000]
],
[
[[50, 60, 50, 2, 2], 150],
[[63, 75, 63, 0, 2], 200],
[[78, 94, 78, 1, 3], 267],
[[98, 117, 98, 0, 3], 356],
[[122, 146, 122, 1, 4], 474],
[[153, 183, 153, 0, 4], 632],
[[191, 229, 191, 1, 5], 843],
[[238, 286, 238, 1, 6], 1125],
[[298, 358, 298, 1, 7], 1500],
[[373, 447, 373, 1, 8], 2000]
],
[
[[50, 100, 20, 5, 5], 0.04],
[[63, 128, 25, 1, 6], 0.08],
[[79, 163, 32, 1, 7], 0.12],
[[100, 207, 40, 1, 8], 0.16],
[[126, 264, 50, 1, 9], 0.2],
[[159, 337, 64, 2, 11], 0.24],
[[200, 430, 80, 2, 13], 0.29],
[[252, 548, 101, 2, 15], 0.34],
[[318, 698, 127, 3, 18], 0.39],
[[400, 890, 160, 3, 21], 0.44],
[[504, 1135, 202, 3, 24], 0.49],
[[635, 1447, 254, 4, 28], 0.55],
[[801, 1846, 320, 5, 33], 0.6],
[[1009, 2353, 404, 5, 38], 0.66],
[[1271, 3000, 508, 7, 45], 0.72],
[[1602, 3825, 641, 8, 53], 0.79],
[[2018, 4877, 807, 9, 62], 0.85],
[[2543, 6218, 1017, 10, 72], 0.92],
[[3204, 7928, 1281, 12, 84], 0.99],
[[4037, 10109, 1615, 15, 99], 1.07]
],
]
# Payload template used by create_files(); its "requirements" dict and
# "levels" list are filled in per building before serialization.
data = {
"requirements": {},
"levels": []
}
def create_files():
    """
    Write one ``<BUILDING>.json`` file per entry in ``buildings``,
    combining that building's upgrade requirements and level table.
    """
    for index, name in enumerate(buildings):
        # Build a fresh payload per building instead of mutating the
        # shared module-level ``data`` dict across iterations.
        payload = {
            "requirements": requirements[index],
            "levels": levels[index],
        }
        # Context manager guarantees the handle is closed even if
        # serialization fails (the original leaked it on exception).
        with open(f"{name}.json", "w") as handle:
            json.dump(payload, handle)
def json_parser_for_buildings():
    """
    Convert wiki-table markup rows ({{Res|..}} / {{Workers|..}}) into
    the ``[[wood, clay, iron, pop], factor]`` rows used by ``levels``
    and print them one per line.
    """
    s = """[
[1,"{{Res|50|100|20}}","{{Workers|5|5}}","4%" ],
[2,"{{Res|63|128|25}}","{{Workers|1|6}}","8%" ],
[3,"{{Res|79|163|32}}","{{Workers|1|7}}","12%" ],
[4,"{{Res|100|207|40}}","{{Workers|1|8}}","16%" ],
[5,"{{Res|126|264|50}}","{{Workers|1|9}}","20%" ],
[6,"{{Res|159|337|64}}","{{Workers|2|11}}","24%" ],
[7,"{{Res|200|430|80}}","{{Workers|2|13}}","29%" ],
[8,"{{Res|252|548|101}}","{{Workers|2|15}}","34%" ],
[9,"{{Res|318|698|127}}","{{Workers|3|18}}","39%" ],
[10,"{{Res|400|890|160}}","{{Workers|3|21}}","44%" ],
[11,"{{Res|504|1135|202}}","{{Workers|3|24}}","49%" ],
[12,"{{Res|635|1447|254}}","{{Workers|4|28}}","55%" ],
[13,"{{Res|801|1846|320}}","{{Workers|5|33}}","60%" ],
[14,"{{Res|1009|2353|404}}","{{Workers|5|38}}","66%" ],
[15,"{{Res|1271|3000|508}}","{{Workers|7|45}}","72%" ],
[16,"{{Res|1602|3825|641}}","{{Workers|8|53}}","79%" ],
[17,"{{Res|2018|4877|807}}","{{Workers|9|62}}","85%" ],
[18,"{{Res|2543|6218|1017}}","{{Workers|10|72}}","92%" ],
[19,"{{Res|3204|7928|1281}}","{{Workers|12|84}}","99%" ],
[20,"{{Res|4037|10109|1615}}","{{Workers|15|99}}","107%" ]
]"""
    rows = json.loads(s)
    for row in rows:
        # Strip the "{{Res|" / "{{Workers|" prefix and "}}" suffix.
        wood, clay, iron = row[1][6:-2].split("|")
        pop = row[2][10:-2].split("|")[0]
        factor = float(row[3][0:-1]) / 100
        print(f"[[{wood},{clay},{iron},{pop}], {factor}],")
def json_parser_for_points():
    """
    Convert the wiki point table into a 16x30 matrix (building x level)
    and write it to ``POINTS.json``.

    Each source row is "level,v1,v2,...": column 0 is the level number
    and columns 5-7 are unused, leaving 16 point values per level.
    Empty cells become 0.
    """
    s = """[
["1,10,16,20,24,10,10,42,512,19,0,24,10,6,6,6,5,6,5,8","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
["2,2,3,4,5,2,,8,,4,,,2,1,1,1,1,1,1,2","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
["3,2,4,5,6,2,,10,,4,,,2,2,2,2,1,2,1,2","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
["4,3,5,6,6,,,13,,6,,,3,1,1,1,2,1,2,2","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
["5,4,5,6,9,,,14,,6,,,4,2,2,2,1,2,1,3","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
["6,4,7,9,10,,,18,,8,,,4,3,3,3,2,3,2,3","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
["7,5,8,10,12,,,20,,10,,,5,3,3,3,3,3,3,4","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
["8,6,9,12,14,,,25,,11,,,6,3,3,3,3,3,3,5","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
["9,7,12,14,17,,,31,,14,,,7,5,5,5,3,5,3,5","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
["10,9,14,17,21,,,36,,16,,,9,5,5,5,5,5,5,7","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
["11,10,16,21,25,,,43,,20,,,10,6,6,6,5,6,,9","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
["12,12,20,25,29,,,52,,23,,,12,8,8,8,6,8,,9","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
["13,15,24,29,36,,,62,,28,,,15,8,8,8,8,8,,12","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
["14,18,28,36,43,,,75,,34,,,18,11,11,11,8,11,,15","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
["15,21,34,43,51,,,90,,41,,,21,13,13,13,11,13,,17","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
["16,26,42,51,,,,108,,49,,,26,15,15,15,13,15,,20","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
["17,31,49,62,,,,130,,58,,,31,19,19,19,15,19,,25","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
["18,37,59,74,,,,155,,71,,,37,22,22,22,19,22,,29","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
["19,44,71,88,,,,186,,84,,,44,27,27,27,22,27,,36","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
["20,53,85,107,,,,224,,101,,,53,32,32,32,27,32,,43","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
["21,64,102,,,,,,,,,,64,38,38,38,32,38,,","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
["22,77,123,,,,,,,,,,77,46,46,46,38,46,,","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
["23,92,147,,,,,,,,,,92,55,55,55,46,55,,","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
["24,110,177,,,,,,,,,,110,66,66,66,55,66,,","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
["25,133,212,,,,,,,,,,133,80,80,80,66,80,,","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
["26,159,,,,,,,,,,,,95,95,95,80,95,,","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
["27,191,,,,,,,,,,,,115,115,115,95,115,,","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
["28,229,,,,,,,,,,,,137,137,137,115,137,,","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
["29,274,,,,,,,,,,,,165,165,165,137,165,,","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
["30,330,,,,,,,,,,,,198,198,198,165,198,,","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ]
]"""
    rows = json.loads(s)
    matrix = [[0] * 30 for _ in range(16)]
    for level, row in enumerate(rows):
        # Only the first cell of each row carries data; the trailing
        # empty strings are table padding.
        building = 0
        for column, cell in enumerate(row[0].split(",")):
            # Skip the level label (column 0) and unused columns 5-7.
            # (The original stripped the row's first character before
            # splitting; skipping column 0 makes that redundant.)
            if column == 0 or 5 <= column <= 7:
                continue
            matrix[building][level] = int(cell) if cell else 0
            building += 1
    # Context manager closes the file even if serialization fails
    # (the original leaked the handle on exception).
    with open("POINTS.json", "w") as handle:
        json.dump(matrix, handle)
# create_files()
# json_parser_for_buildings()
# json_parser_for_points() | resources/buildings/generate_files.py | import json
buildings = ["HEADQUARTER", "BARRACKS", "STABLE", "WORKSHOP", "ACADEMY", "SMITHY", "RALLY_POINT", "STATUE", "MARKET",
"TIMBER_CAMP", "CLAY_PIT", "IRON_MINE", "FARM", "WAREHOUSE", "HIDING_PLACE", "WALL"]
requirements = [
{},
{"HEADQUARTER": 3},
{"HEADQUARTER": 10, "BARRACKS": 5, "SMITHY": 5},
{"HEADQUARTER": 10, "SMITHY": 10},
{"HEADQUARTER": 20, "SMITHY": 20, "MARKET": 10},
{"HEADQUARTER": 5, "BARRACKS": 1},
{},
{},
{"HEADQUARTER": 3, "WAREHOUSE": 2},
{},
{},
{},
{},
{},
{},
{"BARRACKS": 1}
]
levels = [
[
[[90, 80, 70, 5, 5], 0.95],
[[113, 102, 88, 1, 6], 0.91],
[[143, 130, 111, 1, 7], 0.86],
[[180, 166, 140, 1, 8], 0.82],
[[227, 211, 176, 1, 9], 0.78],
[[286, 270, 222, 2, 11], 0.75],
[[360, 344, 280, 2, 13], 0.71],
[[454, 438, 353, 2, 15], 0.68],
[[572, 559, 445, 3, 18], 0.64],
[[720, 712, 560, 3, 21], 0.61],
[[908, 908, 706, 3, 24], 0.58],
[[1144, 1158, 890, 4, 28], 0.56],
[[1441, 1476, 1121, 5, 33], 0.53],
[[1816, 1882, 1412, 5, 38], 0.51],
[[2288, 2400, 1779, 7, 45], 0.48],
[[2883, 3060, 2242, 8, 53], 0.46],
[[3632, 3902, 2825, 9, 62], 0.44],
[[4577, 4975, 3560, 10, 72], 0.42],
[[5767, 6343, 4485, 12, 84], 0.40],
[[7266, 8087, 5651, 15, 99], 0.38],
[[9155, 10311, 7120, 17, 116], 0.36],
[[11535, 13146, 8972, 19, 135], 0.34],
[[14534, 16762, 11304, 23, 158], 0.33],
[[18313, 21371, 14244, 27, 185], 0.31],
[[23075, 27248, 17947, 31, 216], 0.30],
[[29074, 34741, 22613, 37, 253], 0.28],
[[36633, 44295, 28493, 43, 296], 0.27],
[[46158, 56476, 35901, 51, 347], 0.26],
[[58159, 72007, 45235, 59, 406], 0.24],
[[73280, 91809, 56996, 69, 475], 0.23]
],
[
[[200, 170, 90, 7, 7], 0.63],
[[252, 218, 113, 1, 8], 0.59],
[[318, 279, 143, 2, 10], 0.56],
[[400, 357, 180, 1, 11], 0.53],
[[504, 456, 227, 2, 13], 0.50],
[[635, 584, 286, 2, 15], 0.47],
[[800, 748, 360, 3, 18], 0.44],
[[1008, 957, 454, 3, 21], 0.42],
[[1271, 1225, 572, 4, 25], 0.39],
[[1601, 1568, 720, 4, 29], 0.37],
[[2017, 2007, 908, 5, 34], 0.35],
[[2542, 2569, 1144, 5, 39], 0.33],
[[3202, 3288, 1441, 7, 46], 0.31],
[[4035, 4209, 1816, 8, 54], 0.29],
[[5084, 5388, 2288, 9, 63], 0.28],
[[6406, 6896, 2883, 11, 74], 0.26],
[[8072, 8827, 3632, 12, 86], 0.25],
[[10170, 11298, 4577, 15, 101], 0.23],
[[12814, 14462, 5767, 17, 118], 0.22],
[[16146, 18511, 7266, 20, 138], 0.21],
[[20344, 23695, 9155, 24, 162], 0.20],
[[25634, 30329, 11535, 27, 189], 0.19],
[[32298, 38821, 14534, 32, 221], 0.17],
[[40696, 49691, 18313, 38, 259], 0.16],
[[51277, 63605, 23075, 44, 303], 0.15]
],
[
[[270, 240, 260, 8, 8], 0.63],
[[340, 307, 328, 1, 9], 0.59],
[[429, 393, 413, 2, 11], 0.56],
[[540, 503, 520, 2, 13], 0.53],
[[681, 644, 655, 2, 15], 0.5],
[[857, 825, 826, 3, 18], 0.47],
[[1080, 1056, 1040, 3, 21], 0.44],
[[1361, 1351, 1311, 3, 24], 0.42],
[[1715, 1729, 1652, 4, 28], 0.39],
[[2161, 2214, 2081, 5, 33], 0.37],
[[2723, 2833, 2622, 5, 38], 0.35],
[[3431, 3627, 3304, 7, 45], 0.33],
[[4323, 4642, 4163, 8, 53], 0.31],
[[5447, 5942, 5246, 9, 62], 0.29],
[[6864, 7606, 6609, 10, 72], 0.28],
[[8648, 9736, 8328, 12, 84], 0.26],
[[10897, 12462, 10493, 15, 99], 0.25],
[[13730, 15951, 13221, 16, 115], 0.23],
[[17300, 20417, 16659, 20, 135], 0.22],
[[21797, 26134, 20990, 23, 158], 0.21]
],
[
[[300, 240, 260, 8, 8], 0.63],
[[378, 307, 328, 1, 9], 0.59],
[[476, 393, 413, 2, 11], 0.56],
[[600, 503, 520, 2, 13], 0.53],
[[756, 644, 655, 2, 15], 0.5],
[[953, 825, 826, 3, 18], 0.47],
[[1200, 1056, 1040, 3, 21], 0.44],
[[1513, 1351, 1311, 3, 24], 0.42],
[[1906, 1729, 1652, 4, 28], 0.39],
[[2401, 2214, 2081, 5, 33], 0.37],
[[3026, 2833, 2622, 5, 38], 0.35],
[[3812, 3627, 3304, 7, 45], 0.33],
[[4804, 4642, 4163, 8, 53], 0.31],
[[6053, 5942, 5246, 9, 62], 0.29],
[[7626, 7606, 6609, 10, 72], 0.28]
],
[
[[15000, 25000, 10000, 80, 80], 0.63],
[[30000, 50000, 20000, 14, 94], 0.59],
[[60000, 100000, 40000, 16, 110], 0.56]
],
[
[[220, 180, 240, 20, 20], 0.91],
[[277, 230, 302, 3, 23], 0.83],
[[349, 293, 381, 4, 27], 0.75],
[[440, 373, 480, 5, 32], 0.68],
[[555, 476, 605, 5, 37], 0.62],
[[699, 606, 762, 7, 44], 0.56],
[[880, 773, 960, 7, 51], 0.51],
[[1109, 986, 1210, 9, 60], 0.47],
[[1398, 1257, 1525, 10, 70], 0.42],
[[1761, 1603, 1921, 12, 82], 0.39],
[[2219, 2043, 2421, 14, 96], 0.35],
[[2796, 2605, 3050, 16, 112], 0.32],
[[3523, 3322, 3843, 20, 132], 0.29],
[[4439, 4236, 4842, 22, 154], 0.26],
[[5593, 5400, 6101, 26, 180], 0.24],
[[7047, 6885, 7687, 31, 211], 0.22],
[[8879, 8779, 9686, 36, 247], 0.2],
[[11187, 11193, 12204, 42, 289], 0.18],
[[14096, 14271, 15377, 49, 338], 0.16],
[[17761, 18196, 19375, 57, 395], 0.15]
],
[
[[10, 40, 30, 0, 0], 1.0]
],
[
[[220, 220, 220, 10, 10], 1.0]
],
[
[[100, 100, 100, 20, 20], 1],
[[126, 128, 126, 3, 23], 2],
[[159, 163, 159, 4, 27], 3],
[[200, 207, 200, 5, 32], 4],
[[252, 264, 252, 5, 37], 5],
[[318, 337, 318, 7, 44], 6],
[[400, 430, 400, 7, 51], 7],
[[504, 548, 504, 9, 60], 8],
[[635, 698, 635, 10, 70], 9],
[[800, 890, 800, 12, 82], 10],
[[1009, 1135, 1009, 14, 96], 11],
[[1271, 1447, 1271, 16, 112], 14],
[[1601, 1846, 1601, 20, 132], 19],
[[2018, 2353, 2018, 22, 154], 26],
[[2542, 3000, 2542, 26, 180], 35],
[[3203, 3825, 3203, 31, 211], 46],
[[4036, 4877, 4036, 36, 247], 59],
[[5085, 6218, 5085, 42, 289], 74],
[[6407, 7928, 6407, 49, 338], 91],
[[8073, 10109, 8073, 57, 395], 110],
[[10172, 12889, 10172, 67, 462], 131],
[[12817, 16433, 12817, 79, 541], 154],
[[16149, 20952, 16149, 92, 633], 179],
[[20348, 26714, 20348, 107, 740], 206],
[[25639, 34060, 25639, 126, 866], 235]
],
[
[[50, 60, 40, 5, 5], 30],
[[63, 77, 50, 1, 6], 35],
[[78, 98, 62, 1, 7], 41],
[[98, 124, 77, 1, 8], 47],
[[122, 159, 96, 1, 9], 55],
[[153, 202, 120, 1, 10], 64],
[[191, 258, 149, 2, 12], 74],
[[238, 329, 185, 2, 14], 86],
[[298, 419, 231, 2, 16], 100],
[[373, 534, 287, 2, 18], 117],
[[466, 681, 358, 3, 21], 136],
[[582, 868, 446, 3, 24], 158],
[[728, 1107, 555, 4, 28], 184],
[[909, 1412, 691, 5, 33], 214],
[[1137, 1800, 860, 5, 38], 249],
[[1421, 2295, 1071, 5, 43], 289],
[[1776, 2926, 1333, 7, 50], 337],
[[2220, 3731, 1659, 8, 58], 391],
[[2776, 4757, 2066, 9, 67], 455],
[[3469, 6065, 2572, 10, 77], 530],
[[4337, 7733, 3202, 12, 89], 616],
[[5421, 9860, 3987, 14, 103], 717],
[[6776, 12571, 4963, 16, 119], 833],
[[8470, 16028, 6180, 19, 138], 969],
[[10588, 20436, 7694, 21, 159], 1127],
[[13235, 26056, 9578, 24, 183], 1311],
[[16544, 33221, 11925, 29, 212], 1525],
[[20680, 42357, 14847, 33, 245], 1774],
[[25849, 54005, 18484, 38, 283], 2063],
[[32312, 68857, 23013, 43, 326], 2400]
],
[
[[65, 50, 40, 10, 10], 30],
[[83, 63, 50, 1, 11], 35],
[[105, 80, 62, 2, 13], 41],
[[133, 101, 76, 2, 15], 47],
[[169, 128, 95, 2, 17], 55],
[[215, 162, 117, 2, 19], 64],
[[273, 205, 145, 3, 22], 74],
[[346, 259, 180, 3, 25], 86],
[[440, 328, 224, 4, 29], 100],
[[559, 415, 277, 4, 33], 117],
[[709, 525, 344, 4, 37], 136],
[[901, 664, 426, 5, 42], 158],
[[1144, 840, 529, 6, 48], 184],
[[1453, 1062, 655, 7, 55], 214],
[[1846, 1343, 813, 8, 63], 249],
[[2344, 1700, 1008, 8, 71], 289],
[[2977, 2150, 1250, 10, 81], 337],
[[3781, 2720, 1550, 12, 93], 391],
[[4802, 3440, 1922, 13, 106], 455],
[[6098, 4352, 2383, 15, 121], 530],
[[7744, 5505, 2955, 16, 137], 616],
[[9835, 6964, 3664, 20, 157], 717],
[[12491, 8810, 4543, 22, 179], 833],
[[15863, 11144, 5633, 25, 204], 969],
[[20147, 14098, 6985, 28, 232], 1127],
[[25586, 17833, 8662, 33, 265], 1311],
[[32495, 22559, 10740, 37, 302], 1525],
[[41268, 28537, 13318, 42, 344], 1774],
[[52410, 36100, 16515, 48, 392], 2063],
[[66561, 45666, 20478, 55, 447], 2400]
],
[
[[75, 65, 70, 10, 10], 30],
[[94, 83, 87, 2, 12], 35],
[[118, 106, 108, 2, 14], 41],
[[147, 135, 133, 2, 16], 47],
[[184, 172, 165, 3, 19], 55],
[[231, 219, 205, 3, 22], 64],
[[289, 279, 254, 4, 26], 74],
[[362, 356, 316, 4, 30], 86],
[[453, 454, 391, 5, 35], 100],
[[567, 579, 485, 6, 41], 117],
[[710, 738, 602, 7, 48], 136],
[[889, 941, 746, 8, 56], 158],
[[1113, 1200, 925, 10, 66], 184],
[[1393, 1529, 1147, 11, 77], 214],
[[1744, 1950, 1422, 13, 90], 249],
[[2183, 2486, 1764, 15, 105], 289],
[[2734, 3170, 2187, 18, 123], 337],
[[3422, 4042, 2712, 21, 144], 391],
[[4285, 5153, 3363, 25, 169], 455],
[[5365, 6571, 4170, 28, 197], 530],
[[6717, 8378, 5170, 34, 231], 616],
[[8409, 10681, 6411, 39, 270], 717],
[[10528, 13619, 7950, 46, 316], 833],
[[13181, 17364, 9858, 54, 370], 969],
[[16503, 22139, 12224, 63, 433], 1127],
[[20662, 28227, 15158, 74, 507], 1311],
[[25869, 35990, 18796, 86, 593], 1525],
[[32388, 45887, 23307, 100, 693], 1774],
[[40549, 58506, 28900, 118, 811], 2063],
[[50768, 74595, 35837, 138, 949], 2400]
],
[
[[45, 40, 30, 0, 0], 240],
[[59, 53, 39, 0, 0], 281],
[[76, 70, 50, 0, 0], 329],
[[99, 92, 64, 0, 0], 386],
[[129, 121, 83, 0, 0], 452],
[[167, 160, 107, 0, 0], 530],
[[217, 212, 138, 0, 0], 622],
[[282, 279, 178, 0, 0], 729],
[[367, 369, 230, 0, 0], 854],
[[477, 487, 297, 0, 0], 1002],
[[620, 642, 383, 0, 0], 1174],
[[806, 848, 494, 0, 0], 1376],
[[1048, 1119, 637, 0, 0], 1613],
[[1363, 1477, 822, 0, 0], 1891],
[[1772, 1950, 1060, 0, 0], 2216],
[[2303, 2574, 1368, 0, 0], 2598],
[[2994, 3398, 1764, 0, 0], 3045],
[[3893, 4486, 2276, 0, 0], 3569],
[[5060, 5921, 2936, 0, 0], 4183],
[[6579, 7816, 3787, 0, 0], 4904],
[[8552, 10317, 4886, 0, 0], 5748],
[[11118, 13618, 6302, 0, 0], 6737],
[[14453, 17976, 8130, 0, 0], 7896],
[[18789, 23728, 10488, 0, 0], 9255],
[[24426, 31321, 13529, 0, 0], 10848],
[[31754, 41344, 17453, 0, 0], 12715],
[[41280, 54574, 22514, 0, 0], 14904],
[[53664, 72037, 29043, 0, 0], 17469],
[[69763, 95089, 37466, 0, 0], 20476],
[[90692, 125517, 48331, 0, 0], 24000]
],
[
[[60, 50, 40, 0, 0], 1000],
[[76, 64, 50, 0, 0], 1229],
[[96, 81, 62, 0, 0], 1512],
[[121, 102, 77, 0, 0], 1859],
[[154, 130, 96, 0, 0], 2285],
[[194, 165, 120, 0, 0], 2810],
[[246, 210, 149, 0, 0], 3454],
[[311, 266, 185, 0, 0], 4247],
[[393, 338, 231, 0, 0], 5222],
[[498, 430, 287, 0, 0], 6420],
[[630, 546, 358, 0, 0], 7893],
[[796, 693, 446, 0, 0], 9705],
[[1007, 880, 555, 0, 0], 11932],
[[1274, 1118, 691, 0, 0], 14670],
[[1612, 1420, 860, 0, 0], 18037],
[[2039, 1803, 1071, 0, 0], 22177],
[[2580, 2290, 1333, 0, 0], 27266],
[[3264, 2908, 1659, 0, 0], 33523],
[[4128, 3693, 2066, 0, 0], 41217],
[[5222, 4691, 2572, 0, 0], 50675],
[[6606, 5957, 3202, 0, 0], 62305],
[[8357, 7566, 3987, 0, 0], 76604],
[[10572, 9608, 4963, 0, 0], 94184],
[[13373, 12203, 6180, 0, 0], 115798],
[[16917, 15497, 7694, 0, 0], 142373],
[[21400, 19682, 9578, 0, 0], 175047],
[[27071, 24996, 11925, 0, 0], 215219],
[[34245, 31745, 14847, 0, 0], 264611],
[[43320, 40316, 18484, 0, 0], 325337],
[[54799, 51201, 23013, 0, 0], 400000]
],
[
[[50, 60, 50, 2, 2], 150],
[[63, 75, 63, 0, 2], 200],
[[78, 94, 78, 1, 3], 267],
[[98, 117, 98, 0, 3], 356],
[[122, 146, 122, 1, 4], 474],
[[153, 183, 153, 0, 4], 632],
[[191, 229, 191, 1, 5], 843],
[[238, 286, 238, 1, 6], 1125],
[[298, 358, 298, 1, 7], 1500],
[[373, 447, 373, 1, 8], 2000]
],
[
[[50, 100, 20, 5, 5], 0.04],
[[63, 128, 25, 1, 6], 0.08],
[[79, 163, 32, 1, 7], 0.12],
[[100, 207, 40, 1, 8], 0.16],
[[126, 264, 50, 1, 9], 0.2],
[[159, 337, 64, 2, 11], 0.24],
[[200, 430, 80, 2, 13], 0.29],
[[252, 548, 101, 2, 15], 0.34],
[[318, 698, 127, 3, 18], 0.39],
[[400, 890, 160, 3, 21], 0.44],
[[504, 1135, 202, 3, 24], 0.49],
[[635, 1447, 254, 4, 28], 0.55],
[[801, 1846, 320, 5, 33], 0.6],
[[1009, 2353, 404, 5, 38], 0.66],
[[1271, 3000, 508, 7, 45], 0.72],
[[1602, 3825, 641, 8, 53], 0.79],
[[2018, 4877, 807, 9, 62], 0.85],
[[2543, 6218, 1017, 10, 72], 0.92],
[[3204, 7928, 1281, 12, 84], 0.99],
[[4037, 10109, 1615, 15, 99], 1.07]
],
]
# Shared output template: mutated in place by create_files() for each
# building before being serialised to JSON.
data = {"requirements": {}, "levels": []}
def create_files():
    """Write one ``<building>.json`` file per building.

    For each name in the module-level ``buildings`` list, the matching
    entries from the parallel ``requirements`` and ``levels`` lists are
    combined into the shared ``data`` template and dumped as JSON into
    ``<name>.json`` in the current directory.

    NOTE(review): assumes ``requirements`` and ``levels`` are at least as
    long as ``buildings`` -- an IndexError is raised otherwise.
    """
    for i, name in enumerate(buildings):
        # Reuse the module-level template; clear the previous contents so
        # one building's requirements do not leak into the next file.
        data["requirements"].clear()
        data["requirements"].update(requirements[i])
        data["levels"] = levels[i]
        # Context manager guarantees the file is closed even on error.
        with open(f"{name}.json", "w") as f:
            json.dump(data, f)
def json_parser_for_buildings():
    """Convert a wiki building table into ``levels``-style literals.

    Parses a hard-coded JSON dump of a wiki table whose rows look like
    ``[level, "{{Res|wood|clay|iron}}", "{{Workers|delta|total}}", "pct%"]``
    and prints one ``[[wood,clay,iron,pop], factor],`` line per level,
    ready to be pasted into the ``levels`` lists above.
    """
    s = """[
    [1,"{{Res|50|100|20}}","{{Workers|5|5}}","4%" ],
    [2,"{{Res|63|128|25}}","{{Workers|1|6}}","8%" ],
    [3,"{{Res|79|163|32}}","{{Workers|1|7}}","12%" ],
    [4,"{{Res|100|207|40}}","{{Workers|1|8}}","16%" ],
    [5,"{{Res|126|264|50}}","{{Workers|1|9}}","20%" ],
    [6,"{{Res|159|337|64}}","{{Workers|2|11}}","24%" ],
    [7,"{{Res|200|430|80}}","{{Workers|2|13}}","29%" ],
    [8,"{{Res|252|548|101}}","{{Workers|2|15}}","34%" ],
    [9,"{{Res|318|698|127}}","{{Workers|3|18}}","39%" ],
    [10,"{{Res|400|890|160}}","{{Workers|3|21}}","44%" ],
    [11,"{{Res|504|1135|202}}","{{Workers|3|24}}","49%" ],
    [12,"{{Res|635|1447|254}}","{{Workers|4|28}}","55%" ],
    [13,"{{Res|801|1846|320}}","{{Workers|5|33}}","60%" ],
    [14,"{{Res|1009|2353|404}}","{{Workers|5|38}}","66%" ],
    [15,"{{Res|1271|3000|508}}","{{Workers|7|45}}","72%" ],
    [16,"{{Res|1602|3825|641}}","{{Workers|8|53}}","79%" ],
    [17,"{{Res|2018|4877|807}}","{{Workers|9|62}}","85%" ],
    [18,"{{Res|2543|6218|1017}}","{{Workers|10|72}}","92%" ],
    [19,"{{Res|3204|7928|1281}}","{{Workers|12|84}}","99%" ],
    [20,"{{Res|4037|10109|1615}}","{{Workers|15|99}}","107%" ]
    ]"""
    # The unused ``result`` accumulator and the enumerate() index from the
    # original were dead code and have been removed.
    for element in json.loads(s):
        # "{{Res|50|100|20}}" -> "50|100|20" -> three resource costs.
        wood, clay, iron = str(element[1][6:-2]).split("|")
        # "{{Workers|5|5}}" -> "5|5" -> take the incremental worker count.
        pop = str(element[2][10:-2]).split("|")[0]
        # "4%" -> 0.04 (production-bonus fraction).
        factor = float(element[3][0:-1]) / 100
        print(f"[[{wood},{clay},{iron},{pop}], {factor}],")
def json_parser_for_points():
    """Convert the wiki points table into a 16x30 matrix and save it.

    Each row of the hard-coded JSON dump encodes one building level as
    ``"level,p1,p2,...,p19"`` (the remaining row cells are empty wiki
    artefacts and are ignored).  Column 0 (the level number) and columns
    5-7 are dropped; the 16 remaining point values are stored with empty
    cells mapped to 0.  The transposed result, indexed as
    ``matrix[building][level-1]``, is written to ``POINTS.json``.
    """
    s = """[
    ["1,10,16,20,24,10,10,42,512,19,0,24,10,6,6,6,5,6,5,8","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
    ["2,2,3,4,5,2,,8,,4,,,2,1,1,1,1,1,1,2","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
    ["3,2,4,5,6,2,,10,,4,,,2,2,2,2,1,2,1,2","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
    ["4,3,5,6,6,,,13,,6,,,3,1,1,1,2,1,2,2","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
    ["5,4,5,6,9,,,14,,6,,,4,2,2,2,1,2,1,3","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
    ["6,4,7,9,10,,,18,,8,,,4,3,3,3,2,3,2,3","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
    ["7,5,8,10,12,,,20,,10,,,5,3,3,3,3,3,3,4","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
    ["8,6,9,12,14,,,25,,11,,,6,3,3,3,3,3,3,5","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
    ["9,7,12,14,17,,,31,,14,,,7,5,5,5,3,5,3,5","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
    ["10,9,14,17,21,,,36,,16,,,9,5,5,5,5,5,5,7","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
    ["11,10,16,21,25,,,43,,20,,,10,6,6,6,5,6,,9","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
    ["12,12,20,25,29,,,52,,23,,,12,8,8,8,6,8,,9","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
    ["13,15,24,29,36,,,62,,28,,,15,8,8,8,8,8,,12","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
    ["14,18,28,36,43,,,75,,34,,,18,11,11,11,8,11,,15","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
    ["15,21,34,43,51,,,90,,41,,,21,13,13,13,11,13,,17","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
    ["16,26,42,51,,,,108,,49,,,26,15,15,15,13,15,,20","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
    ["17,31,49,62,,,,130,,58,,,31,19,19,19,15,19,,25","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
    ["18,37,59,74,,,,155,,71,,,37,22,22,22,19,22,,29","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
    ["19,44,71,88,,,,186,,84,,,44,27,27,27,22,27,,36","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
    ["20,53,85,107,,,,224,,101,,,53,32,32,32,27,32,,43","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
    ["21,64,102,,,,,,,,,,64,38,38,38,32,38,,","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
    ["22,77,123,,,,,,,,,,77,46,46,46,38,46,,","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
    ["23,92,147,,,,,,,,,,92,55,55,55,46,55,,","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
    ["24,110,177,,,,,,,,,,110,66,66,66,55,66,,","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
    ["25,133,212,,,,,,,,,,133,80,80,80,66,80,,","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
    ["26,159,,,,,,,,,,,,95,95,95,80,95,,","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
    ["27,191,,,,,,,,,,,,115,115,115,95,115,,","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
    ["28,229,,,,,,,,,,,,137,137,137,115,137,,","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
    ["29,274,,,,,,,,,,,,165,165,165,137,165,,","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ],
    ["30,330,,,,,,,,,,,,198,198,198,165,198,,","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","" ]
    ]"""
    json_object = json.loads(s)
    matrix = [[0 for x in range(30)] for y in range(16)]
    y = 0
    x = 0
    for index_y, element_y in enumerate(json_object):
        # NOTE(review): [1:] drops the first character of the row string;
        # harmless because column 0 is skipped below, but looks accidental.
        l = str(element_y[0][1:]).split("'")[0].split(",")
        for index_x, element_x in enumerate(l):
            if index_x == 0 or 5 <= index_x <= 7:
                continue  # skip the level number and three unused columns
            matrix[y][x] = 0 if element_x == '' else int(element_x)
            y += 1
        x += 1
        y = 0
    # Context manager ensures the file is flushed and closed; the original
    # f-string prefix was pointless (no placeholders) and is dropped.
    with open("POINTS.json", "w") as f:
        json.dump(matrix, f)
# create_files()
# json_parser_for_buildings()
# json_parser_for_points() | 0.141994 | 0.346569 |
import os
import shutil
from wmt.config import site
from wmt.models.submissions import prepend_to_path
from wmt.utils.hook import find_simulation_input_file
from topoflow_utils.hook import choices_map
file_list = [] # ['pixel_file']
def uppercase_choice(choice):
    """Formats a string for consumption by TopoFlow.

    Collapses whitespace runs in *choice* to single underscores and
    upper-cases the result (e.g. ``"fill pits"`` -> ``"FILL_PITS"``).

    Parameters
    ----------
    choice : str
        A parameter choice from WMT.

    Returns
    -------
    str
        The underscore-joined, upper-cased choice.
    """
    # str.join works on both Python 2 and 3; the original used
    # ``string.join``, a helper that was removed in Python 3.
    return '_'.join(choice.split()).upper()
def execute(env):
    """Perform pre-stage tasks for running a component.

    Mutates *env* in place: fixes the run-control parameters, rewrites
    WMT choice strings into TopoFlow's UPPER_CASE form, maps on/off
    choices through ``choices_map``, and copies the component's input
    files into the current run directory.

    Parameters
    ----------
    env : dict
        A dict of component parameter values from WMT.
    """
    # Fixed run-control settings (units noted per line).
    env['stop_code'] = 1 # my choice
    env['stop_time'] = env['_run_duration'] # years
    env['n_steps'] = 1 # WMT needs something here
    env['save_grid_dt'] = 1.0 # years
    env['save_pixels_dt'] = 1.0 # years
    env['dt'] = 1.0 # years
    # TopoFlow needs site_prefix and case_prefix.
    # env['site_prefix'] = 'default'
    # env['case_prefix'] = 'WMT'
    # If no pixel_file is given, let TopoFlow make one.
    if env['pixel_file'] == 'off':
        # file_list.remove('pixel_file')
        env['pixel_file'] = env['case_prefix'] + '_outlets.txt'
    # Convert WMT's free-form choice strings to TopoFlow's UPPER_CASE form.
    env['BC_method'] = uppercase_choice(env['BC_method'])
    env['make_z0_method'] = uppercase_choice(env['make_z0_method'])
    env['noise_method'] = uppercase_choice(env['noise_method'])
    env['A_units'] = 'm^2'
    # Map on/off style choices to the values TopoFlow expects.
    env['LINK_FLATS'] = choices_map[env['LINK_FLATS']]
    env['FILL_PITS_IN_Z0'] = choices_map[env['FILL_PITS_IN_Z0']]
    env['LR_PERIODIC'] = choices_map[env['LR_PERIODIC']]
    env['TB_PERIODIC'] = choices_map[env['TB_PERIODIC']]
    # Default files common to all TopoFlow components are stored with the
    # topoflow component metadata.
    # prepend_to_path('WMT_INPUT_FILE_PATH',
    # os.path.join(site['db'], 'components', 'topoflow', 'files'))
    # Stage each named input file into the run directory (file_list is
    # currently empty, so this loop is a no-op).
    for fname in file_list:
        src = find_simulation_input_file(env[fname])
        shutil.copy(src, os.curdir)
    # The RTI grid-info file is always required.
    # NOTE(review): assumes the caller supplies env['site_prefix'] -- the
    # default assignment above is commented out.
    src = find_simulation_input_file(env['site_prefix'] + '.rti')
    shutil.copy(src, os.path.join(os.curdir, env['site_prefix'] + '.rti'))
import shutil
from wmt.config import site
from wmt.models.submissions import prepend_to_path
from wmt.utils.hook import find_simulation_input_file
from topoflow_utils.hook import choices_map
file_list = [] # ['pixel_file']
def uppercase_choice(choice):
"""Formats a string for consumption by TopoFlow.
Parameters
----------
choice : str
A parameter choice from WMT.
"""
import string
return string.join(choice.split(), '_').upper()
def execute(env):
"""Perform pre-stage tasks for running a component.
Parameters
----------
env : dict
A dict of component parameter values from WMT.
"""
env['stop_code'] = 1 # my choice
env['stop_time'] = env['_run_duration'] # years
env['n_steps'] = 1 # WMT needs something here
env['save_grid_dt'] = 1.0 # years
env['save_pixels_dt'] = 1.0 # years
env['dt'] = 1.0 # years
# TopoFlow needs site_prefix and case_prefix.
# env['site_prefix'] = 'default'
# env['case_prefix'] = 'WMT'
# If no pixel_file is given, let TopoFlow make one.
if env['pixel_file'] == 'off':
# file_list.remove('pixel_file')
env['pixel_file'] = env['case_prefix'] + '_outlets.txt'
env['BC_method'] = uppercase_choice(env['BC_method'])
env['make_z0_method'] = uppercase_choice(env['make_z0_method'])
env['noise_method'] = uppercase_choice(env['noise_method'])
env['A_units'] = 'm^2'
env['LINK_FLATS'] = choices_map[env['LINK_FLATS']]
env['FILL_PITS_IN_Z0'] = choices_map[env['FILL_PITS_IN_Z0']]
env['LR_PERIODIC'] = choices_map[env['LR_PERIODIC']]
env['TB_PERIODIC'] = choices_map[env['TB_PERIODIC']]
# Default files common to all TopoFlow components are stored with the
# topoflow component metadata.
# prepend_to_path('WMT_INPUT_FILE_PATH',
# os.path.join(site['db'], 'components', 'topoflow', 'files'))
for fname in file_list:
src = find_simulation_input_file(env[fname])
shutil.copy(src, os.curdir)
src = find_simulation_input_file(env['site_prefix'] + '.rti')
shutil.copy(src, os.path.join(os.curdir, env['site_prefix'] + '.rti')) | 0.409929 | 0.276526 |
# ---------------------------------------------------------------------------
# Test payloads for the FogFlow persistence unit tests.  Numbered variants
# (e.g. test001..test003) are progressively less complete versions of the
# full payload (test0, test1, ...) used to exercise partial-input handling.
# ---------------------------------------------------------------------------

# Payloads to persist an Operator.
test001 = {}
test002 = {"name": "counter"}
test003 = {"name": "counter", "description": "Hi there, all okay!"}
test0 = {
    "name": "counter",
    "description": "Hi there, all okay!",
    "parameters": [],
}

# Payloads to persist a FogFunction.
test101 = {}
test102 = {"id": "FogFunction.ParkingLotRecommendation"}
test103 = {
    "id": "FogFunction.ParkingLotRecommendation",
    "name": "ParkingLotRecommendation",
    "topology": {
        "name": "ParkingLotRecommendation",
        "description": "to recommend where to park around the destination",
        "tasks": [[]],
    },
}
test104 = {
    "id": "FogFunction.ParkingLotRecommendation",
    "name": "ParkingLotRecommendation",
    "geoscope": {"scopeType": "global", "scopeValue": "global"},
    "status": "enabled",
    "action": "UPDATE",
}
test1 = {
    "id": "FogFunction.ParkingLotRecommendation",
    "name": "ParkingLotRecommendation",
    "topology": {
        "name": "ParkingLotRecommendation",
        "description": "to recommend where to park around the destination",
        "tasks": [[]],
    },
    "intent": {
        "topology": "ParkingLotRecommendation",
        "priority": {"exclusive": False, "level": 0},
        "qos": "Max Throughput",
        "geoscope": {"scopeType": "global", "scopeValue": "global"},
        "status": "enabled",
        "action": "UPDATE",
    },
}

# Payloads to persist a DockerImage.
# NOTE(review): the "operater" key is kept as-is -- it is the key the code
# under test expects; renaming it here would change test behaviour.
test200 = {}
test201 = {"operater": "counter"}
test202 = {"operater": "counter", "name": "fogflow/counter"}
test203 = {
    "operater": "counter",
    "name": "fogflow/counter",
    "hwType": "X86",
    "osType": "Linux",
}
test2 = {
    "operater": "counter",
    "name": "fogflow/counter",
    "tag": "latest",
    "hwType": "X86",
    "osType": "Linux",
    "prefetched": False,
}

# Payloads to persist a Topology.
test300 = {}
test301 = {"description": "detect anomaly events in shops", "name": "anomaly-detection"}
test3 = {
    "description": "detect anomaly events in shops",
    "name": "anomaly-detection",
    "tasks": [
        {
            "input_streams": [
                {
                    "groupby": "ALL",
                    "scoped": True,
                    "selected_attributes": [],
                    "selected_type": "Anomaly",
                },
            ],
            "name": "Counting",
            "operator": "counter",
            "output_streams": [{"entity_type": "Stat32_new"}],
        },
        {
            "input_streams": [
                {
                    "groupby": "EntityID",
                    "scoped": True,
                    "selected_attributes": [],
                    "selected_type": "PowerPanel",
                },
                {
                    "groupby": "ALL",
                    "scoped": False,
                    "selected_attributes": [],
                    "selected_type": "Rule",
                },
            ],
            "name": "Detector",
            "operator": "anomaly",
            "output_streams": [{"entity_type": "Anomaly32_new"}],
        },
    ],
}

# Payloads to persist a ServiceIntent.
test400 = {}
test401 = {
    "topology": "anomaly-detection",
    "id": "ServiceIntent.849ecf56-4590-4493-a982-7b1a257053e2",
}
test402 = {
    "topology": "anomaly-detection",
    "geoscope": {"scopeType": "global", "scopeValue": "global"},
}
test4 = {
    "topology": "anomaly-detection",
    "priority": {"exclusive": False, "level": 50},
    "qos": "NONE",
    "geoscope": {"scopeType": "global", "scopeValue": "global"},
    "id": "ServiceIntent.849ecf56-4590-4493-a982-7b1a257053e2",
}
{
}
test002=\
{
"name":"counter"
}
test003=\
{
"name":"counter",
"description":"Hi there, all okay!",
}
test0 =\
{
"name":"counter",
"description":"Hi there, all okay!",
"parameters":[]
}
# Payload to persist FogFunction
test101=\
{
}
test102=\
{
"id": 'FogFunction.ParkingLotRecommendation'
}
test103=\
{
"id": 'FogFunction.ParkingLotRecommendation',
"name": 'ParkingLotRecommendation',
"topology":
{
"name": 'ParkingLotRecommendation',
"description": 'to recommend where to park around the destination',
"tasks":[[]]
}
}
test104=\
{
"id": 'FogFunction.ParkingLotRecommendation',
"name": 'ParkingLotRecommendation',
"geoscope":
{
"scopeType": 'global', "scopeValue": 'global'
},
"status": 'enabled',
"action": 'UPDATE'
}
test1 =\
{
"id": 'FogFunction.ParkingLotRecommendation',
"name": 'ParkingLotRecommendation',
"topology":
{
"name": 'ParkingLotRecommendation',
"description": 'to recommend where to park around the destination',
"tasks":[[]]
},
"intent":
{
"topology": 'ParkingLotRecommendation',
"priority":
{
"exclusive": False, "level": 0
},
"qos": 'Max Throughput',
"geoscope":
{
"scopeType": 'global', "scopeValue": 'global'
},
"status": 'enabled',
"action": 'UPDATE'
}
}
# Payload to persist DockerImage
test200 =\
{
}
test201 =\
{
"operater": "counter",
}
test202 =\
{
"operater": "counter",
"name": "fogflow/counter"
}
test203 =\
{
"operater": "counter",
"name": "fogflow/counter",
"hwType": "X86",
"osType": "Linux"
}
test2 =\
{
"operater": "counter",
"name": "fogflow/counter",
"tag": "latest",
"hwType": "X86",
"osType": "Linux",
"prefetched": False
}
# payload to persist Topology
test300=\
{
}
test301=\
{
"description": "detect anomaly events in shops",
"name": "anomaly-detection"
}
test3 =\
{
"description": "detect anomaly events in shops",
"name": "anomaly-detection",
"tasks": [
{
"input_streams": [
{
"groupby": "ALL",
"scoped": True,
"selected_attributes": [],
"selected_type": "Anomaly"
}
],
"name": "Counting",
"operator": "counter",
"output_streams": [
{
"entity_type": "Stat32_new"
}
]
},
{
"input_streams": [
{
"groupby": "EntityID",
"scoped": True,
"selected_attributes": [],
"selected_type": "PowerPanel"
},
{
"groupby": "ALL",
"scoped": False,
"selected_attributes": [],
"selected_type": "Rule"
}
],
"name": "Detector",
"operator": "anomaly",
"output_streams": [
{
"entity_type": "Anomaly32_new"
}
]
}
]
}
#payload to persist service intent
test400=\
{
}
test401=\
{
"topology": "anomaly-detection",
"id": "ServiceIntent.849ecf56-4590-4493-a982-7b1a257053e2"
}
test402=\
{
"topology": "anomaly-detection",
"geoscope": { "scopeType": "global", "scopeValue": "global" },
}
test4=\
{
"topology": "anomaly-detection",
"priority": { "exclusive": False, "level": 50 },
"qos": 'NONE',
"geoscope": { "scopeType": "global", "scopeValue": "global" },
"id": "ServiceIntent.849ecf56-4590-4493-a982-7b1a257053e2"
} | 0.460289 | 0.51751 |
from django.conf.urls import include, url
from glue2_views_api.views import *
# Define our custom URLs
# Additionally, we include login URLs for the browseable API.
# URL routes for the GLUE2 views API.  Most resources are exposed under two
# prefixes (e.g. ``software`` and ``software-spf``) that map to the same
# views; presumably the ``-spf`` variants select a different serializer in
# the view -- TODO confirm.
# NOTE(review): many url() entries share the same ``name=`` value (e.g.
# every software route is named 'software-detail'); Django's reverse()
# will only resolve the last pattern registered under a given name.
urlpatterns = [
    # url(r'^applicationenvironment/$',
    # ApplicationEnvironment_List.as_view(),
    # name='applicationenvironment-list'),
    # Software routes: list plus lookup by ID, ResourceID or AppName.
    url(r'^software/$', Software_List.as_view(), name='software-list'),
    url(r'^software/ID/(?P<id>[^/]+)/$', Software_Detail.as_view(), name='software-detail'),
    url(r'^software/ResourceID/(?P<resourceid>[^/]+)/$', Software_Detail.as_view(), name='software-detail'),
    url(r'^software/AppName/(?P<appname>[^/]+)/$', Software_Detail.as_view(), name='software-detail'),
    url(r'^software-spf/$', Software_List.as_view(), name='software-list'),
    url(r'^software-spf/ID/(?P<id>[^/]+)/$', Software_Detail.as_view(), name='software-detail'),
    url(r'^software-spf/ResourceID/(?P<resourceid>[^/]+)/$', Software_Detail.as_view(), name='software-detail'),
    url(r'^software-spf/AppName/(?P<appname>[^/]+)/$', Software_Detail.as_view(), name='software-detail'),
    # Service routes: list plus lookup by ID, ResourceID, InterfaceName
    # or ServiceType.
    url(r'^services/$', Services_List.as_view(), name='services-list'),
    url(r'^services/ID/(?P<id>[^/]+)/$', Services_Detail.as_view(), name='services-detail'),
    url(r'^services/ResourceID/(?P<resourceid>[^/]+)/$', Services_Detail.as_view(), name='services-detail'),
    url(r'^services/InterfaceName/(?P<interfacename>[^/]+)/$', Services_Detail.as_view(), name='services-detail'),
    url(r'^services/ServiceType/(?P<servicetype>[^/]+)/$', Services_Detail.as_view(), name='services-detail'),
    url(r'^services-spf/$', Services_List.as_view(), name='services-list'),
    url(r'^services-spf/ID/(?P<id>[^/]+)/$', Services_Detail.as_view(), name='services-detail'),
    url(r'^services-spf/ResourceID/(?P<resourceid>[^/]+)/$', Services_Detail.as_view(), name='services-detail'),
    url(r'^services-spf/InterfaceName/(?P<interfacename>[^/]+)/$', Services_Detail.as_view(), name='services-detail'),
    url(r'^services-spf/ServiceType/(?P<servicetype>[^/]+)/$', Services_Detail.as_view(), name='services-detail'),
    # Job-queue routes ('jobqueue' and 'jobs' are aliases for the same view).
    url(r'^jobqueue/$', Jobqueue_List.as_view(), name='jobsqueue-list'),
    url(r'^jobqueue/ResourceID/(?P<resourceid>[^/]+)/$', Jobqueue_List.as_view(), name='jobsqueue-list'),
    url(r'^jobs/$', Jobqueue_List.as_view(), name='jobsqueue-list'),
    url(r'^jobs/ResourceID/(?P<resourceid>[^/]+)/$', Jobqueue_List.as_view(), name='jobsqueue-list'),
    # Per-job routes, including per-user filtering by ProfileID.
    url(r'^jobs2/ID/(?P<id>[^/]+)/$', Job_Detail.as_view(), name='jobs-detail'),
    url(r'^jobs2/ResourceID/(?P<resourceid>[^/]+)/$', Job_List.as_view(), name='jobs-list'),
    url(r'^userjobs/ResourceID/(?P<resourceid>[^/]+)/$', Jobs_per_Resource_by_ProfileID.as_view(), name='jobs-profileid'),
    url(r'^userjobs/$', Jobs_by_ProfileID.as_view(), name='jobs-profileid'),
    url(r'^jobs2/ResourceID/(?P<resourceid>[^/]+)/Queue/(?P<queue>[^/]+)/$', Job_List.as_view(), name='jobs-list'),
    url(r'^jobs2/ResourceID/(?P<resourceid>[^/]+)/LocalAccount/(?P<localaccount>[^/]+)/$', Job_List.as_view(), name='jobs-list'),
]
from glue2_views_api.views import *
# Define our custom URLs
# Additionally, we include login URLs for the browseable API.
urlpatterns = [
# url(r'^applicationenvironment/$',
# ApplicationEnvironment_List.as_view(),
# name='applicationenvironment-list'),
url(r'^software/$', Software_List.as_view(), name='software-list'),
url(r'^software/ID/(?P<id>[^/]+)/$', Software_Detail.as_view(), name='software-detail'),
url(r'^software/ResourceID/(?P<resourceid>[^/]+)/$', Software_Detail.as_view(), name='software-detail'),
url(r'^software/AppName/(?P<appname>[^/]+)/$', Software_Detail.as_view(), name='software-detail'),
url(r'^software-spf/$', Software_List.as_view(), name='software-list'),
url(r'^software-spf/ID/(?P<id>[^/]+)/$', Software_Detail.as_view(), name='software-detail'),
url(r'^software-spf/ResourceID/(?P<resourceid>[^/]+)/$', Software_Detail.as_view(), name='software-detail'),
url(r'^software-spf/AppName/(?P<appname>[^/]+)/$', Software_Detail.as_view(), name='software-detail'),
url(r'^services/$', Services_List.as_view(), name='services-list'),
url(r'^services/ID/(?P<id>[^/]+)/$', Services_Detail.as_view(), name='services-detail'),
url(r'^services/ResourceID/(?P<resourceid>[^/]+)/$', Services_Detail.as_view(), name='services-detail'),
url(r'^services/InterfaceName/(?P<interfacename>[^/]+)/$', Services_Detail.as_view(), name='services-detail'),
url(r'^services/ServiceType/(?P<servicetype>[^/]+)/$', Services_Detail.as_view(), name='services-detail'),
url(r'^services-spf/$', Services_List.as_view(), name='services-list'),
url(r'^services-spf/ID/(?P<id>[^/]+)/$', Services_Detail.as_view(), name='services-detail'),
url(r'^services-spf/ResourceID/(?P<resourceid>[^/]+)/$', Services_Detail.as_view(), name='services-detail'),
url(r'^services-spf/InterfaceName/(?P<interfacename>[^/]+)/$', Services_Detail.as_view(), name='services-detail'),
url(r'^services-spf/ServiceType/(?P<servicetype>[^/]+)/$', Services_Detail.as_view(), name='services-detail'),
url(r'^jobqueue/$', Jobqueue_List.as_view(), name='jobsqueue-list'),
url(r'^jobqueue/ResourceID/(?P<resourceid>[^/]+)/$', Jobqueue_List.as_view(), name='jobsqueue-list'),
url(r'^jobs/$', Jobqueue_List.as_view(), name='jobsqueue-list'),
url(r'^jobs/ResourceID/(?P<resourceid>[^/]+)/$', Jobqueue_List.as_view(), name='jobsqueue-list'),
url(r'^jobs2/ID/(?P<id>[^/]+)/$', Job_Detail.as_view(), name='jobs-detail'),
url(r'^jobs2/ResourceID/(?P<resourceid>[^/]+)/$', Job_List.as_view(), name='jobs-list'),
url(r'^userjobs/ResourceID/(?P<resourceid>[^/]+)/$', Jobs_per_Resource_by_ProfileID.as_view(), name='jobs-profileid'),
url(r'^userjobs/$', Jobs_by_ProfileID.as_view(), name='jobs-profileid'),
url(r'^jobs2/ResourceID/(?P<resourceid>[^/]+)/Queue/(?P<queue>[^/]+)/$', Job_List.as_view(), name='jobs-list'),
url(r'^jobs2/ResourceID/(?P<resourceid>[^/]+)/LocalAccount/(?P<localaccount>[^/]+)/$', Job_List.as_view(), name='jobs-list'),
] | 0.287168 | 0.060975 |
import torch
import torch.functional as F
import torch.nn as nn
import torch.optim as optim
import torch.utils.data as data
from util import TUTDataset
from model import SELDNet
import argparse
import sys
import os
from pprint import pprint
# test tensorboardX
from tensorboardX import SummaryWriter
# Dummy batch used only for tracing the model graph to TensorBoard.
# NOTE(review): presumably (batch, channels, frames, features) -- confirm
# against SELDNet's expected input shape.
dummy_input = (torch.zeros(4,8, 5000, 512),)
# TUT dataset subsets to evaluate: impulse-response rooms, split and
# overlap configuration (passed straight to TUTDataset).
IR_SET = ["ir0","ir1","ir2","ir3","ir4"]
SPLIT_SET = ["split4"]
OV_SET = ["ov1"]
# Run on GPU when one is available, CPU otherwise.
device = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")
def test(args):
    """Evaluate SELDNet on the TUT development set and print the mean loss.

    Builds the evaluation DataLoader, traces the model graph to
    TensorBoard, loads the best checkpoint (``SELDNet-best.ckpt``) and
    accumulates the combined SED (cross-entropy) + DOA (MSE) loss over
    one pass of the data.

    Args:
        args: cmd line arguments parsed by `argparse`.
            batch_size: mini-batch size for the evaluation loader.
    """
    tutdata = TUTDataset("data/mic_dev","data/metadata_dev",sample_freq=44100,split_set=SPLIT_SET,ir_set=IR_SET,ov_set=OV_SET)
    tutloader = data.DataLoader(tutdata, batch_size=args.batch_size, shuffle=True)
    criterion_sed = nn.CrossEntropyLoss()
    criterion_doa = nn.MSELoss()
    model = SELDNet(K=tutdata.num_class)
    # Trace the model graph once so it can be inspected in TensorBoard.
    with SummaryWriter(comment='SELDNet') as w:
        print(w)
        w.add_graph(model, dummy_input)
    model.load_state_dict(torch.load("SELDNet-best.ckpt"))
    model.to(device)
    model.eval()
    test_loss_sum = 0
    steps = 0
    # Evaluation only: disable autograd to save memory and time; the
    # computed losses are unchanged because no backward pass is needed.
    with torch.no_grad():
        for sample, sed, doa in tutloader:
            sample = sample.to(device)
            sed = sed.to(device)
            doa = doa.to(device)
            print("steps {}".format(steps))
            out_sed, out_doa = model(sample)
            # CrossEntropyLoss expects (N, C) logits against (N,) class ids.
            out_sed = out_sed.reshape(-1, tutdata.num_class)
            sed = sed.reshape(-1)
            loss_sed = criterion_sed(out_sed, sed)
            loss_doa = criterion_doa(out_doa.double(), doa.double())
            loss = loss_sed + loss_doa
            test_loss_sum += float(loss)
            steps += 1
    # Guard against an empty loader instead of dividing by zero.
    if steps == 0:
        print("no test samples found")
        return
    print("test loss is {}".format(test_loss_sum/steps))
def test_decode(args):
    """Smoke-test TUTDataset label decoding on its first sample.

    Loads the dataset, one-hot encodes the first sample's SED targets and
    pretty-prints the human-readable events recovered by ``decode_one``.

    Args:
        args: parsed CLI arguments (currently unused).
    """
    tutdata = TUTDataset("data/mic_dev","data/metadata_dev",sample_freq=44100,split_set=SPLIT_SET,ir_set=IR_SET,ov_set=OV_SET)
    print(tutdata.file_names[0])
    sample,sed,doa = tutdata[0]
    # Expand integer class ids into a (frames, num_class) one-hot matrix.
    sed_onehot = torch.zeros((sed.shape[0],tutdata.num_class))
    print(sed)
    print(set(list(sed.numpy())))
    print(tutdata.name2idx)
    for k,v in enumerate(sed):
        sed_onehot[k,v] = 1
    # Round-trip the encoded labels back to (event, direction) tuples.
    res = tutdata.decode_one(sed_onehot,doa)
    pprint(res)
if __name__ == "__main__":
    # CLI entry point: evaluate the trained SELDNet checkpoint.
    # (test_decode is available but must be invoked manually.)
    parser = argparse.ArgumentParser(description='SELDNet params')
    parser.add_argument('--batch_size', type=int,default=4,help='The batch size')
    args = parser.parse_args()
    test(args)
import torch.functional as F
import torch.nn as nn
import torch.optim as optim
import torch.utils.data as data
from util import TUTDataset
from model import SELDNet
import argparse
import sys
import os
from pprint import pprint
# test tensorboardX
from tensorboardX import SummaryWriter
dummy_input = (torch.zeros(4,8, 5000, 512),)
IR_SET = ["ir0","ir1","ir2","ir3","ir4"]
SPLIT_SET = ["split4"]
OV_SET = ["ov1"]
device = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")
def test(args):
'''The function to train SELDNet
Args:
args: cmd line arguments parsed by `argparse`
batch_size
epoch_num
'''
tutdata = TUTDataset("data/mic_dev","data/metadata_dev",sample_freq=44100,split_set=SPLIT_SET,ir_set=IR_SET,ov_set=OV_SET)
tutloader = data.DataLoader(tutdata,batch_size=args.batch_size,shuffle=True)
criterion_sed = nn.CrossEntropyLoss()
criterion_doa = nn.MSELoss()
model = SELDNet(K=tutdata.num_class)
# test tensorboardX
with SummaryWriter(comment='SELDNet') as w:
print(w)
w.add_graph(model, dummy_input)
model.load_state_dict(torch.load("SELDNet-best.ckpt"))
model.to(device)
model.eval()
test_loss_sum = 0
steps = 0
for sample,sed,doa in tutloader:
sample = sample.to(device)
sed = sed.to(device)
doa = doa.to(device)
print("steps {}".format(steps))
out_sed,out_doa = model(sample)
out_sed = out_sed.reshape(-1,tutdata.num_class)
sed = sed.reshape(-1)
loss_sed = criterion_sed(out_sed,sed)
loss_doa = criterion_doa(out_doa.double(),doa.double())
loss = loss_sed+loss_doa
test_loss_sum+=float(loss)
steps+=1
print("test loss is {}".format(test_loss_sum/steps))
def test_decode(args):
tutdata = TUTDataset("data/mic_dev","data/metadata_dev",sample_freq=44100,split_set=SPLIT_SET,ir_set=IR_SET,ov_set=OV_SET)
print(tutdata.file_names[0])
sample,sed,doa = tutdata[0]
sed_onehot = torch.zeros((sed.shape[0],tutdata.num_class))
print(sed)
print(set(list(sed.numpy())))
print(tutdata.name2idx)
for k,v in enumerate(sed):
sed_onehot[k,v] = 1
res = tutdata.decode_one(sed_onehot,doa)
pprint(res)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='SELDNet params')
parser.add_argument('--batch_size', type=int,default=4,help='The batch size')
args = parser.parse_args()
test(args) | 0.458106 | 0.300592 |
from PyQt5 import QtWidgets, QtCore, QtGui
from ui import Window
class Settings(Window):
    """Graphical interface configuration for the main window and its widgets."""
    def __init__(self):
        super(Settings, self).__init__()
        self.style_sheet = "font: 14pt \"Times New Roman\";" # Text size and font
        self.header_list = ["Улица", "Дом", "Место", "TKD", "IP", "Примечание"] # Table header captions
        self.width_header = [250, 100, 160, 125, 125, 395] # Header column widths
        self.set_settings_main_window() # Main window settings
        self.set_settings_combo_boxes() # Combo box settings
        self.set_settings_line_edits() # Input field settings
        self.set_settings_buttons() # Button settings
        self.set_settings_table_widget() # Table settings
    def set_settings_main_window(self):
        """Configure the main window: size, title and icon."""
        self.resize(1200, 700)
        self.setWindowTitle("SearchInDB")
        self.setWindowIcon(QtGui.QIcon("Icons/main.ico"))
    def set_settings_combo_boxes(self):
        """Configure the search-section combo box and populate its items."""
        self.combo_box.setMinimumSize(30, 30)
        self.combo_box.setStyleSheet(self.style_sheet)
        self.combo_box.setToolTip("Выберете раздел поиска")
        for i in ["Улица", "TKD", "IP"]:
            self.combo_box.addItem(i)
    def set_settings_line_edits(self):
        """Configure the input line edit: size, style and tooltip."""
        self.line_edit.setMinimumSize(QtCore.QSize(0, 30))
        self.line_edit.setStyleSheet(self.style_sheet)
        self.line_edit.setToolTip("Введите значение")
    def set_settings_buttons(self):
        """Configure the search/add/delete/save buttons: icons, tooltips, style."""
        self.search_button.setIcon(QtGui.QIcon("Icons/search.ico"))
        self.search_button.setToolTip("Поиск")
        self.add_button.setIcon(QtGui.QIcon("Icons/add.ico"))
        self.add_button.setToolTip("Добавить строку")
        self.del_button.setIcon(QtGui.QIcon("Icons/del.ico"))
        self.del_button.setToolTip("Удалить строку")
        self.save_button.setIcon(QtGui.QIcon("Icons/save.ico"))
        self.save_button.setToolTip("Сохранить")
        # Common sizing/styling applied to every action button.
        for i in [self.search_button, self.add_button, self.del_button, self.save_button]:
            i.setMinimumSize(33, 32)
            i.setStyleSheet(self.style_sheet)
            i.setToolTipDuration(3000)
    def set_settings_table_widget(self):
        """Configure the result table: headers, sorting, row selection, widths."""
        self.table_widget.setColumnCount(len(self.header_list))
        self.table_widget.setHorizontalHeaderLabels(self.header_list)
        self.table_widget.setStyleSheet(self.style_sheet)
        self.table_widget.verticalHeader().setVisible(False)
        self.table_widget.setSortingEnabled(True)
        self.table_widget.setSelectionBehavior(QtWidgets.QAbstractItemView.SelectionBehavior.SelectRows)
        self.table_widget.setSelectionMode(QtWidgets.QAbstractItemView.SelectionMode.SingleSelection)
        # Apply the configured width to each header column.
        for i, j in zip(self.header_list, self.width_header):
            self.table_widget.setColumnWidth(self.header_list.index(i), j) | settings.py | from PyQt5 import QtWidgets, QtCore, QtGui
from ui import Window
class Settings(Window):
"""Настройка графического инфтерфейса"""
def __init__(self):
super(Settings, self).__init__()
self.style_sheet = "font: 14pt \"Times New Roman\";" # Размер и шрифт текста
self.header_list = ["Улица", "Дом", "Место", "TKD", "IP", "Примечание"] # Названия хедера таблицы
self.width_header = [250, 100, 160, 125, 125, 395] # Размеры столбцов хедера
self.set_settings_main_window() # Настройки главного окна
self.set_settings_combo_boxes() # Настройки комбобокса
self.set_settings_line_edits() # Настройки поля ввода
self.set_settings_buttons() # Настрока кнопок
self.set_settings_table_widget() # Настройка таблицы
def set_settings_main_window(self):
"""Найстройка главного окна"""
self.resize(1200, 700)
self.setWindowTitle("SearchInDB")
self.setWindowIcon(QtGui.QIcon("Icons/main.ico"))
def set_settings_combo_boxes(self):
"""Настройка поля Список"""
self.combo_box.setMinimumSize(30, 30)
self.combo_box.setStyleSheet(self.style_sheet)
self.combo_box.setToolTip("Выберете раздел поиска")
for i in ["Улица", "TKD", "IP"]:
self.combo_box.addItem(i)
def set_settings_line_edits(self):
"""Настройка поля Ввода"""
self.line_edit.setMinimumSize(QtCore.QSize(0, 30))
self.line_edit.setStyleSheet(self.style_sheet)
self.line_edit.setToolTip("Введите значение")
def set_settings_buttons(self):
"""Настройка кнопок"""
self.search_button.setIcon(QtGui.QIcon("Icons/search.ico"))
self.search_button.setToolTip("Поиск")
self.add_button.setIcon(QtGui.QIcon("Icons/add.ico"))
self.add_button.setToolTip("Добавить строку")
self.del_button.setIcon(QtGui.QIcon("Icons/del.ico"))
self.del_button.setToolTip("Удалить строку")
self.save_button.setIcon(QtGui.QIcon("Icons/save.ico"))
self.save_button.setToolTip("Сохранить")
for i in [self.search_button, self.add_button, self.del_button, self.save_button]:
i.setMinimumSize(33, 32)
i.setStyleSheet(self.style_sheet)
i.setToolTipDuration(3000)
def set_settings_table_widget(self):
"""Настройка таблицы"""
self.table_widget.setColumnCount(len(self.header_list))
self.table_widget.setHorizontalHeaderLabels(self.header_list)
self.table_widget.setStyleSheet(self.style_sheet)
self.table_widget.verticalHeader().setVisible(False)
self.table_widget.setSortingEnabled(True)
self.table_widget.setSelectionBehavior(QtWidgets.QAbstractItemView.SelectionBehavior.SelectRows)
self.table_widget.setSelectionMode(QtWidgets.QAbstractItemView.SelectionMode.SingleSelection)
for i, j in zip(self.header_list, self.width_header):
self.table_widget.setColumnWidth(self.header_list.index(i), j) | 0.400632 | 0.10307 |
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0011_update_proxy_permissions'),
]
operations = [
migrations.CreateModel(
name='doctor',
fields=[
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to=settings.AUTH_USER_MODEL)),
('is_patient', models.BooleanField(default=False)),
('is_doctor', models.BooleanField(default=True)),
('name', models.CharField(max_length=50)),
('dob', models.DateField()),
('address', models.CharField(max_length=100)),
('mobile_no', models.CharField(max_length=15)),
('gender', models.CharField(max_length=10)),
('registration_no', models.CharField(max_length=20)),
('year_of_registration', models.DateField()),
('qualification', models.CharField(max_length=20)),
('State_Medical_Council', models.CharField(max_length=30)),
('specialization', models.CharField(max_length=30)),
('rating', models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name='patient',
fields=[
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to=settings.AUTH_USER_MODEL)),
('is_patient', models.BooleanField(default=True)),
('is_doctor', models.BooleanField(default=False)),
('name', models.CharField(max_length=50)),
('dob', models.DateField()),
('address', models.CharField(max_length=100)),
('mobile_no', models.CharField(max_length=15)),
('gender', models.CharField(max_length=10)),
],
),
migrations.CreateModel(
name='rating_review',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('rating', models.IntegerField(default=0)),
('review', models.TextField(blank=True)),
('doctor', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='main_app.doctor')),
('patient', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='main_app.patient')),
],
),
migrations.CreateModel(
name='diseaseinfo',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('diseasename', models.CharField(max_length=200)),
('no_of_symp', models.IntegerField()),
('symptomsname', models.CharField(max_length=200)),
('confidence', models.DecimalField(decimal_places=2, max_digits=5)),
('consultdoctor', models.CharField(max_length=200)),
('patient', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='main_app.patient')),
],
),
migrations.CreateModel(
name='consultation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('consultation_date', models.DateField()),
('status', models.CharField(max_length=20)),
('diseaseinfo', models.OneToOneField(null=True, on_delete=django.db.models.deletion.SET_NULL, to='main_app.diseaseinfo')),
('doctor', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='main_app.doctor')),
('patient', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='main_app.patient')),
],
),
] | main_app/migrations/0001_initial.py |
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0011_update_proxy_permissions'),
]
operations = [
migrations.CreateModel(
name='doctor',
fields=[
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to=settings.AUTH_USER_MODEL)),
('is_patient', models.BooleanField(default=False)),
('is_doctor', models.BooleanField(default=True)),
('name', models.CharField(max_length=50)),
('dob', models.DateField()),
('address', models.CharField(max_length=100)),
('mobile_no', models.CharField(max_length=15)),
('gender', models.CharField(max_length=10)),
('registration_no', models.CharField(max_length=20)),
('year_of_registration', models.DateField()),
('qualification', models.CharField(max_length=20)),
('State_Medical_Council', models.CharField(max_length=30)),
('specialization', models.CharField(max_length=30)),
('rating', models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name='patient',
fields=[
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to=settings.AUTH_USER_MODEL)),
('is_patient', models.BooleanField(default=True)),
('is_doctor', models.BooleanField(default=False)),
('name', models.CharField(max_length=50)),
('dob', models.DateField()),
('address', models.CharField(max_length=100)),
('mobile_no', models.CharField(max_length=15)),
('gender', models.CharField(max_length=10)),
],
),
migrations.CreateModel(
name='rating_review',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('rating', models.IntegerField(default=0)),
('review', models.TextField(blank=True)),
('doctor', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='main_app.doctor')),
('patient', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='main_app.patient')),
],
),
migrations.CreateModel(
name='diseaseinfo',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('diseasename', models.CharField(max_length=200)),
('no_of_symp', models.IntegerField()),
('symptomsname', models.CharField(max_length=200)),
('confidence', models.DecimalField(decimal_places=2, max_digits=5)),
('consultdoctor', models.CharField(max_length=200)),
('patient', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='main_app.patient')),
],
),
migrations.CreateModel(
name='consultation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('consultation_date', models.DateField()),
('status', models.CharField(max_length=20)),
('diseaseinfo', models.OneToOneField(null=True, on_delete=django.db.models.deletion.SET_NULL, to='main_app.diseaseinfo')),
('doctor', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='main_app.doctor')),
('patient', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='main_app.patient')),
],
),
] | 0.572842 | 0.154058 |
import argparse
import json
import os
import pdb
import sys
import time
import string
import torch
from torch.utils.data import DataLoader
sys.path.append('.')
from config import special_toks, train_conf
from dataset import FastDataset
from models import BlindStatelessLSTM, MultiAttentiveTransformer
from tools.simmc_dataset import SIMMCDatasetForResponseGeneration
"""expected form for model output
[
{
"dialog_id": <dialog_id>,
"candidate_scores": [
<list of 100 scores for 100 candidates for round 1>
<list of 100 scores for 100 candidates for round 2>
...
]
}
...
]
"""
def instantiate_model(args, model_configurations, out_vocab, device):
    """Build the model selected by ``args.model`` from CLI args and config.

    Raises:
        Exception: when ``args.model`` names an unknown architecture.
    """
    if args.model == 'matransformer':
        return MultiAttentiveTransformer(**model_configurations,
                                         seed=train_conf['seed'],
                                         device=device,
                                         out_vocab=out_vocab,
                                         retrieval_eval=args.retrieval_eval,
                                         gen_eval=args.gen_eval,
                                         beam_size=args.beam_size,
                                         mode='inference',
                                         **special_toks)
    if args.model == 'blindstateless':
        return BlindStatelessLSTM(word_embeddings_path=args.embeddings,
                                  pad_token=special_toks['pad_token'],
                                  unk_token=special_toks['unk_token'],
                                  seed=train_conf['seed'],
                                  OOV_corrections=False,
                                  freeze_embeddings=True)
    raise Exception('Model not present!')
def create_eval_dicts(dataset):
    """Prepare one empty generation and retrieval result entry per dialogue.

    Returns a pair of dicts keyed by dialogue id.
    """
    dataset.create_id2turns()
    gen_eval_dict = {dial_id: {'dialog_id': dial_id, 'predictions': []}
                     for dial_id in dataset.id2turns}
    retr_eval_dict = {dial_id: {'dialog_id': dial_id, 'candidate_scores': []}
                      for dial_id in dataset.id2turns}
    return gen_eval_dict, retr_eval_dict
def move_batch_to_device(batch, device):
    """Move every tensor in *batch* to *device*, mutating *batch* in place.

    The 'attributes' entry is left untouched; a 'history' key is not
    supported and raises.
    """
    for key, value in batch.items():
        if key == 'history':
            raise Exception('Not implemented')
        if key == 'attributes':
            continue
        batch[key] = value.to(device)
def visualize_result(utt_ids, item_ids, id2word, gen_ids=None):
    """Print a human-readable view of one turn: user utterance, optional
    generated response and the focused item.

    Args:
        utt_ids: iterable of token-id tensors for the user utterance (0 = padding).
        item_ids: iterable of token-id tensors for the focused item (0 = padding).
        id2word: mapping from token id to word string.
        gen_ids: optional list of plain int token ids for the generated response.
    """
    # Drop padding (id 0) and map ids back to words. Loop variable renamed:
    # the original shadowed the builtin `id`.
    item = [id2word[tok.item()] for tok in item_ids if tok != 0]
    words_request = [id2word[tok.item()] for tok in utt_ids if tok != 0]
    if gen_ids is not None:
        words_resp = [id2word[tok] for tok in gen_ids]
    print('USER: {}'.format(words_request))
    if gen_ids is not None:
        print('GEN: {}'.format(words_resp))
    print('Item: {}'.format(item))
def eval(model, test_dataset, args, save_folder, device):
    """Run inference over *test_dataset* and dump generation/retrieval results.

    Args:
        model: trained model; must expose ``collate_fn`` and return a dict
            with 'generation' and/or 'retrieval' entries.
        test_dataset: dataset providing (dial_ids, turns, batch) samples.
        args: CLI args; ``gen_eval``/``retrieval_eval`` flags select outputs.
        save_folder: directory where the JSON result files are written.
        device: torch device to run inference on.
    """
    model.eval()
    model.to(device)
    # batch_size must stay 1: each step handles exactly one dialogue turn.
    params = {'batch_size': 1,
              'shuffle': False,
              'num_workers': 0}
    testloader = DataLoader(test_dataset, **params, collate_fn=model.collate_fn)
    gen_eval_dict, retr_eval_dict = create_eval_dicts(test_dataset)
    with torch.no_grad():
        for curr_step, (dial_ids, turns, batch) in enumerate(testloader):
            assert len(dial_ids) == 1, 'Only unitary batch size is allowed during testing'
            dial_id = dial_ids[0]
            turn = turns[0]
            move_batch_to_device(batch, device)
            res = model(**batch,
                        history=None,
                        actions=None)
            if args.gen_eval:
                gen_eval_dict[dial_id]['predictions'].append(
                    {'turn_id': turn, 'response': res['generation']['string']})
            if args.retrieval_eval:
                # TODO: adjust candidate scores based on semantic attribute information
                retr_eval_dict[dial_id]['candidate_scores'].append(
                    {'turn_id': turn, 'scores': res['retrieval'].squeeze(0).tolist()})
    if args.gen_eval:
        _dump_results(list(gen_eval_dict.values()),
                      os.path.join(save_folder, 'eval_gen.json'),
                      'generation')
    if args.retrieval_eval:
        _dump_results(list(retr_eval_dict.values()),
                      os.path.join(save_folder, 'eval_retr.json'),
                      'retrieval')

def _dump_results(results, save_file, label):
    """Serialize *results* to *save_file*, reporting (not swallowing) failures."""
    try:
        with open(save_file, 'w') as fp:
            json.dump(results, fp)
        print('{} results saved in {}'.format(label, save_file))
    except (OSError, TypeError) as err:
        # Narrowed from a bare `except:` that hid the actual failure reason
        # and even swallowed KeyboardInterrupt.
        print('Error in writing the resulting JSON: {}'.format(err))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
"--model",
type=str,
choices=['blindstateless', 'blindstateful', 'matransformer'],
required=True,
help="Type of the model (options: 'blindstateless', 'blindstateful', 'matransformer')")
parser.add_argument(
"--model_path",
default=None,
type=str,
required=True,
help="Path to the weights of the model")
parser.add_argument(
"--model_conf",
default=None,
type=str,
required=True,
help="Path to the model configuration JSON file")
parser.add_argument(
"--vocabulary",
default=None,
type=str,
required=True,
help="Path to output vocabulary for the model")
parser.add_argument(
"--data",
default=None,
type=str,
required=True,
help="Path to test dataset json file")
parser.add_argument(
"--metadata_ids",
type=str,
required=True,
help="Path to metadata ids file")
parser.add_argument(
"--beam_size",
type=int,
required=True,
help="Size of the beam for the beam search at inference time")
parser.add_argument(
"--retrieval_eval",
action='store_true',
default=False,
required=False,
help="Flag to enable retrieval evaluation")
parser.add_argument(
"--gen_eval",
action='store_true',
default=False,
required=False,
help="Flag to enable generation evaluation")
parser.add_argument(
"--cuda",
default=None,
required=False,
type=int,
help="id of device to use")
start_t = time.time()
args = parser.parse_args()
test_dataset = FastDataset(dat_path=args.data,
metadata_ids_path= args.metadata_ids,
retrieval=args.retrieval_eval)
device = torch.device('cuda:{}'.format(args.cuda) if torch.cuda.is_available() and args.cuda is not None else "cpu")
print('EVAL DATASET: {}'.format(test_dataset))
# prepare model
with open(args.model_conf) as fp:
model_configurations = json.load(fp)
with open(args.vocabulary, 'rb') as fp:
bert2genid = torch.load(fp)
model = instantiate_model(args,
model_configurations=model_configurations,
out_vocab=bert2genid,
device=device)
model.load_state_dict(torch.load(args.model_path))
model_folder = '/'.join(args.model_path.split('/')[:-1])
print('model loaded from {}'.format(model_folder))
eval(model, test_dataset, args, save_folder=model_folder, device=device)
end_t = time.time()
m_count = ((end_t-start_t)/60) % 60
s_count = (end_t-start_t) % 60
print('evaluation time: {}m:{}s'.format(round(m_count), round(s_count))) | mm_response_generation/eval.py | import argparse
import json
import os
import pdb
import sys
import time
import string
import torch
from torch.utils.data import DataLoader
sys.path.append('.')
from config import special_toks, train_conf
from dataset import FastDataset
from models import BlindStatelessLSTM, MultiAttentiveTransformer
from tools.simmc_dataset import SIMMCDatasetForResponseGeneration
"""expected form for model output
[
{
"dialog_id": <dialog_id>,
"candidate_scores": [
<list of 100 scores for 100 candidates for round 1>
<list of 100 scores for 100 candidates for round 2>
...
]
}
...
]
"""
def instantiate_model(args, model_configurations, out_vocab, device):
if args.model == 'blindstateless':
return BlindStatelessLSTM(word_embeddings_path=args.embeddings,
pad_token=special_toks['pad_token'],
unk_token=special_toks['unk_token'],
seed=train_conf['seed'],
OOV_corrections=False,
freeze_embeddings=True)
elif args.model == 'matransformer':
return MultiAttentiveTransformer(**model_configurations,
seed=train_conf['seed'],
device=device,
out_vocab=out_vocab,
retrieval_eval=args.retrieval_eval,
gen_eval=args.gen_eval,
beam_size=args.beam_size,
mode='inference',
**special_toks,
)
else:
raise Exception('Model not present!')
def create_eval_dicts(dataset):
dataset.create_id2turns()
gen_eval_dict = {}
retr_eval_dict = {}
for dial_id, num_turns in dataset.id2turns.items():
gen_eval_dict[dial_id] = {'dialog_id': dial_id, 'predictions': []}
retr_eval_dict[dial_id] = {'dialog_id': dial_id, 'candidate_scores': []}
return gen_eval_dict, retr_eval_dict
def move_batch_to_device(batch, device):
for key in batch.keys():
if key == 'history':
raise Exception('Not implemented')
if key != 'attributes':
batch[key] = batch[key].to(device)
def visualize_result(utt_ids, item_ids, id2word, gen_ids=None):
item = [id2word[id.item()] for id in item_ids if id != 0]
words_request = [id2word[id.item()] for id in utt_ids if id != 0]
if gen_ids is not None:
words_resp = [id2word[id] for id in gen_ids]
#cleaned_req = clean_response(words_request)
#cleaned_resp = clean_response(words_resp)
print('USER: {}'.format(words_request))
if gen_ids is not None:
print('GEN: {}'.format(words_resp))
print('Item: {}'.format(item))
def eval(model, test_dataset, args, save_folder, device):
model.eval()
model.to(device)
#print('MODEL: {}'.format(model))
# prepare DataLoader
params = {'batch_size': 1,
'shuffle': False,
'num_workers': 0}
testloader = DataLoader(test_dataset, **params, collate_fn=model.collate_fn)
gen_eval_dict, retr_eval_dict = create_eval_dicts(test_dataset)
with torch.no_grad():
for curr_step, (dial_ids, turns, batch) in enumerate(testloader):
assert len(dial_ids) == 1, 'Only unitary batch size is allowed during testing'
dial_id = dial_ids[0]
turn = turns[0]
move_batch_to_device(batch, device)
res = model(**batch,
history=None,
actions=None)
if args.gen_eval:
gen_eval_dict[dial_id]['predictions'].append({'turn_id': turn, 'response': res['generation']['string']})
#visualize_result(batch['utterances'][0], batch['focus_items'][0], id2word, res['generation']['string'])
if args.retrieval_eval:
retr_eval_dict[dial_id]['candidate_scores'].append({'turn_id': turn, 'scores': res['retrieval'].squeeze(0).tolist()})
#todo here adjust candidates scores based on semantic attribute informations
if args.gen_eval:
gen_eval_list = []
for key in gen_eval_dict:
gen_eval_list.append(gen_eval_dict[key])
save_file = os.path.join(save_folder, 'eval_gen.json')
try:
with open(save_file, 'w+') as fp:
json.dump(gen_eval_list, fp)
print('generation results saved in {}'.format(save_file))
except:
print('Error in writing the resulting JSON')
if args.retrieval_eval:
retr_eval_list = []
for key in retr_eval_dict:
retr_eval_list.append(retr_eval_dict[key])
save_file = os.path.join(save_folder, 'eval_retr.json')
try:
with open(save_file, 'w+') as fp:
json.dump(retr_eval_list, fp)
print('retrieval results saved in {}'.format(save_file))
except:
print('Error in writing the resulting JSON')
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
"--model",
type=str,
choices=['blindstateless', 'blindstateful', 'matransformer'],
required=True,
help="Type of the model (options: 'blindstateless', 'blindstateful', 'matransformer')")
parser.add_argument(
"--model_path",
default=None,
type=str,
required=True,
help="Path to the weights of the model")
parser.add_argument(
"--model_conf",
default=None,
type=str,
required=True,
help="Path to the model configuration JSON file")
parser.add_argument(
"--vocabulary",
default=None,
type=str,
required=True,
help="Path to output vocabulary for the model")
parser.add_argument(
"--data",
default=None,
type=str,
required=True,
help="Path to test dataset json file")
parser.add_argument(
"--metadata_ids",
type=str,
required=True,
help="Path to metadata ids file")
parser.add_argument(
"--beam_size",
type=int,
required=True,
help="Size of the beam for the beam search at inference time")
parser.add_argument(
"--retrieval_eval",
action='store_true',
default=False,
required=False,
help="Flag to enable retrieval evaluation")
parser.add_argument(
"--gen_eval",
action='store_true',
default=False,
required=False,
help="Flag to enable generation evaluation")
parser.add_argument(
"--cuda",
default=None,
required=False,
type=int,
help="id of device to use")
start_t = time.time()
args = parser.parse_args()
test_dataset = FastDataset(dat_path=args.data,
metadata_ids_path= args.metadata_ids,
retrieval=args.retrieval_eval)
device = torch.device('cuda:{}'.format(args.cuda) if torch.cuda.is_available() and args.cuda is not None else "cpu")
print('EVAL DATASET: {}'.format(test_dataset))
# prepare model
with open(args.model_conf) as fp:
model_configurations = json.load(fp)
with open(args.vocabulary, 'rb') as fp:
bert2genid = torch.load(fp)
model = instantiate_model(args,
model_configurations=model_configurations,
out_vocab=bert2genid,
device=device)
model.load_state_dict(torch.load(args.model_path))
model_folder = '/'.join(args.model_path.split('/')[:-1])
print('model loaded from {}'.format(model_folder))
eval(model, test_dataset, args, save_folder=model_folder, device=device)
end_t = time.time()
m_count = ((end_t-start_t)/60) % 60
s_count = (end_t-start_t) % 60
print('evaluation time: {}m:{}s'.format(round(m_count), round(s_count))) | 0.252108 | 0.182863 |
import sys
from struct import unpack
class ProtoBufAnalyserError(Exception):
    """
    Error raised for invalid protobuf data or parameters given to this script.

    Note: printing and exiting from __init__ means the exception never
    actually propagates; the process terminates with exit status 1 instead.
    """
    def __init__(self, msg):
        # Fix: the original read `print(ERROR : " + msg)` -- the opening
        # quote was missing, which made the whole module a SyntaxError.
        print("ERROR : " + msg)
        sys.exit(1)
class ProtoBufAnalyser(object):
    '''
    Analyse a protobuf payload without its .proto schema.
    All wire-format details in https://developers.google.com/protocol-buffers/docs/encoding
    '''
    def __init__(self, protobuf):
        '''
        Initialize the analyser.
        protobuf : protobuf content to analyse, as a hexadecimal string
        '''
        self._data = protobuf
        # Read cursor, counted in hex characters (2 per byte).
        self._readIdx = 0
        self._wireTypes = ['Varint','64-bit','Length-delimited','Start group','End group','32-bit']
    def analyse(self):
        '''
        Walk the whole protobuf, decoding and printing every (key, value) field
        until the end of the data is reached.
        '''
        stop = False
        while stop == False:
            fieldNum, wireType = self.readKey()
            if wireType >= 0 and wireType <= 5:
                print("fieldNum=" + repr(fieldNum) + " wire type=" + repr(wireType) + " (" + self._wireTypes[wireType] + ")")
            if wireType == 0: # Varint
                val = self.readVarint()
                print("    Read Varint: " + repr(val))
            elif wireType == 1: # 64-bit (16 hex chars)
                signedFixed, unsignedFixed, floatingPoint = self.readFixedLen(16)
                print("    Read 64-bit: " + repr(signedFixed) + " (fixed64) " + repr(unsignedFixed) + " (unsigned fixed64) " + repr(floatingPoint) + " (float64) ")
            elif wireType == 2: # Length-delimited
                val = self.readDelimited()
                try:
                    asciiVal = " (ASCII): " + str(bytes.fromhex(val))
                except (TypeError, ValueError) as e:
                    raise ProtoBufAnalyserError("Odd hexadecimal data lengh which doesn't correspond to bytes : " + str(e) )
                print("    Read Length-delimited (hex): " + repr(val) + asciiVal)
            elif wireType == 3: # Start group (deprecated, unsupported)
                raise ProtoBufAnalyserError("Start group field detected but this is a depricated protobuf field not supported by this script" )
            elif wireType == 4: # End group (deprecated, unsupported)
                raise ProtoBufAnalyserError("End group field detected but this is a depricated protobuf field not supported by this script" )
            elif wireType == 5: # 32-bit (8 hex chars)
                signedFixed, unsignedFixed, floatingPoint = self.readFixedLen(8)
                print("    Read 32-bit: " + repr(signedFixed) + " (fixed32) " + repr(unsignedFixed) + " (unsigned fixed32) " + repr(floatingPoint) + " (float32) ")
            else:
                raise ProtoBufAnalyserError("Invalid wire type detected : " + repr(wireType) + " with field number: " + repr(fieldNum) )
            if self._readIdx >= len(self._data):
                stop = True
                print("End of protobuf reached without error")
    def readKey(self):
        '''
        Read a protobuf key varint and return (field number, wire type).
        The wire type lives in the 3 least significant bits of the key.
        '''
        key = format(self.readVarint(),'08b')
        if not len(key) :
            raise ProtoBufAnalyserError("Invalid key value: " + key)
        wireType = int(key[-3:],2)
        fieldNum = int(key[:-3],2)
        return fieldNum, wireType
    def readDelimited(self):
        '''
        Read a Length-delimited field at the current index and return its
        payload as a hex string.
        '''
        lengh = self.readVarint()
        # Check that the remaining data covers the declared field length
        # (length is doubled because of the hexadecimal representation).
        if len(self._data) < ( self._readIdx+(2*lengh) ):
            if (len(self._data)-self._readIdx) > 0:
                raise ProtoBufAnalyserError("Length-delimited field specified lengh is " + repr(2*lengh) + " hex characters and given data lengh is " + repr(len(self._data)-self._readIdx) )
            else:
                raise ProtoBufAnalyserError("Length-delimited field specified lengh is " + repr(2*lengh) + " the end of given data has been reached" )
        value = self._data[self._readIdx:self._readIdx+(2*lengh)]
        self._readIdx = self._readIdx + (2*lengh)
        return value
    def readFixedLen(self,lengh):
        '''
        Read a fixed-width value at the current index.
        lengh is the number of hex characters: 8 for fixed32/sfixed32/float,
        16 for fixed64/sfixed64/double.
        Returns (signedFixed, unsignedFixed, floatingPoint) interpretations.
        '''
        # Fixed-width protobuf values are little endian (LSB first).
        StrVal = ""
        if len(self._data) < ( self._readIdx+(lengh) ):
            if (len(self._data)-self._readIdx) > 0:
                raise ProtoBufAnalyserError("FixedLength field specified lengh is " + repr(lengh) + " hex characters and given data lengh is " + repr(len(self._data)-self._readIdx) )
            else:
                raise ProtoBufAnalyserError("FixedLength field specified lengh is " + repr(lengh) + " and the end of given data has been reached" )
        # Read one byte (2 hex chars) at a time, prepending so the
        # little-endian input becomes a big-endian hex string.
        # Fix: integer division -- `lengh/2` is a float in Python 3 and
        # range() raised TypeError, so every fixed32/fixed64 field crashed.
        for i in range(lengh // 2):
            StrVal = self._data[self._readIdx:self._readIdx+2] + StrVal
            self._readIdx = self._readIdx + 2
        # Fix: str.decode('hex') no longer exists in Python 3; use
        # bytes.fromhex, consistent with analyse() above.
        raw = bytes.fromhex(StrVal)
        if lengh == 16 : # 64 bits
            signedFixed = unpack('!q', raw)[0]
            unsignedFixed = unpack('!Q', raw)[0]
            floatingPoint = unpack('!d', raw)[0]
        if lengh == 8 : # 32 bits
            signedFixed = unpack('!l', raw)[0]
            unsignedFixed = unpack('!L', raw)[0]
            floatingPoint = unpack('!f', raw)[0]
        return signedFixed, unsignedFixed, floatingPoint
    def readVarint(self):
        '''
        Read a varint at the current index and return its value as int.
        '''
        retVarint = ""
        isLast = False
        # Varint 7-bit groups arrive least significant first, so each new
        # group is prepended to the binary string.
        while not isLast:
            isLast, binaryVal = self.getBinValFromVarintByte(self._data[self._readIdx:self._readIdx+2])
            retVarint = binaryVal + retVarint
            # Advance 2 hex characters = 1 byte.
            self._readIdx = self._readIdx + 2
        return int(retVarint,2)
    def getBinValFromVarintByte(self, varint):
        '''
        Decode one varint byte (2 hex chars).
        Returns (isLast, value): isLast is False while the continuation bit
        (MSB) is set; value is the remaining 7 payload bits as a binary string.
        '''
        if not len(varint):
            raise ProtoBufAnalyserError("Invalid lengh of data to decode this varint value")
        isLast = True
        binaryVal = bin(int(varint, 16))[2:].zfill(8)
        if binaryVal[0] == "1":
            isLast = False
        # Drop the continuation (most significant) bit.
        return isLast, binaryVal[1:]
if __name__ == '__main__':
    # CLI: expects exactly one argument, the protobuf payload as a hex string.
    if len(sys.argv) == 2:
        myAnalyser = ProtoBufAnalyser(sys.argv[1])
        myAnalyser.analyse()
    else:
        print('Missing parameter : Protobuf in hexadecimal representation in string') | ProtoBufAnalyser.py | import sys
from struct import unpack
class ProtoBufAnalyserError(Exception):
    """
    Error raised for invalid protobuf data or parameters given to this script.

    Note: printing and exiting from __init__ means the exception never
    actually propagates; the process terminates with exit status 1 instead.
    """
    def __init__(self, msg):
        # Fix: the original read `print(ERROR : " + msg)` -- the opening
        # quote was missing, which made the whole module a SyntaxError.
        print("ERROR : " + msg)
        sys.exit(1)
class ProtoBufAnalyser(object):
    '''
    This class is designed to analyse a protobuf content without proto file.
    All details in https://developers.google.com/protocol-buffers/docs/encoding
    '''

    def __init__(self, protobuf):
        '''
        Initialize analyser.
        protobuf : protobuf content as a string of hexadecimal characters
                   (two characters per byte)
        '''
        self._data = protobuf
        # Read offset, counted in hex characters (so it advances by 2 per byte).
        self._readIdx = 0
        self._wireTypes = ['Varint', '64-bit', 'Length-delimited', 'Start group', 'End group', '32-bit']

    def analyse(self):
        '''
        Analyse the protobuf, printing one line per decoded field.
        Raises ProtoBufAnalyserError on malformed or unsupported content.
        '''
        stop = False
        while not stop:
            fieldNum, wireType = self.readKey()
            if 0 <= wireType <= 5:
                print("fieldNum=" + repr(fieldNum) + " wire type=" + repr(wireType) + " (" + self._wireTypes[wireType] + ")")
            if wireType == 0:  # Varint
                val = self.readVarint()
                print(" Read Varint: " + repr(val))
            elif wireType == 1:  # 64-bit (16 hex char)
                signedFixed, unsignedFixed, floatingPoint = self.readFixedLen(16)
                print(" Read 64-bit: " + repr(signedFixed) + " (fixed64) " + repr(unsignedFixed) + " (unsigned fixed64) " + repr(floatingPoint) + " (float64) ")
            elif wireType == 2:  # Length-delimited
                val = self.readDelimited()
                try:
                    asciiVal = " (ASCII): " + str(bytes.fromhex(val))
                # Bug fix: bytes.fromhex raises ValueError (not TypeError) on
                # odd-length / non-hex input in Python 3; catch both.
                except (TypeError, ValueError) as e:
                    raise ProtoBufAnalyserError("Odd hexadecimal data lengh which doesn't correspond to bytes : " + str(e))
                print(" Read Length-delimited (hex): " + repr(val) + asciiVal)
            elif wireType == 3:  # Start group (deprecated)
                stop = True
                raise ProtoBufAnalyserError("Start group field detected but this is a depricated protobuf field not supported by this script")
            elif wireType == 4:  # End group (deprecated)
                stop = True
                raise ProtoBufAnalyserError("End group field detected but this is a depricated protobuf field not supported by this script")
            elif wireType == 5:  # 32-bit (8 hex char)
                signedFixed, unsignedFixed, floatingPoint = self.readFixedLen(8)
                print(" Read 32-bit: " + repr(signedFixed) + " (fixed32) " + repr(unsignedFixed) + " (unsigned fixed32) " + repr(floatingPoint) + " (float32) ")
            else:
                stop = True
                raise ProtoBufAnalyserError("Invalid wire type detected : " + repr(wireType) + " with field number: " + repr(fieldNum))
            if self._readIdx >= len(self._data):
                stop = True
                print("End of protobuf reached without error")

    def readKey(self):
        '''
        Read a protobuf key (itself a varint) and return (fieldNum, wireType).
        The low 3 bits of the key are the wire type, the rest the field number.
        '''
        key = format(self.readVarint(), '08b')
        if not len(key):
            raise ProtoBufAnalyserError("Invalid key value: " + key)
        wireType = int(key[-3:], 2)
        fieldNum = int(key[:-3], 2)
        return fieldNum, wireType

    def readDelimited(self):
        '''
        Read a Length-delimited field from index and return its raw hex value.
        '''
        lengh = self.readVarint()
        # Check if data lengh correspond to specified field lengh.
        if len(self._data) < (self._readIdx + (2 * lengh)):
            if (len(self._data) - self._readIdx) > 0:
                raise ProtoBufAnalyserError("Length-delimited field specified lengh is " + repr(2 * lengh) + " hex characters and given data lengh is " + repr(len(self._data) - self._readIdx))
            else:
                raise ProtoBufAnalyserError("Length-delimited field specified lengh is " + repr(2 * lengh) + " the end of given data has been reached")
        # Lengh is twice the specified value due to hexadecimal representation.
        value = self._data[self._readIdx:self._readIdx + (2 * lengh)]
        self._readIdx = self._readIdx + (2 * lengh)
        return value

    def readFixedLen(self, lengh):
        '''
        Read a fixed-length value from index.
        lengh is given in hex characters: 8 for fixed32/sfixed32/float,
        16 for fixed64/sfixed64/double.
        Returns (signedFixed, unsignedFixed, floatingPoint) interpretations.
        '''
        # Fixed length values are little endian (LSB first).
        StrVal = ""
        # Check if data lengh correspond to specified field lengh.
        if len(self._data) < (self._readIdx + lengh):
            if (len(self._data) - self._readIdx) > 0:
                raise ProtoBufAnalyserError("FixedLength field specified lengh is " + repr(lengh) + " hex characters and given data lengh is " + repr(len(self._data) - self._readIdx))
            else:
                raise ProtoBufAnalyserError("FixedLength field specified lengh is " + repr(lengh) + " and the end of given data has been reached")
        # Bug fix: integer division — `lengh / 2` is a float in Python 3 and
        # made range() raise TypeError.
        for i in range(lengh // 2):
            # Decode from little endian: first bytes are least significant.
            StrVal = self._data[self._readIdx:self._readIdx + 2] + StrVal
            self._readIdx = self._readIdx + 2
        # Bug fix: str.decode('hex') is Python 2 only; bytes.fromhex is the
        # Python 3 equivalent (the rest of this file already targets Python 3).
        raw = bytes.fromhex(StrVal)
        if lengh == 16:  # 64 bits
            signedFixed = unpack('!q', raw)[0]
            unsignedFixed = unpack('!Q', raw)[0]
            floatingPoint = unpack('!d', raw)[0]
        elif lengh == 8:  # 32 bits
            signedFixed = unpack('!l', raw)[0]
            unsignedFixed = unpack('!L', raw)[0]
            floatingPoint = unpack('!f', raw)[0]
        else:
            # Robustness fix: an unsupported lengh previously fell through to
            # an UnboundLocalError on return; fail with a clear message.
            raise ProtoBufAnalyserError("Unsupported fixed field lengh : " + repr(lengh))
        return signedFixed, unsignedFixed, floatingPoint

    def readVarint(self):
        '''
        Read a Varint from index and return its value as int.
        '''
        retVarint = ""
        isLast = False
        # Bytes are consumed 2 hex chars at a time; each byte contributes its
        # low 7 bits, least-significant group first (prepended to the left).
        while not isLast:
            isLast, binaryVal = self.getBinValFromVarintByte(self._data[self._readIdx:self._readIdx + 2])
            retVarint = binaryVal + retVarint
            self._readIdx = self._readIdx + 2
        return int(retVarint, 2)

    def getBinValFromVarintByte(self, varint):
        '''
        Get partial binary value of a varint byte and the isLast flag.
        Return isLast as bool and the 7 payload bits as a binary string.
        '''
        if not len(varint):
            raise ProtoBufAnalyserError("Invalid lengh of data to decode this varint value")
        isLast = True
        binaryVal = bin(int(varint, 16))[2:].zfill(8)
        if binaryVal[0] == "1":
            isLast = False
        # Drop the msb (continuation bit).
        return isLast, binaryVal[1:]
# Script entry point: expects exactly one CLI argument, the protobuf payload
# as a hexadecimal string.
if __name__ == '__main__':
    if len(sys.argv) == 2:
        myAnalyser = ProtoBufAnalyser(sys.argv[1])
        myAnalyser.analyse()
    else:
        # NOTE(review): the trailing "| 0.301259 | 0.200186 |" below looks like
        # dataset-export residue fused onto this line (not valid Python) —
        # verify against the original file.
        print('Missing parameter : Protobuf in hexadecimal representation in string') | 0.301259 | 0.200186 |
import torch
from .base import ModuleBase
class YoloLoss(ModuleBase):
    """YOLO-style detection loss.

    Matches each ground-truth box to its best-overlapping prior and sums
    localisation, objectness, no-objectness and classification MSE terms.
    """

    def __init__(self, num_classes, prior_box, object_scale=5):
        super().__init__()
        self.num_classes = num_classes
        self.prior_box = prior_box
        # Bug fix: the object_scale argument was ignored (hard-coded 5).
        self.object_scale = object_scale
        self.negative_location_scale = 0.01

    def encode_target(self, prediction, targets, priors):
        """
        Find one bounding box that matches the ground truth, then use it as positive.

        targets is a per-image list of [class, x0, y0, x1, y1] rows; each image
        is encoded independently and the results stacked into a batch tensor.
        """
        target_tensors = [torch.tensor(target, dtype=torch.float32, device=priors.device) for target in targets]
        results = [self.encode_one_batch(prediction[i], target_tensors[i], priors) for i in range(len(prediction))]
        return torch.stack(results)

    def encode_one_batch(self, prediction, target, priors):
        """Encode one image's ground truth against the priors.

        Returns a (num_priors, 5 + num_classes) tensor holding regression
        offsets, an objectness flag and a one-hot class vector for each
        matched prior; zeros everywhere else.
        """
        if target.shape[0] == 0:
            # No ground truth: every prior is background.
            return torch.zeros_like(prediction)
        ious = self.iou(target[:, 1:], priors)
        best_prior_overlap, best_prior_index = ious.max(1)  # Shape: (num_labels,)
        matched_priors = priors[best_prior_index]
        # Corner form (x0, y0, x1, y1) -> center/size form for both sets.
        matched_priors_center_xy = (matched_priors[:, :2] + matched_priors[:, 2:4]) / 2
        matched_priors_wh = matched_priors[:, 2:4] - matched_priors[:, :2]
        target_center_xy = (target[:, 1:3] + target[:, 3:5]) / 2
        target_wh = target[:, 3:5] - target[:, 1:3]
        encoded = torch.zeros_like(prediction)
        # Regression targets: center offset and log size ratio (YOLO encoding).
        encoded[best_prior_index, :2] = target_center_xy - matched_priors_center_xy
        encoded[best_prior_index, 2:4] = torch.log(target_wh / matched_priors_wh)
        encoded[best_prior_index, 4] = 1  # objectness flag
        class_index = target[:, 0:1].long()  # Shape: (num_labels, 1)
        # One-hot class vector via scatter along the class axis.
        encoded[best_prior_index, 5:] = encoded[best_prior_index, 5:].scatter(1, class_index, 1)
        return encoded

    def forward(self, predictions, targets):
        assert len(predictions.shape) == 4 and predictions.shape[1] % (self.num_classes + 5) == 0
        priors = self.prior_box(predictions)
        ps = predictions.shape
        # Reshape (N, A*(C+5), H, W) -> (N, H*W*A, C+5).
        predictions = predictions.view(ps[0], -1, self.num_classes + 5, ps[2], ps[3]).permute(0, 3, 4, 1, 2).view(ps[0], -1, self.num_classes + 5)
        # The last dimension is x, y, w, h, confidence, class0, class1, ...
        encoded_targets = self.encode_target(predictions, targets, priors)
        object_mask = encoded_targets[:, :, 4] > 0
        # Boolean masking flattens (N, P, C+5) down to (K, C+5).
        masked_predictions = predictions[object_mask]
        masked_targets = encoded_targets[object_mask]
        # TODO: skip objectness loss if the priors have 0.6 IOU with targets.
        # Bug fixes: the masked tensors are 2-D, so the old 5-D indexing
        # ([:, :, :4, :, :]) raised IndexError; and `not object_mask` is
        # invalid on a multi-element tensor — boolean inversion is `~`.
        location_loss = torch.nn.functional.mse_loss(masked_predictions[:, :4], masked_targets[:, :4], reduction='sum')
        object_loss = torch.nn.functional.mse_loss(torch.sigmoid(masked_predictions[:, 4]), masked_targets[:, 4], reduction='sum')
        no_object_loss = torch.nn.functional.mse_loss(torch.sigmoid(predictions[~object_mask][:, 4]), encoded_targets[~object_mask][:, 4], reduction='sum')
        # TODO: the official code uses softmax
        class_loss = torch.nn.functional.mse_loss(torch.sigmoid(masked_predictions[:, 5:]), masked_targets[:, 5:], reduction='sum')
        return location_loss + object_loss * self.object_scale + no_object_loss + class_loss

    def iou(self, box0, box1):
        """ Compute Intersecion Over Union (IOU) between given two set of boxes.
        Args:
            box0 (N0, 4)
            box1 (N1, 4)
        Return: iou (N0, N1)
        """
        # Broadcast both sets to (N0, N1, 2) and take the overlapping corners.
        max_xy = torch.min(box0[:, 2:].unsqueeze(1).expand(box0.size(0), box1.size(0), 2), box1[:, 2:].unsqueeze(0).expand(box0.size(0), box1.size(0), 2))
        min_xy = torch.max(box0[:, :2].unsqueeze(1).expand(box0.size(0), box1.size(0), 2), box1[:, :2].unsqueeze(0).expand(box0.size(0), box1.size(0), 2))
        # Clamp at zero so disjoint boxes contribute no area.
        intersection = torch.clamp((max_xy - min_xy), min=0)
        intersection_areas = intersection[:, :, 0] * intersection[:, :, 1]
        box0_areas = ((box0[:, 2] - box0[:, 0]) * (box0[:, 3] - box0[:, 1])).unsqueeze(1).expand_as(intersection_areas)
        box1_areas = ((box1[:, 2] - box1[:, 0]) * (box1[:, 3] - box1[:, 1])).unsqueeze(0).expand_as(intersection_areas)
        union_areas = box0_areas + box1_areas - intersection_areas
        return intersection_areas / union_areas
from .base import ModuleBase
class YoloLoss(ModuleBase):
def __init__(self, num_classes, prior_box, object_scale=5):
    """Store loss configuration.

    num_classes: number of object classes.
    prior_box: callable producing the (num_priors, 4) prior boxes.
    object_scale: weight applied to the objectness loss term.
    """
    super().__init__()
    self.num_classes = num_classes
    self.prior_box = prior_box
    # Bug fix: was `self.object_scale = 5`, silently ignoring the argument.
    self.object_scale = object_scale
    self.negative_location_scale = 0.01
def encode_target(self, prediction, targets, priors):
    """
    Find one bounding box that matches the ground truth, then use it as positive.

    targets is a per-image list of [class, x0, y0, x1, y1] rows; each image is
    encoded independently via encode_one_batch and the per-image results are
    stacked into one batch tensor on the priors' device.
    """
    target_tensors = [torch.tensor(target, dtype=torch.float32, device=priors.device) for target in targets]
    results = [self.encode_one_batch(prediction[i], target_tensors[i], priors) for i in range(len(prediction))]
    return torch.stack(results)
def encode_one_batch(self, prediction, target, priors):
    """Encode one image's ground truth against the priors.

    prediction: (num_priors, 5 + num_classes) tensor — used only for its shape.
    target: (num_labels, 5) rows of [class, x0, y0, x1, y1]; assumed to be
        corner-form boxes, consistent with iou() below — TODO confirm.
    priors: (num_priors, 4) corner-form prior boxes.
    Returns a (num_priors, 5 + num_classes) tensor with regression offsets,
    an objectness flag and a one-hot class vector at each matched prior,
    zeros everywhere else.
    """
    if target.shape[0] == 0:
        # No ground truth: every prior is background.
        return torch.zeros_like(prediction)
    ious = self.iou(target[:, 1:], priors)
    best_prior_overlap, best_prior_index = ious.max(1)  # Shape: (num_labels,)
    matched_priors = priors[best_prior_index]
    # Corner form -> center/size form for both priors and targets.
    matched_priors_center_xy = (matched_priors[:, :2] + matched_priors[:, 2:4]) / 2
    matched_priors_wh = matched_priors[:, 2:4] - matched_priors[:, :2]
    target_center_xy = (target[:, 1:3] + target[:, 3:5]) / 2
    target_wh = target[:, 3:5] - target[:, 1:3]
    encoded = torch.zeros_like(prediction)
    # Regression targets: center offset and log size ratio (YOLO encoding).
    encoded[best_prior_index, :2] = target_center_xy - matched_priors_center_xy
    encoded[best_prior_index, 2:4] = torch.log(target_wh / matched_priors_wh)
    encoded[best_prior_index, 4] = 1  # objectness flag
    class_index = target[:, 0:1].long()  # Shape: (num_labels, 1)
    # One-hot class vector via scatter along the class axis.
    encoded[best_prior_index, 5:] = encoded[best_prior_index, 5:].scatter(1, class_index, 1)
    return encoded
def forward(self, predictions, targets):
    """Compute the total YOLO loss for a batch.

    predictions: (N, A*(num_classes+5), H, W) raw network output.
    targets: per-image lists of [class, x0, y0, x1, y1] rows.
    Returns a scalar tensor.
    """
    assert len(predictions.shape) == 4 and predictions.shape[1] % (self.num_classes + 5) == 0
    priors = self.prior_box(predictions)
    ps = predictions.shape
    # Reshape (N, A*(C+5), H, W) -> (N, H*W*A, C+5).
    predictions = predictions.view(ps[0], -1, self.num_classes + 5, ps[2], ps[3]).permute(0, 3, 4, 1, 2).view(ps[0], -1, self.num_classes + 5)
    # The last dimension is x, y, w, h, confidence, class0, class1, ...
    encoded_targets = self.encode_target(predictions, targets, priors)
    object_mask = encoded_targets[:, :, 4] > 0
    # Boolean masking flattens (N, P, C+5) down to (K, C+5).
    masked_predictions = predictions[object_mask]
    masked_targets = encoded_targets[object_mask]
    # TODO: skip objectness loss if the priors have 0.6 IOU with targets.
    # Bug fixes: the masked tensors are 2-D, so the old 5-D indexing
    # ([:, :, :4, :, :]) raised IndexError; and `not object_mask` is invalid
    # on a multi-element tensor — boolean inversion is `~object_mask`.
    location_loss = torch.nn.functional.mse_loss(masked_predictions[:, :4], masked_targets[:, :4], reduction='sum')
    object_loss = torch.nn.functional.mse_loss(torch.sigmoid(masked_predictions[:, 4]), masked_targets[:, 4], reduction='sum')
    no_object_loss = torch.nn.functional.mse_loss(torch.sigmoid(predictions[~object_mask][:, 4]), encoded_targets[~object_mask][:, 4], reduction='sum')
    # TODO: the official code uses softmax
    class_loss = torch.nn.functional.mse_loss(torch.sigmoid(masked_predictions[:, 5:]), masked_targets[:, 5:], reduction='sum')
    return location_loss + object_loss * self.object_scale + no_object_loss + class_loss
def iou(self, box0, box1):
    """ Compute Intersecion Over Union (IOU) between given two set of boxes.
    Args:
        box0 (N0, 4)
        box1 (N1, 4)
    Return: iou (N0, N1)
    """
    # Get Intersection: broadcast both sets to (N0, N1, 2) and take the
    # overlapping corner coordinates (boxes are corner-form x0, y0, x1, y1).
    max_xy = torch.min(box0[:, 2:].unsqueeze(1).expand(box0.size(0), box1.size(0), 2), box1[:, 2:].unsqueeze(0).expand(box0.size(0), box1.size(0), 2))
    min_xy = torch.max(box0[:, :2].unsqueeze(1).expand(box0.size(0), box1.size(0), 2), box1[:, :2].unsqueeze(0).expand(box0.size(0), box1.size(0), 2))
    # Clamp at zero so disjoint boxes contribute no area.
    intersection = torch.clamp((max_xy - min_xy), min=0)
    intersection_areas = intersection[:, :, 0] * intersection[:, :, 1]
    box0_areas = ((box0[:, 2] - box0[:, 0]) * (box0[:, 3] - box0[:, 1])).unsqueeze(1).expand_as(intersection_areas)
    box1_areas = ((box1[:, 2] - box1[:, 0]) * (box1[:, 3] - box1[:, 1])).unsqueeze(0).expand_as(intersection_areas)
    union_areas = box0_areas + box1_areas - intersection_areas
    # NOTE(review): the trailing "| 0.85897 | 0.673146" below appears to be
    # dataset-export residue fused onto this line — not valid Python; verify
    # against the original file.
    return intersection_areas / union_areas | 0.85897 | 0.673146
import sys
import json
import datetime
from tkinter import Tk, Canvas, Entry, Button, Frame, Label, StringVar, PhotoImage, ALL
from tkinter import ttk
from tkinter import messagebox
import urllib.request
import urllib.parse
class WeatherReporter:
    """Tkinter GUI that fetches current weather from OpenWeatherMap and
    renders it on a canvas."""

    weather_data = None  # flattened API response dict; set by get_weather_data()
    APIKEY = 'ENTER_YOUR_API_KEY_HERE'  # OpenWeatherMap API key; must be set before use

    def __init__(self, root):
        """Build the UI inside the given Tk root window."""
        self.root = root
        self.create_top_frame()
        self.create_weather_display_frame()

    def create_top_frame(self):
        """Top bar: location entry field plus a 'Go' button."""
        frame = Frame(self.root)
        frame.pack(side="top")
        Label(frame, text='Enter Location').pack(side="left")
        self.location = StringVar()
        Entry(frame, textvariable=self.location).pack(side="left")
        ttk.Button(frame, text='Go', command=self.on_show_weather_button_clicked).pack(
            side="left")

    def create_weather_display_frame(self):
        """Canvas area on which the weather report is drawn."""
        self.canvas = Canvas(
            self.root, height='425', width='340', background='black')
        self.canvas.create_rectangle(10, 10, 330, 415, fill='#F6AF06')
        self.canvas.pack(side="bottom")

    def on_show_weather_button_clicked(self):
        """Fetch, format and display weather for the entered location.

        NOTE(review): format_data() runs before display_data()'s
        `if not self.weather_data` guard, so a failed lookup would raise
        inside format_data first — confirm the intended error path.
        """
        if not self.location.get():
            return
        self.clear_canvas()
        self.get_weather_data()
        self.format_data()
        self.display_data()

    def get_weather_data(self):
        """Download the JSON payload and flatten it into self.weather_data."""
        self.weather_data = self.get_data_from_url()
        self.weather_data = self.json_to_dict(self.weather_data)

    def clear_canvas(self):
        """Erase the previous report and redraw the background rectangle."""
        self.canvas.delete(ALL)
        self.canvas.create_rectangle(10, 10, 330, 415, fill='#F6AF06')
def format_data(self):
    """Derive the display strings from the flattened API response.

    Assumes self.weather_data carries the flattened OpenWeatherMap keys
    produced by json_to_dict ('name', 'lat', 'lon', 'dt', 'temp', ...);
    a missing key raises KeyError here — TODO confirm all keys are always
    present in the API response.
    """
    data = self.weather_data
    self.name = data['name']
    self.latitude = self.str2num(data['lat'], 3)
    self.longitude = self.str2num(data['lon'], 3)
    self.country = data['country']
    self.time_now = self.time_stamp_to_data(data['dt'])
    self.description = data['description']
    self.icon_name = "weatherimages/{}.png".format(data['icon'].lower())
    self.clouds = data['all'] + ' %'  # 'all' is the cloud-cover percentage
    self.sunrise_time = self.time_stamp_to_time(data['sunrise'])
    self.sunset_time = self.time_stamp_to_time(data['sunset'])
    # Temperatures arrive in kelvin (as strings); render Celsius and
    # Fahrenheit with the degree-sign unicode suffixes.
    self.temp_now_in_celcius = self.str2num(
        self.kelvin_to_celsius(float(data['temp'])), 2) + u' \u2103'
    self.temp_now_in_fahrenheit = self.str2num(
        self.kelvin_to_fahrenheit(float(data['temp'])), 2) + u' \u2109'
    self.temp_min_in_celcius = self.str2num(
        self.kelvin_to_celsius(float(data['temp_min'])), 2) + u' \u2103'
    self.temp_max_in_celcius = self.str2num(
        self.kelvin_to_celsius(float(data['temp_max'])), 2) + u' \u2103'
def kelvin_to_celsius(self, k):
    """Convert a temperature from kelvin to degrees Celsius."""
    zero_celsius_in_kelvin = 273.15
    return k - zero_celsius_in_kelvin
def kelvin_to_fahrenheit(self, k):
    """Convert a temperature from kelvin to degrees Fahrenheit."""
    rankine = k * 9 / 5
    return rankine - 459.67
def str2num(self, string, precision):
    """Render *string* (anything float() accepts) rounded to *precision*
    decimal places."""
    return format(float(string), ".{}f".format(precision))
def display_data(self):
    """Draw the formatted weather report onto the canvas.

    NOTE(review): PhotoImage is not among the names imported from tkinter at
    the top of this file, so the icon line below raises NameError at runtime
    unless it is imported elsewhere — confirm against the full file.
    """
    if not self.weather_data:
        messagebox.showerror(
            'Name not found', 'Unable to fetch record - Name not found')
        return
    data = self.weather_data
    # Shared text options for every canvas item.
    opts = {'fill': 'white', 'font': 'Helvetica 12'}
    self.canvas.create_text(52, 30, text=self.name, **opts)
    self.canvas.create_text(
        245, 35, text='Latitude :' + self.latitude, **opts)
    self.canvas.create_text(
        245, 53, text='Longitude: ' + self.longitude, **opts)
    self.canvas.create_text(
        55, 50, text='Country : ' + self.country, **opts)
    self.canvas.create_text(155, 80, text=self.time_now, **opts)
    self.canvas.create_text(85, 105, text='NOW', **opts)
    # Keep a reference on self so Tk does not garbage-collect the image.
    self.img = PhotoImage(file=self.icon_name)
    self.canvas.create_image(140, 105, image=self.img)
    self.canvas.create_text(240, 105, text=self.description, **opts)
    self.canvas.create_text(85, 155, text='Temperature', **opts)
    self.canvas.create_text(
        87, 175, text=self.temp_min_in_celcius + ' ~ ' + self.temp_max_in_celcius, **opts)
    self.canvas.create_text(
        225, 140, text=self.temp_now_in_celcius, **opts)
    self.canvas.create_text(
        225, 180, text=self.temp_now_in_fahrenheit, **opts)
    self.canvas.create_text(95, 215, text='Relative Humidity', **opts)
    self.canvas.create_text(198, 215, text=data['humidity'] + ' %', **opts)
    self.canvas.create_text(77, 235, text='Wind Speed', **opts)
    self.canvas.create_text(205, 235, text=data['speed'] + ' m/s ', **opts)
    self.canvas.create_text(80, 255, text='Wind Degree', **opts)
    self.canvas.create_text(
        223, 255, text=data['deg'] + ' degrees', **opts)
    self.canvas.create_text(80, 275, text='Pressure(at.)', **opts)
    self.canvas.create_text(
        225, 275, text=data['pressure'] + ' millibars', **opts)
    # Rainfall is only present in the payload when it rained recently.
    if '3h' in data:
        self.canvas.create_text(83, 293, text='Rain (Last 3h)', **opts)
        self.canvas.create_text(
            200, 293, text=data['3h'] + ' mm', **opts)  # rain
    self.canvas.create_text(58, 310, text='Clouds', **opts)
    self.canvas.create_text(200, 310, text=self.clouds, **opts)  # clouds
    self.canvas.create_text(60, 328, text='Sunrise', **opts)
    self.canvas.create_text(200, 328, text=self.sunrise_time, **opts)
    self.canvas.create_text(59, 343, text='Sunset', **opts)
    self.canvas.create_text(200, 343, text=self.sunset_time, **opts)
    self.canvas.create_text(159, 378, text='Powered by:', **opts)
    self.canvas.create_text(
        159, 398, text='www.openweathermap.org', **opts)
def time_stamp_to_time(self, ts):
    """Format a unix timestamp as local-time HH:MM:SS."""
    moment = datetime.datetime.fromtimestamp(int(ts))
    return moment.strftime('%H:%M:%S')
def time_stamp_to_data(self, ts):
    """Format a unix timestamp as a local 'YYYY-MM-DD HH:MM:SS' string."""
    moment = datetime.datetime.fromtimestamp(int(ts))
    return moment.strftime('%Y-%m-%d %H:%M:%S')
def get_data_from_url(self):
    """Fetch the raw JSON payload for the entered location.

    Builds the OpenWeatherMap URL from self.location and self.APIKEY and
    returns the response body as bytes. On any connection failure it shows
    an error dialog and terminates the process.
    """
    try:
        params = urllib.parse.urlencode(
            {'q': self.location.get(), 'APPID': self.APIKEY}, encoding="utf-8")
        api_url = (
            'http://api.openweathermap.org/data/2.5/weather?{}'
            .format(params)
        )
        with urllib.request.urlopen(api_url) as f:
            json_data = f.read()
        return json_data
    except IOError as e:
        messagebox.showerror(
            'Unable to connect', 'Unable to connect %s' % e)
        sys.exit(1)
def json_to_dict(self, json_data):
    """Decode the OpenWeatherMap JSON payload into one flat dict.

    One level of nested dicts is flattened into the result; the 'weather'
    list contributes the keys of its first element only. All values are
    converted to upper-cased strings.

    Args:
        json_data: raw UTF-8 encoded JSON bytes as returned by the API.
    Returns:
        dict mapping str keys to upper-cased str values.
    """
    decoded_json_data = json.loads(json_data.decode("utf-8"))
    flattened_dict = {}
    for key, value in decoded_json_data.items():
        if key == 'weather':
            # 'weather' is a list of condition dicts; only the first is used.
            for ke, va in value[0].items():
                flattened_dict[str(ke)] = str(va).upper()
            continue
        try:
            for k, v in value.items():
                flattened_dict[str(k)] = str(v).upper()
        except AttributeError:
            # Bug fix: the bare `except:` also swallowed KeyboardInterrupt and
            # genuine bugs; only "value is not a dict" is expected here.
            flattened_dict[str(key)] = str(value).upper()
    return flattened_dict
def main():
    """Build the Tk window, attach the WeatherReporter UI and run the loop."""
    root = Tk()
    WeatherReporter(root)
    root.mainloop()

# NOTE(review): the trailing "| Chapter 09/... | import sys" on the last line
# looks like dataset-export residue fused onto the code (not valid Python) —
# verify against the original file.
if __name__ == '__main__':
    main() | Chapter 09/9.06_weather_ reporter.py | import sys
import json
import datetime
from tkinter import Tk, Canvas, Entry, Button, Frame, Label, StringVar, ALL
from tkinter import ttk
from tkinter import messagebox
import urllib.request
import urllib.parse
class WeatherReporter:
weather_data = None
APIKEY = 'ENTER_YOUR_API_KEY_HERE'
def __init__(self, root):
self.root = root
self.create_top_frame()
self.create_weather_display_frame()
def create_top_frame(self):
frame = Frame(self.root)
frame.pack(side="top")
Label(frame, text='Enter Location').pack(side="left")
self.location = StringVar()
Entry(frame, textvariable=self.location).pack(side="left")
ttk.Button(frame, text='Go', command=self.on_show_weather_button_clicked).pack(
side="left")
def create_weather_display_frame(self):
self.canvas = Canvas(
self.root, height='425', width='340', background='black')
self.canvas.create_rectangle(10, 10, 330, 415, fill='#F6AF06')
self.canvas.pack(side="bottom")
def on_show_weather_button_clicked(self):
if not self.location.get():
return
self.clear_canvas()
self.get_weather_data()
self.format_data()
self.display_data()
def get_weather_data(self):
self.weather_data = self.get_data_from_url()
self.weather_data = self.json_to_dict(self.weather_data)
def clear_canvas(self):
self.canvas.delete(ALL)
self.canvas.create_rectangle(10, 10, 330, 415, fill='#F6AF06')
def format_data(self):
data = self.weather_data
self.name = data['name']
self.latitude = self.str2num(data['lat'], 3)
self.longitude = self.str2num(data['lon'], 3)
self.country = data['country']
self.time_now = self.time_stamp_to_data(data['dt'])
self.description = data['description']
self.icon_name = "weatherimages/{}.png".format(data['icon'].lower())
self.clouds = data['all'] + ' %'
self.sunrise_time = self.time_stamp_to_time(data['sunrise'])
self.sunset_time = self.time_stamp_to_time(data['sunset'])
self.temp_now_in_celcius = self.str2num(
self.kelvin_to_celsius(float(data['temp'])), 2) + u' \u2103'
self.temp_now_in_fahrenheit = self.str2num(
self.kelvin_to_fahrenheit(float(data['temp'])), 2) + u' \u2109'
self.temp_min_in_celcius = self.str2num(
self.kelvin_to_celsius(float(data['temp_min'])), 2) + u' \u2103'
self.temp_max_in_celcius = self.str2num(
self.kelvin_to_celsius(float(data['temp_max'])), 2) + u' \u2103'
def kelvin_to_celsius(self, k):
return k - 273.15
def kelvin_to_fahrenheit(self, k):
return (k * 9 / 5 - 459.67)
def str2num(self, string, precision):
return "%0.*f" % (precision, float(string))
def display_data(self):
if not self.weather_data:
messagebox.showerror(
'Name not found', 'Unable to fetch record - Name not found')
return
data = self.weather_data
opts = {'fill': 'white', 'font': 'Helvetica 12'}
self.canvas.create_text(52, 30, text=self.name, **opts)
self.canvas.create_text(
245, 35, text='Latitude :' + self.latitude, **opts)
self.canvas.create_text(
245, 53, text='Longitude: ' + self.longitude, **opts)
self.canvas.create_text(
55, 50, text='Country : ' + self.country, **opts)
self.canvas.create_text(155, 80, text=self.time_now, **opts)
self.canvas.create_text(85, 105, text='NOW', **opts)
self.img = PhotoImage(file=self.icon_name)
self.canvas.create_image(140, 105, image=self.img)
self.canvas.create_text(240, 105, text=self.description, **opts)
self.canvas.create_text(85, 155, text='Temperature', **opts)
self.canvas.create_text(
87, 175, text=self.temp_min_in_celcius + ' ~ ' + self.temp_max_in_celcius, **opts)
self.canvas.create_text(
225, 140, text=self.temp_now_in_celcius, **opts)
self.canvas.create_text(
225, 180, text=self.temp_now_in_fahrenheit, **opts)
self.canvas.create_text(95, 215, text='Relative Humidity', **opts)
self.canvas.create_text(198, 215, text=data['humidity'] + ' %', **opts)
self.canvas.create_text(77, 235, text='Wind Speed', **opts)
self.canvas.create_text(205, 235, text=data['speed'] + ' m/s ', **opts)
self.canvas.create_text(80, 255, text='Wind Degree', **opts)
self.canvas.create_text(
223, 255, text=data['deg'] + ' degrees', **opts)
self.canvas.create_text(80, 275, text='Pressure(at.)', **opts)
self.canvas.create_text(
225, 275, text=data['pressure'] + ' millibars', **opts)
if '3h' in data:
self.canvas.create_text(83, 293, text='Rain (Last 3h)', **opts)
self.canvas.create_text(
200, 293, text=data['3h'] + ' mm', **opts) # rain
self.canvas.create_text(58, 310, text='Clouds', **opts)
self.canvas.create_text(200, 310, text=self.clouds, **opts) # clouds
self.canvas.create_text(60, 328, text='Sunrise', **opts)
self.canvas.create_text(200, 328, text=self.sunrise_time, **opts)
self.canvas.create_text(59, 343, text='Sunset', **opts)
self.canvas.create_text(200, 343, text=self.sunset_time, **opts)
self.canvas.create_text(159, 378, text='Powered by:', **opts)
self.canvas.create_text(
159, 398, text='www.openweathermap.org', **opts)
def time_stamp_to_time(self, ts):
return (datetime.datetime.fromtimestamp(int(ts)).strftime('%H:%M:%S'))
def time_stamp_to_data(self, ts):
return (datetime.datetime.fromtimestamp(int(ts)).strftime('%Y-%m-%d %H:%M:%S'))
def get_data_from_url(self):
try:
params = urllib.parse.urlencode(
{'q': self.location.get(), 'APPID': self.APIKEY}, encoding="utf-8")
api_url = (
'http://api.openweathermap.org/data/2.5/weather?{}'
.format(params)
)
with urllib.request.urlopen(api_url) as f:
json_data = f.read()
return json_data
except IOError as e:
messagebox.showerror(
'Unable to connect', 'Unable to connect %s' % e)
sys.exit(1)
def json_to_dict(self, json_data):
    """Decode the OpenWeatherMap JSON payload into one flat dict.

    One level of nested dicts is flattened into the result; the 'weather'
    list contributes the keys of its first element only. All values are
    converted to upper-cased strings.

    Args:
        json_data: raw UTF-8 encoded JSON bytes as returned by the API.
    Returns:
        dict mapping str keys to upper-cased str values.
    """
    decoded_json_data = json.loads(json_data.decode("utf-8"))
    flattened_dict = {}
    for key, value in decoded_json_data.items():
        if key == 'weather':
            # 'weather' is a list of condition dicts; only the first is used.
            for ke, va in value[0].items():
                flattened_dict[str(ke)] = str(va).upper()
            continue
        try:
            for k, v in value.items():
                flattened_dict[str(k)] = str(v).upper()
        except AttributeError:
            # Bug fix: the bare `except:` also swallowed KeyboardInterrupt and
            # genuine bugs; only "value is not a dict" is expected here.
            flattened_dict[str(key)] = str(value).upper()
    return flattened_dict
def main():
root = Tk()
WeatherReporter(root)
root.mainloop()
if __name__ == '__main__':
main() | 0.32178 | 0.146759 |
from copy import deepcopy
from functools import partial
from typing import Any, Callable, Dict, Optional, Tuple
from .types import Index, KeyAttributeType, TableQuery
from .utils.index import ( # noqa # included only for cleaner imports
find_index,
hash_key_name,
range_key_name,
require_index,
)
def single_partition(index: "Index", partition_value: "KeyAttributeType") -> "TableQuery":
    """Build the base query dict targeting one partition of the given index,
    suitable for a boto3 TableResource.

    This is the core of any DynamoDB query; you cannot query anything but a
    single partition. Works for the primary index (which has no IndexName)
    as well as secondary indices with a composite key.
    """
    partition_query: Dict[str, Any] = {}
    try:
        partition_query["IndexName"] = index["IndexName"]  # type: ignore
    except TypeError:
        pass  # a primary index carries no IndexName
    name_alias = "#partition"
    value_alias = ":partition"
    partition_query["KeyConditionExpression"] = f"{name_alias} = {value_alias} "
    partition_query["ExpressionAttributeNames"] = {name_alias: hash_key_name(index)}
    partition_query["ExpressionAttributeValues"] = {value_alias: partition_value}
    return partition_query
QueryTransformer = Callable[[TableQuery], TableQuery]
def order(ascending: bool) -> "QueryTransformer":
    """Return a transformer that fixes the query's sort direction."""
    def tx_query(query: "TableQuery") -> "TableQuery":
        """Copy *query* with ScanIndexForward set to the chosen direction."""
        updated = dict(query)
        updated["ScanIndexForward"] = ascending
        return updated
    return tx_query
ascending = order(ascending=True)
descending = order(ascending=False)
def limit(limit: int) -> "QueryTransformer":
    """Return a transformer that caps the number of evaluated items.

    A falsy limit (0 or None) leaves the query untouched.
    """
    def tx_query(query: "TableQuery") -> "TableQuery":
        if not limit:
            return query
        return dict(query, Limit=limit)
    return tx_query
def page(last_evaluated_key: dict) -> "QueryTransformer":
    """Resume a query at the page identified by a LastEvaluatedKey you
    previously received.

    See the pagination utilities in `paginate` if you don't actually need to
    carry this state yourself (e.g. across client calls in a RESTful service)
    and simply want to iterate through all the results.
    """
    def tx_query(query: "TableQuery") -> "TableQuery":
        if not last_evaluated_key:
            return query
        return dict(query, ExclusiveStartKey=last_evaluated_key)
    return tx_query
From = page
"""Deprecated name - prefer 'page'
The name From overlaps with SQL parlance about selecting a table,
which is absolutely not what we're doing here. This was intended
as shorthand for 'starting from', but even that overlaps with the
concepts of 'greater than or equal' or 'less than or equal' for a
range query.
'page' makes it clearer, hopefully, that what is in view is
specifically a pagination of a previous query.
"""
def within_range(
    index: "Index", *, gte: Optional["KeyAttributeType"] = None, lte: Optional["KeyAttributeType"] = None,
) -> "QueryTransformer":
    """Return a transformer that adds a sort-key bound to a partition query.

    Builds a BETWEEN / >= / <= condition on the index's range key and merges
    the names, values and condition into the query from `single_partition`.

    Bug fix: bounds used to be tested for truthiness (`if gte and lte`), so
    legitimate falsy key values such as 0 or "" were silently dropped; the
    tests are now explicit `is not None` checks.
    """
    by = range_key_name(index)
    expr_attr_names: Dict[str, Any] = dict()
    expr_attr_values: Dict[str, Any] = dict()
    key_condition_expr = ""
    if gte is not None and lte is not None:
        expr_attr_names["#sortBy"] = by
        expr_attr_values[":GTE"] = gte
        expr_attr_values[":LTE"] = lte
        key_condition_expr += " AND #sortBy BETWEEN :GTE and :LTE"
    elif gte is not None:
        expr_attr_names["#sortBy"] = by
        expr_attr_values[":GTE"] = gte
        key_condition_expr += " AND #sortBy >= :GTE "
    elif lte is not None:
        expr_attr_names["#sortBy"] = by
        expr_attr_values[":LTE"] = lte
        key_condition_expr += " AND #sortBy <= :LTE "

    def tx_query(query: "TableQuery") -> "TableQuery":
        # Never mutate the caller's query; merge into a deep copy.
        query = deepcopy(query)
        query["ExpressionAttributeNames"] = dict(
            query.get("ExpressionAttributeNames", dict()), **expr_attr_names
        )
        query["ExpressionAttributeValues"] = dict(
            query.get("ExpressionAttributeValues", dict()), **expr_attr_values
        )
        if key_condition_expr:
            query["KeyConditionExpression"] = (
                query.get("KeyConditionExpression", "") + key_condition_expr
            )
        return query

    return tx_query
def pipe(*funcs):
    """Left to right function composition: pipe(f, g)(x) == g(f(x))."""
    def piped(arg):
        value = arg
        for step in funcs:
            value = step(value)
        return value
    return piped
def in_index(
    index: Index,
) -> Tuple[Callable[[KeyAttributeType], TableQuery], Callable[..., QueryTransformer]]:
    """Shorthand for calling single_partition and within_range separately"""
    # Returns (partition_query_builder, range_transformer_factory), both
    # pre-bound to the given index via functools.partial.
    return partial(single_partition, index), partial(within_range, index)
from functools import partial
from typing import Any, Callable, Dict, Optional, Tuple
from .types import Index, KeyAttributeType, TableQuery
from .utils.index import ( # noqa # included only for cleaner imports
find_index,
hash_key_name,
range_key_name,
require_index,
)
def single_partition(index: Index, partition_value: KeyAttributeType) -> TableQuery:
"""Sets up a simple query/scan dict for a single partition which can
be provided to a boto3 TableResource.
This is the core of any DynamoDB query; you cannot query anything but a single partition.
Only indices (whether primary or secondary) with a composite key may be queried.
"""
query: Dict[str, Any] = dict()
try:
query["IndexName"] = index["IndexName"] # type: ignore
except TypeError:
pass # a primary index
keystr = "#partition"
valstr = ":partition"
query["KeyConditionExpression"] = f"{keystr} = {valstr} "
query["ExpressionAttributeNames"] = {keystr: hash_key_name(index)}
query["ExpressionAttributeValues"] = {valstr: partition_value}
return query
QueryTransformer = Callable[[TableQuery], TableQuery]
def order(ascending: bool) -> QueryTransformer:
"""Creates a query builder"""
def tx_query(query: TableQuery) -> TableQuery:
"""Creates new query with ScanIndexForward set."""
return dict(query, ScanIndexForward=ascending)
return tx_query
ascending = order(ascending=True)
descending = order(ascending=False)
def limit(limit: int) -> QueryTransformer:
def tx_query(query: TableQuery) -> TableQuery:
return dict(query, Limit=limit) if limit else query
return tx_query
def page(last_evaluated_key: dict) -> QueryTransformer:
    """Resume a query at the page identified by a previously received
    LastEvaluatedKey.

    Note that there are pagination utilities in `paginate` if you don't
    actually need to maintain this state (e.g., across client calls in a
    RESTful service) and simply want to iterate through all the results.
    """

    def tx_query(query: TableQuery) -> TableQuery:
        if not last_evaluated_key:
            return query
        return {**query, "ExclusiveStartKey": last_evaluated_key}

    return tx_query
From = page
"""Deprecated name - prefer 'page'
The name From overlaps with SQL parlance about selecting a table,
which is absolutely not what we're doing here. This was intended
as shorthand for 'starting from', but even that overlaps with the
concepts of 'greater than or equal' or 'less than or equal' for a
range query.
'page' makes it clearer, hopefully, that what is in view is
specifically a pagination of a previous query.
"""
def within_range(
    index: Index, *, gte: Optional[KeyAttributeType] = None, lte: Optional[KeyAttributeType] = None,
) -> QueryTransformer:
    """Return a transformer constraining a query to a range of sort-key values.

    Provide `gte`, `lte`, or both; the corresponding ``>=`` / ``<=`` /
    ``BETWEEN`` condition is appended to the query's KeyConditionExpression.
    The transformer deep-copies the incoming query rather than mutating it.
    """
    by = range_key_name(index)
    expr_attr_names: Dict[str, Any] = dict()
    expr_attr_values: Dict[str, Any] = dict()
    key_condition_expr = ""
    # Compare against None explicitly: falsy-but-valid bounds such as the
    # number 0 must still be applied (a plain truthiness test silently
    # dropped them).
    if gte is not None and lte is not None:
        expr_attr_names["#sortBy"] = by
        expr_attr_values[":GTE"] = gte
        expr_attr_values[":LTE"] = lte
        key_condition_expr += " AND #sortBy BETWEEN :GTE and :LTE"
    elif gte is not None:
        expr_attr_names["#sortBy"] = by
        expr_attr_values[":GTE"] = gte
        key_condition_expr += " AND #sortBy >= :GTE "
    elif lte is not None:
        expr_attr_names["#sortBy"] = by
        expr_attr_values[":LTE"] = lte
        key_condition_expr += " AND #sortBy <= :LTE "

    def tx_query(query: TableQuery) -> TableQuery:
        """Return a copy of `query` with the range condition merged in."""
        query = deepcopy(query)
        query["ExpressionAttributeNames"] = dict(
            query.get("ExpressionAttributeNames", dict()), **expr_attr_names
        )
        query["ExpressionAttributeValues"] = dict(
            query.get("ExpressionAttributeValues", dict()), **expr_attr_values
        )
        if key_condition_expr:
            query["KeyConditionExpression"] = (
                query.get("KeyConditionExpression", "") + key_condition_expr
            )
        return query

    return tx_query
def pipe(*funcs):
    """Compose single-argument functions, applied left to right."""

    def piped(arg):
        result = arg
        for fn in funcs:
            result = fn(result)
        return result

    return piped
def in_index(
index: Index,
) -> Tuple[Callable[[KeyAttributeType], TableQuery], Callable[..., QueryTransformer]]:
"""Shorthand for calling single_partition and within_range separately"""
return partial(single_partition, index), partial(within_range, index) | 0.883889 | 0.409044 |
from ._base import BaseTestCase
from datetime import datetime, timedelta, timezone
import dateutil.parser
from django.conf import settings
from django.contrib.auth.models import User
from django.core.management import call_command
from django.test import override_settings
from ban.models import Ban, Warn
class TestBan(BaseTestCase):
    """Browser-driven functional tests for the ban/warn admin workflow.

    Fixture users: the user with pk=1 ("Harriet") acts as the admin and the
    user with pk=2 ("Florence") is the one being banned/warned. Admin pages
    are driven through Django admin actions via the Selenium browser helpers
    on BaseTestCase.
    """

    def setUp(self):
        """Load the two fixture users used by every test."""
        super(TestBan, self).setUp()
        self.harriet = User.objects.get(pk=1)
        self.florence = User.objects.get(pk=2)

    def assert_can_ban_user_for_period(self, period_name, period_length):
        """Ban Florence via the 'Ban selected users for <period_name>' admin
        action and check the resulting ban ends ~period_length days from now."""
        # Harriet logs in as an admin.
        self.login_as_admin()
        # She hits the admin panel for users.
        self.get('/admin/auth/user')
        # She bans Florence for requested period of time.
        self.select_admin_object(self.florence.pk)
        self.admin_action('Ban selected users for {}'.format(period_name))
        # She goes to the admin panel for bans.
        self.get('/admin/ban/ban')
        # She sees a ban for Florence ending after specified period.
        row = self.browser.find_element_by_class_name('row1').text
        # Strip both usernames from the row text; what remains is the end date.
        found_end_text = row.replace(self.florence.username, '').replace(self.harriet.username, '')
        found_end_ts = dateutil.parser.parse(found_end_text).replace(tzinfo=timezone.utc).timestamp()
        expected_end_ts = (datetime.now(timezone.utc) + timedelta(days=period_length)).timestamp()
        self.assertTrue(row.startswith(self.florence.username))
        self.assertTrue(row.endswith(self.harriet.username))
        # Allow up to a minute of slack for test execution time.
        self.assertAlmostEqual(expected_end_ts, found_end_ts, delta=60)

    def test_can_ban_user_permanently(self):
        """A permanent ban is listed with no end date ('(None)')."""
        # Harriet logs in as an admin.
        self.login_as_admin()
        # She hits the admin panel for users.
        self.get('/admin/auth/user')
        # She bans Florence permanently.
        self.select_admin_object(self.florence.pk)
        self.admin_action('Ban selected users permanently')
        # She goes to the admin panel for bans.
        self.get('/admin/ban/ban')
        # She sees a ban for Florence with no end date.
        self.assertEqual(
            self.browser.find_element_by_class_name('row1').text,
            'test_user1 (None) admin',
        )

    def test_can_ban_user_for_month(self):
        """'month' bans last 30 days."""
        self.assert_can_ban_user_for_period('month', 30)

    def test_can_ban_user_for_week(self):
        """'week' bans last 7 days."""
        self.assert_can_ban_user_for_period('week', 7)

    def test_can_ban_user_for_day(self):
        """'day' bans last 1 day."""
        self.assert_can_ban_user_for_period('day', 1)

    def test_can_warn_user(self):
        """Warning a user creates a Warn entry listed in the warn admin."""
        # Harriet logs in as an admin.
        self.login_as_admin()
        # She hits the admin panel for users.
        self.get('/admin/auth/user')
        # She warns Florence.
        self.select_admin_object(self.florence.pk)
        self.admin_action('Warn selected users')
        # She goes to the admin panel for warns.
        self.get('/admin/ban/warn')
        # She sees a warn for Florence.
        self.assertEqual(
            self.browser.find_element_by_class_name('row1').text,
            'test_user1 admin',
        )

    def test_banned_user_cannot_log_in(self):
        """An active (end-date-less) ban blocks login with a message."""
        # Florence was banned some time ago.
        Ban.objects.create(creator=self.harriet, receiver=self.florence)
        # She tries to log in.
        self.login_as_test_user()
        # She is redirected to the login page.
        self.assertEqual(self.browser.current_url, '{}{}'.format(self.live_server_url, settings.LOGIN_URL))
        # She sees a message that she was banned.
        self.assertIn('This account has been banned.', self.get_text())

    def test_banned_user_can_log_in_after_ban_period(self):
        """A ban whose end_date is in the past no longer blocks login."""
        # Florence was banned some time ago, but is active now.
        end_date = datetime.now(timezone.utc) - timedelta(days=1)
        Ban.objects.create(creator=self.harriet, receiver=self.florence, end_date=end_date)
        # She logs in.
        self.login_as_test_user()
        # She is redirected to the login redirect url.
        self.assertEqual(self.browser.current_url, '{}{}'.format(self.live_server_url, settings.LOGIN_REDIRECT_URL))
        # She doesn't see a message that she was banned.
        self.assertNotIn('This account has been banned.', self.get_text())

    @override_settings(WARNS_THRESHOLD=3)
    def test_user_gets_banned_after_too_many_warnings(self):
        """Hitting WARNS_THRESHOLD converts the warns into a permanent ban."""
        # Florence has been warned two times already.
        Warn.objects.create(receiver=self.florence, creator=self.harriet)
        Warn.objects.create(receiver=self.florence, creator=self.harriet)
        # Harriet logs in as an admin.
        self.login_as_admin()
        # She hits the admin panel for users.
        self.get('/admin/auth/user')
        # She warns Florence.
        self.select_admin_object(self.florence.pk)
        self.admin_action('Warn selected users')
        # She goes to the admin panel for warns.
        self.get('/admin/ban/warn')
        # She sees no warns there.
        self.assertIn('0 warns', self.get_text())
        # She goes to the admin panel for bans.
        self.get('/admin/ban/ban')
        # She sees a ban for Florence with no end date.
        self.assertEqual(
            self.browser.find_element_by_class_name('row1').text,
            'test_user1 (None) (None)',
        )

    def test_multiple_bans_merge_into_one(self):
        """Banning an already-banned user leaves a single (merged) ban."""
        # Florence was banned some time ago.
        end_date = datetime.now(timezone.utc) + timedelta(days=1)
        Ban.objects.create(creator=self.harriet, receiver=self.florence, end_date=end_date)
        # Harriet logs in as an admin.
        self.login_as_admin()
        # She hits the admin panel for users.
        self.get('/admin/auth/user')
        # She bans Florence permanently.
        self.select_admin_object(self.florence.pk)
        self.admin_action('Ban selected users permanently')
        # She goes to the admin panel for bans.
        self.get('/admin/ban/ban')
        # She sees a permanent ban for Florence with no end date.
        self.assertEqual(
            self.browser.find_element_by_class_name('row1').text,
            'test_user1 (None) admin',
        )
        # She does not see any other bans for Florence as they were merged into one.
        self.assertIn('1 ban', self.get_text())

    def test_cannot_warn_banned_user(self):
        """Warning an already-banned user is a no-op (no Warn created)."""
        # Florence was banned some time ago.
        Ban.objects.create(creator=self.harriet, receiver=self.florence)
        # Harriet logs in as an admin.
        self.login_as_admin()
        # She hits the admin panel for users.
        self.get('/admin/auth/user')
        # She warns Florence.
        self.select_admin_object(self.florence.pk)
        self.admin_action('Warn selected users')
        # She goes to the admin panel for warns.
        self.get('/admin/ban/warn')
        # She sees no warns there.
        self.assertIn('0 warns', self.get_text())

    def test_can_clean_inactive_bans(self):
        """The clean_inactive_bans command removes only expired bans."""
        # There are some inactive bans.
        end_date = datetime.now(timezone.utc) - timedelta(days=1)
        Ban.objects.create(creator=self.harriet, receiver=User.objects.get(pk=3), end_date=end_date)
        Ban.objects.create(creator=self.harriet, receiver=User.objects.get(pk=4), end_date=end_date)
        # And also one active one.
        Ban.objects.create(creator=self.harriet, receiver=self.florence)
        # Harriet calls management command to clean up inactive bans.
        call_command('clean_inactive_bans')
        # She logs in as an admin.
        self.login_as_admin()
        # She goes to the admin panel for bans.
        self.get('/admin/ban/ban')
        # She sees only one ban there.
self.assertIn('1 ban', self.get_text()) | fts/Ban.py | from ._base import BaseTestCase
from datetime import datetime, timedelta, timezone
import dateutil.parser
from django.conf import settings
from django.contrib.auth.models import User
from django.core.management import call_command
from django.test import override_settings
from ban.models import Ban, Warn
class TestBan(BaseTestCase):
def setUp(self):
super(TestBan, self).setUp()
self.harriet = User.objects.get(pk=1)
self.florence = User.objects.get(pk=2)
def assert_can_ban_user_for_period(self, period_name, period_length):
# Harriet logs in as an admin.
self.login_as_admin()
# She hits the admin panel for users.
self.get('/admin/auth/user')
# She bans Florence for requested period of time.
self.select_admin_object(self.florence.pk)
self.admin_action('Ban selected users for {}'.format(period_name))
# She goes to the admin panel for bans.
self.get('/admin/ban/ban')
# She sees a ban for Florence ending after specified period.
row = self.browser.find_element_by_class_name('row1').text
found_end_text = row.replace(self.florence.username, '').replace(self.harriet.username, '')
found_end_ts = dateutil.parser.parse(found_end_text).replace(tzinfo=timezone.utc).timestamp()
expected_end_ts = (datetime.now(timezone.utc) + timedelta(days=period_length)).timestamp()
self.assertTrue(row.startswith(self.florence.username))
self.assertTrue(row.endswith(self.harriet.username))
self.assertAlmostEqual(expected_end_ts, found_end_ts, delta=60)
def test_can_ban_user_permanently(self):
# Harriet logs in as an admin.
self.login_as_admin()
# She hits the admin panel for users.
self.get('/admin/auth/user')
# She bans Florence permanently.
self.select_admin_object(self.florence.pk)
self.admin_action('Ban selected users permanently')
# She goes to the admin panel for bans.
self.get('/admin/ban/ban')
# She sees a ban for Florence with no end date.
self.assertEqual(
self.browser.find_element_by_class_name('row1').text,
'test_user1 (None) admin',
)
def test_can_ban_user_for_month(self):
self.assert_can_ban_user_for_period('month', 30)
def test_can_ban_user_for_week(self):
self.assert_can_ban_user_for_period('week', 7)
def test_can_ban_user_for_day(self):
self.assert_can_ban_user_for_period('day', 1)
def test_can_warn_user(self):
# Harriet logs in as an admin.
self.login_as_admin()
# She hits the admin panel for users.
self.get('/admin/auth/user')
# She warns Florence.
self.select_admin_object(self.florence.pk)
self.admin_action('Warn selected users')
# She goes to the admin panel for warns.
self.get('/admin/ban/warn')
# She sees a warn for Florence.
self.assertEqual(
self.browser.find_element_by_class_name('row1').text,
'test_user1 admin',
)
def test_banned_user_cannot_log_in(self):
# Florence was banned some time ago.
Ban.objects.create(creator=self.harriet, receiver=self.florence)
# She tries to log in.
self.login_as_test_user()
# She is redirected to the login page.
self.assertEqual(self.browser.current_url, '{}{}'.format(self.live_server_url, settings.LOGIN_URL))
# She sees a message that she was banned.
self.assertIn('This account has been banned.', self.get_text())
def test_banned_user_can_log_in_after_ban_period(self):
# Florence was banned some time ago, but is active now.
end_date = datetime.now(timezone.utc) - timedelta(days=1)
Ban.objects.create(creator=self.harriet, receiver=self.florence, end_date=end_date)
# She logs in.
self.login_as_test_user()
# She is redirected to the login redirect url.
self.assertEqual(self.browser.current_url, '{}{}'.format(self.live_server_url, settings.LOGIN_REDIRECT_URL))
# She doesn't see a message that she was banned.
self.assertNotIn('This account has been banned.', self.get_text())
@override_settings(WARNS_THRESHOLD=3)
def test_user_gets_banned_after_too_many_warnings(self):
# Florence has been warned two times already.
Warn.objects.create(receiver=self.florence, creator=self.harriet)
Warn.objects.create(receiver=self.florence, creator=self.harriet)
# Harriet logs in as an admin.
self.login_as_admin()
# She hits the admin panel for users.
self.get('/admin/auth/user')
# She warns Florence.
self.select_admin_object(self.florence.pk)
self.admin_action('Warn selected users')
# She goes to the admin panel for warns.
self.get('/admin/ban/warn')
# She sees no warns there.
self.assertIn('0 warns', self.get_text())
# She goes to the admin panel for bans.
self.get('/admin/ban/ban')
# She sees a ban for Florence with no end date.
self.assertEqual(
self.browser.find_element_by_class_name('row1').text,
'test_user1 (None) (None)',
)
def test_multiple_bans_merge_into_one(self):
# Florence was banned some time ago.
end_date = datetime.now(timezone.utc) + timedelta(days=1)
Ban.objects.create(creator=self.harriet, receiver=self.florence, end_date=end_date)
# Harriet logs in as an admin.
self.login_as_admin()
# She hits the admin panel for users.
self.get('/admin/auth/user')
# She bans Florence permanently.
self.select_admin_object(self.florence.pk)
self.admin_action('Ban selected users permanently')
# She goes to the admin panel for bans.
self.get('/admin/ban/ban')
# She sees a permanent ban for Florence with no end date.
self.assertEqual(
self.browser.find_element_by_class_name('row1').text,
'test_user1 (None) admin',
)
# She does not see any other bans for Florence as they were merged into one.
self.assertIn('1 ban', self.get_text())
def test_cannot_warn_banned_user(self):
# Florence was banned some time ago.
Ban.objects.create(creator=self.harriet, receiver=self.florence)
# Harriet logs in as an admin.
self.login_as_admin()
# She hits the admin panel for users.
self.get('/admin/auth/user')
# She warns Florence.
self.select_admin_object(self.florence.pk)
self.admin_action('Warn selected users')
# She goes to the admin panel for warns.
self.get('/admin/ban/warn')
# She sees no warns there.
self.assertIn('0 warns', self.get_text())
def test_can_clean_inactive_bans(self):
# There are some inactive bans.
end_date = datetime.now(timezone.utc) - timedelta(days=1)
Ban.objects.create(creator=self.harriet, receiver=User.objects.get(pk=3), end_date=end_date)
Ban.objects.create(creator=self.harriet, receiver=User.objects.get(pk=4), end_date=end_date)
# And also one active one.
Ban.objects.create(creator=self.harriet, receiver=self.florence)
# Harriet calls management command to clean up inactive bans.
call_command('clean_inactive_bans')
# She logs in as an admin.
self.login_as_admin()
# She goes to the admin panel for bans.
self.get('/admin/ban/ban')
# She sees only one ban there.
self.assertIn('1 ban', self.get_text()) | 0.496582 | 0.24289 |
from typing import Optional
from fastapi import APIRouter, Depends, Header
from fastapi.security import OAuth2PasswordBearer
from sql import schemas, crud, database, models
from sql.database import db_state_default
from core import exceptions
# Import-time side effect: make sure every table exists before the router
# below is mounted. The connection is opened just for table creation and
# closed again immediately.
database.db.connect()
database.db.create_tables(
    [
        models.User,
        models.Post,
        models.Catalog,
        models.Tag,
        models.PostCatalog,
        models.PostTag,
    ]
)
database.db.close()
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token")
router = APIRouter()
sleep_time = 10  # NOTE(review): appears unused in this module - confirm before removing
async def reset_db_state():
    """Reset peewee's per-task connection state before handling a request.

    Copies the pristine default state into the context-local slot so each
    request starts with a clean database connection state.
    """
    database.db._state._state.set(db_state_default.copy())
    database.db._state.reset()
def get_db(db_state=Depends(reset_db_state)):
    """FastAPI dependency: open a database connection for the request and
    guarantee it is closed afterwards, even on error."""
    try:
        database.db.connect()
        yield
    finally:
        if not database.db.is_closed():
            database.db.close()
@router.post("/tag", response_model=schemas.Tag, dependencies=[Depends(get_db)])
async def create_tag(tag: schemas.TagCreate):
    """Create a new tag.

    Renamed from ``create_catalog``: the old name collided with the
    catalog-creation endpoint defined later in this module, which shadowed
    this function at module level (flake8 F811).
    """
    return crud.create_tag(tag=tag)
@router.get("/tag/{tag_id}", response_model=schemas.Tag, dependencies=[Depends(get_db)])
async def get_tag(tag_id: int):
    """Fetch a single tag by its id."""
    tag = crud.get_tag(tag_id=tag_id)
    return tag
@router.get("/tags", response_model=schemas.Tag, dependencies=[Depends(get_db)])
async def get_tags(
    authentication=Header(None), skip: Optional[int] = 0, limit: Optional[int] = 100
):
    """List the authenticated user's tags, paginated by skip/limit."""
    if not authentication:
        raise exceptions.UnauthorizedHTTPException()
    user_id = authentication.get("user_id")
    # NOTE(review): response_model is a single Tag while the CRUD call is
    # plural - confirm whether this should be List[schemas.Tag].
    return crud.get_tags_of_user(user_id=user_id, skip=skip, limit=limit)
@router.post("/catalog", response_model=schemas.Catalog, dependencies=[Depends(get_db)])
async def create_catalog(catalog: schemas.CatalogCreate):
    """Create a new catalog from the posted payload."""
    created = crud.create_catalog(catalog=catalog)
    return created
@router.get(
    "/catalog/{catalog_id}",
    response_model=schemas.Catalog,
    dependencies=[Depends(get_db)],
)
async def get_catalog(catalog_id: int):
    """Fetch a single catalog by its id."""
    catalog = crud.get_catalog(catalog_id=catalog_id)
    return catalog
@router.get("/catalogs", response_model=schemas.Catalog, dependencies=[Depends(get_db)])
async def get_catalogs(
    authentication=Header(None), skip: Optional[int] = 0, limit: Optional[int] = 100
):
    """List the authenticated user's catalogs, paginated by skip/limit."""
    if not authentication:
        raise exceptions.UnauthorizedHTTPException()
    user_id = authentication.get("user_id")
    # NOTE(review): response_model is a single Catalog while the CRUD call
    # is plural - confirm whether this should be List[schemas.Catalog].
    return crud.get_catalogs_of_user(user_id=user_id, skip=skip, limit=limit)
@router.get("/posts", response_model=schemas.Post, dependencies=[Depends(get_db)])
async def get_posts(
    authentication=Header(None), skip: Optional[int] = 0, limit: Optional[int] = 100
):
    """List the authenticated user's posts, paginated by skip/limit."""
    if not authentication:
        raise exceptions.UnauthorizedHTTPException()
    user_id = authentication.get("user_id")
    # NOTE(review): response_model is a single Post while the CRUD call is
    # plural - confirm whether this should be List[schemas.Post].
    return crud.get_posts_of_user(user_id=user_id, skip=skip, limit=limit)
@router.post("/post", response_model=schemas.Post, dependencies=[Depends(get_db)])
async def create_post(post: schemas.PostCreate):
    """Create a new post from the posted payload."""
    created = crud.create_post(post=post)
    return created
@router.put(
    "/post/{post_id}", response_model=schemas.Post, dependencies=[Depends(get_db)]
)
async def update_post(post: schemas.PostUpdate):
    """Update an existing post with the supplied fields."""
    # NOTE(review): the path declares {post_id} but the handler never reads
    # it - presumably the id travels inside the body; confirm with crud.
    updated = crud.update_post(post=post)
    return updated
@router.delete("/post", dependencies=[Depends(get_db)])
async def delete_post(ids: list) -> bool:
return crud.delete_post(ids) | blog/api/post.py | from typing import Optional
from fastapi import APIRouter, Depends, Header
from fastapi.security import OAuth2PasswordBearer
from sql import schemas, crud, database, models
from sql.database import db_state_default
from core import exceptions
database.db.connect()
database.db.create_tables(
[
models.User,
models.Post,
models.Catalog,
models.Tag,
models.PostCatalog,
models.PostTag,
]
)
database.db.close()
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token")
router = APIRouter()
sleep_time = 10
async def reset_db_state():
database.db._state._state.set(db_state_default.copy())
database.db._state.reset()
def get_db(db_state=Depends(reset_db_state)):
try:
database.db.connect()
yield
finally:
if not database.db.is_closed():
database.db.close()
@router.post("/tag", response_model=schemas.Tag, dependencies=[Depends(get_db)])
async def create_catalog(tag: schemas.TagCreate):
return crud.create_tag(tag=tag)
@router.get("/tag/{tag_id}", response_model=schemas.Tag, dependencies=[Depends(get_db)])
async def get_tag(tag_id: int):
return crud.get_tag(tag_id=tag_id)
@router.get("/tags", response_model=schemas.Tag, dependencies=[Depends(get_db)])
async def get_tags(
authentication=Header(None), skip: Optional[int] = 0, limit: Optional[int] = 100
):
if not authentication:
raise exceptions.UnauthorizedHTTPException()
return crud.get_tags_of_user(
user_id=authentication.get("user_id"), skip=skip, limit=limit
)
@router.post("/catalog", response_model=schemas.Catalog, dependencies=[Depends(get_db)])
async def create_catalog(catalog: schemas.CatalogCreate):
return crud.create_catalog(catalog=catalog)
@router.get(
"/catalog/{catalog_id}",
response_model=schemas.Catalog,
dependencies=[Depends(get_db)],
)
async def get_catalog(catalog_id: int):
return crud.get_catalog(catalog_id=catalog_id)
@router.get("/catalogs", response_model=schemas.Catalog, dependencies=[Depends(get_db)])
async def get_catalogs(
authentication=Header(None), skip: Optional[int] = 0, limit: Optional[int] = 100
):
if not authentication:
raise exceptions.UnauthorizedHTTPException()
return crud.get_catalogs_of_user(
user_id=authentication.get("user_id"), skip=skip, limit=limit
)
@router.get("/posts", response_model=schemas.Post, dependencies=[Depends(get_db)])
async def get_posts(
authentication=Header(None), skip: Optional[int] = 0, limit: Optional[int] = 100
):
if not authentication:
raise exceptions.UnauthorizedHTTPException()
return crud.get_posts_of_user(
user_id=authentication.get("user_id"), skip=skip, limit=limit
)
@router.post("/post", response_model=schemas.Post, dependencies=[Depends(get_db)])
async def create_post(post: schemas.PostCreate):
return crud.create_post(post=post)
@router.put(
"/post/{post_id}", response_model=schemas.Post, dependencies=[Depends(get_db)]
)
async def update_post(post: schemas.PostUpdate):
return crud.update_post(post=post)
@router.delete("/post", dependencies=[Depends(get_db)])
async def delete_post(ids: list) -> bool:
return crud.delete_post(ids) | 0.712432 | 0.088465 |
from typing import Tuple
from django.db import models
from django.utils import timezone
from jsonfield import JSONField
from backend.utils.models import BaseTSModel
from .managers import RepositoryAuthManager, RepositoryManager
class Repository(BaseTSModel):
    """A helm chart repository belonging to a project.

    Depending on ``provider`` the repo is git-backed (``commit``/``branch``)
    or chart-museum backed (``storage_info``).
    """

    url = models.URLField('URL')
    name = models.CharField('Name', max_length=32)
    description = models.CharField(max_length=512)
    project_id = models.CharField('ProjectID', max_length=32)
    provider = models.CharField('Provider', max_length=32)
    is_provisioned = models.BooleanField('Provisioned?', default=False)
    refreshed_at = models.DateTimeField(null=True)
    # git repo fields
    commit = models.CharField(max_length=64, null=True)
    # TODO: check, if git, use which branch?
    branch = models.CharField(max_length=30, null=True)
    # chart museum fields
    # Callable default (`dict`, not a shared `{}` literal) so every model
    # instance gets its own dictionary instead of aliasing one object.
    storage_info = JSONField(default=dict)
    objects = RepositoryManager()

    class Meta:
        unique_together = ("project_id", "name")
        db_table = 'helm_repository'

    def __str__(self):
        return "[{id}]{project}/{name}".format(id=self.id, project=self.project_id, name=self.name)

    def refreshed(self, commit):
        """Record a successful refresh: stamp the time and current commit."""
        self.refreshed_at = timezone.now()
        self.commit = commit
        self.save()

    @property
    def plain_auths(self):
        """Return this repo's auths as plain dicts (type/role/credentials)."""
        auths = list(self.auths.values("credentials", "type", "role"))
        return [
            {
                "type": auth["type"],
                "role": auth["role"],
                "credentials": auth["credentials"],
            }
            for auth in auths
        ]

    @property
    def username_password(self) -> Tuple[str, str]:
        """Return (username, password) of the first auth, or ("", "") when
        no auth exists or its credentials are malformed."""
        try:
            credentials = list(self.auths.values("credentials"))
            credential = credentials[0]["credentials"]
            return (credential["username"], credential["password"])
        except Exception:
            return ("", "")
class RepositoryAuth(models.Model):
    """Credentials granting role-scoped access to a :class:`Repository`."""

    AUTH_CHOICE = (("BASIC", "BasicAuth"),)

    type = models.CharField('Type', choices=AUTH_CHOICE, max_length=16)
    # ex: {"password":"<PASSWORD>","username":"admin-T49e"}
    # Callable default so instances don't share one mutable dict.
    credentials = JSONField('Credentials', default=dict)
    repo = models.ForeignKey(Repository, on_delete=models.CASCADE, related_name='auths')
    # TODO: use rbac module instead
    role = models.CharField('Role', max_length=16)
    objects = RepositoryAuthManager()

    @property
    def credentials_decoded(self):
        """Pass-through accessor for the stored credentials."""
        return self.credentials
class Meta:
db_table = 'helm_repo_auth' | bcs-ui/backend/helm/helm/models/repo.py | from typing import Tuple
from django.db import models
from django.utils import timezone
from jsonfield import JSONField
from backend.utils.models import BaseTSModel
from .managers import RepositoryAuthManager, RepositoryManager
class Repository(BaseTSModel):
url = models.URLField('URL')
name = models.CharField('Name', max_length=32)
description = models.CharField(max_length=512)
project_id = models.CharField('ProjectID', max_length=32)
provider = models.CharField('Provider', max_length=32)
is_provisioned = models.BooleanField('Provisioned?', default=False)
refreshed_at = models.DateTimeField(null=True)
# git repo fields
commit = models.CharField(max_length=64, null=True)
# TODO: check, if git, use which branch?
branch = models.CharField(max_length=30, null=True)
# chart museum fields
storage_info = JSONField(default={})
objects = RepositoryManager()
class Meta:
unique_together = ("project_id", "name")
db_table = 'helm_repository'
def __str__(self):
return "[{id}]{project}/{name}".format(id=self.id, project=self.project_id, name=self.name)
def refreshed(self, commit):
self.refreshed_at = timezone.now()
self.commit = commit
self.save()
@property
def plain_auths(self):
auths = list(self.auths.values("credentials", "type", "role"))
return [
{
"type": auth["type"],
"role": auth["role"],
"credentials": auth["credentials"],
}
for auth in auths
]
@property
def username_password(self) -> Tuple[str, str]:
try:
credentials = list(self.auths.values("credentials"))
credential = credentials[0]["credentials"]
return (credential["username"], credential["password"])
except Exception:
return ("", "")
class RepositoryAuth(models.Model):
AUTH_CHOICE = (("BASIC", "BasicAuth"),)
type = models.CharField('Type', choices=AUTH_CHOICE, max_length=16)
# ex: {"password":"<PASSWORD>","username":"admin-T49e"}
credentials = JSONField('Credentials', default={})
repo = models.ForeignKey(Repository, on_delete=models.CASCADE, related_name='auths')
# TODO: use rbac module instead
role = models.CharField('Role', max_length=16)
objects = RepositoryAuthManager()
@property
def credentials_decoded(self):
return self.credentials
class Meta:
db_table = 'helm_repo_auth' | 0.424293 | 0.102799 |
"""Piecewise-linearly-controlled rotation."""
from typing import List, Optional
import numpy as np
from qiskit.circuit import QuantumRegister, QuantumCircuit
from qiskit.circuit.exceptions import CircuitError
from qiskit.circuit.library.arithmetic.linear_pauli_rotations import LinearPauliRotations
from unqomp.examples.intergercomparator import makeIntegerComparator
from unqomp.ancillaallocation import AncillaRegister, AncillaCircuit
def _contains_zero_breakpoint(breakpoints):
return np.isclose(0, breakpoints[0])
def makesPLR(num_state_qubits, breakpoints, slopes, offsets):
    """Build a piecewise-linearly-controlled Pauli rotation circuit.

    Args:
        num_state_qubits: number of qubits in the state register.
        breakpoints: ascending breakpoint values of the piecewise function.
        slopes: slope of each linear piece (one per breakpoint).
        offsets: offset of each linear piece (one per breakpoint).

    Returns:
        An AncillaCircuit rotating a single target qubit, using one
        comparator ancilla per non-zero breakpoint.
    """
    qr_state = QuantumRegister(num_state_qubits, name='state')
    qr_target = QuantumRegister(1, name='target')
    circuit = AncillaCircuit(qr_state, qr_target)
    # Convert slopes/offsets to incremental form: each piece's rotation is
    # applied on top of all previous pieces, so subtract the running sums.
    mapped_slopes = np.zeros_like(slopes)
    for i, slope in enumerate(slopes):
        mapped_slopes[i] = slope - sum(mapped_slopes[:i])
    mapped_offsets = np.zeros_like(offsets)
    for i, (offset, slope, point) in enumerate(zip(offsets, slopes, breakpoints)):
        mapped_offsets[i] = offset - slope * point - sum(mapped_offsets[:i])
    basis = 'Y'
    # apply comparators and controlled linear rotations
    for i, point in enumerate(breakpoints):
        if i == 0 and _contains_zero_breakpoint(breakpoints):
            # First piece starts at zero: no comparator needed, rotate
            # unconditionally. Use the shared `basis` variable here too
            # (previously hard-coded to 'Y', inconsistently with the branch
            # below).
            lin_r = LinearPauliRotations(num_state_qubits=num_state_qubits,
                                         slope=mapped_slopes[i],
                                         offset=mapped_offsets[i], basis=basis)
            circuit.append(lin_r.to_gate(), qr_state[:] + [qr_target])
        else:
            # One comparator ancilla flags state >= point; it controls the
            # rotation for this piece.
            comp_ancilla = circuit.new_ancilla_register(1, name='ac' + str(i))
            circuit.append(makeIntegerComparator(num_state_qubits, point).to_ancilla_gate(), [*qr_state[:], comp_ancilla[0]])
            # apply controlled rotation
            lin_r = LinearPauliRotations(num_state_qubits=num_state_qubits,
                                         slope=mapped_slopes[i],
                                         offset=mapped_offsets[i],
                                         basis=basis)
            circuit.append(lin_r.to_gate().control(), [comp_ancilla[0]] + qr_state[:] + [qr_target])
return circuit | unqomp/examples/piecewiselinrot.py |
"""Piecewise-linearly-controlled rotation."""
from typing import List, Optional
import numpy as np
from qiskit.circuit import QuantumRegister, QuantumCircuit
from qiskit.circuit.exceptions import CircuitError
from qiskit.circuit.library.arithmetic.linear_pauli_rotations import LinearPauliRotations
from unqomp.examples.intergercomparator import makeIntegerComparator
from unqomp.ancillaallocation import AncillaRegister, AncillaCircuit
def _contains_zero_breakpoint(breakpoints):
return np.isclose(0, breakpoints[0])
def makesPLR(num_state_qubits, breakpoints, slopes, offsets):
qr_state = QuantumRegister(num_state_qubits, name='state')
qr_target = QuantumRegister(1, name='target')
circuit = AncillaCircuit(qr_state, qr_target)
mapped_slopes = np.zeros_like(slopes)
for i, slope in enumerate(slopes):
mapped_slopes[i] = slope - sum(mapped_slopes[:i])
mapped_offsets = np.zeros_like(offsets)
for i, (offset, slope, point) in enumerate(zip(offsets, slopes, breakpoints)):
mapped_offsets[i] = offset - slope * point - sum(mapped_offsets[:i])
basis = 'Y'
# apply comparators and controlled linear rotations
for i, point in enumerate(breakpoints):
if i == 0 and _contains_zero_breakpoint(breakpoints):
# apply rotation
lin_r = LinearPauliRotations(num_state_qubits=num_state_qubits,
slope=mapped_slopes[i],
offset=mapped_offsets[i], basis = 'Y')
circuit.append(lin_r.to_gate(), qr_state[:] + [qr_target])
else:
comp_ancilla = circuit.new_ancilla_register(1, name = 'ac' + str(i))
circuit.append(makeIntegerComparator(num_state_qubits, point).to_ancilla_gate(), [*qr_state[:], comp_ancilla[0]])
# apply controlled rotation
lin_r = LinearPauliRotations(num_state_qubits=num_state_qubits,
slope=mapped_slopes[i],
offset=mapped_offsets[i],
basis=basis)
circuit.append(lin_r.to_gate().control(), [comp_ancilla[0]] + qr_state[:] + [qr_target])
return circuit | 0.922961 | 0.646886 |
#Types.
from typing import Dict, List, IO, Any
#PartialMeritRemoval class.
from PythonTests.Classes.Consensus.MeritRemoval import PartialMeritRemoval
#TestError Exception.
from PythonTests.Tests.Errors import TestError
#Meros classes.
from PythonTests.Meros.Meros import MessageType
from PythonTests.Meros.RPC import RPC
from PythonTests.Meros.Liver import Liver
from PythonTests.Meros.Syncer import Syncer
#MeritRemoval verifier.
from PythonTests.Tests.Consensus.Verify import verifyMeritRemoval
#JSON standard lib.
import json
def PartialTest(
    rpc: RPC
) -> None:
    """Exercise Meros's handling of a partial MeritRemoval three ways:
    caused live, received pre-built live, and synced after the fact."""
    #Load the test vectors: a Blockchain plus a pre-signed MeritRemoval.
    file: IO[Any] = open("PythonTests/Vectors/Consensus/MeritRemoval/Partial.json", "r")
    vectors: Dict[str, Any] = json.loads(file.read())
    file.close()
    #Map the genesis miner's BLS key to nickname 0.
    keys: Dict[bytes, int] = {
        bytes.fromhex(vectors["blockchain"][0]["header"]["miner"]): 0
    }
    nicks: List[bytes] = [bytes.fromhex(vectors["blockchain"][0]["header"]["miner"])]
    #MeritRemoval.
    #pylint: disable=no-member
    removal: PartialMeritRemoval = PartialMeritRemoval.fromSignedJSON(keys, vectors["removal"])
    #Create and execute a Liver to cause a Partial MeritRemoval.
    def sendElement() -> None:
        #Send the second Element.
        rpc.meros.signedElement(removal.se2)
        #Verify the MeritRemoval.
        if rpc.meros.recv() != (
            MessageType.SignedMeritRemoval.toByte() +
            removal.signedSerialize(nicks)
        ):
            raise TestError("Meros didn't send us the Merit Removal.")
        verifyMeritRemoval(rpc, 2, 2, removal.holder, True)
    #At Block 2 trigger the removal; at Block 3 confirm it was archived.
    Liver(
        rpc,
        vectors["blockchain"],
        callbacks={
            2: sendElement,
            3: lambda: verifyMeritRemoval(rpc, 2, 2, removal.holder, False)
        }
    ).live()
    #Create and execute a Liver to handle a Partial MeritRemoval.
    def sendMeritRemoval() -> None:
        #Send and verify the MeritRemoval.
        if rpc.meros.signedElement(removal) != rpc.meros.recv():
            raise TestError("Meros didn't send us the Merit Removal.")
        verifyMeritRemoval(rpc, 2, 2, removal.holder, True)
    Liver(
        rpc,
        vectors["blockchain"],
        callbacks={
            2: sendMeritRemoval,
            3: lambda: verifyMeritRemoval(rpc, 2, 2, removal.holder, False)
        }
    ).live()
    #Create and execute a Syncer to handle a Partial MeritRemoval.
    Syncer(rpc, vectors["blockchain"]).sync()
verifyMeritRemoval(rpc, 2, 2, removal.holder, False) | PythonTests/Tests/Consensus/MeritRemoval/PartialTest.py |
#Types.
from typing import Dict, List, IO, Any
#PartialMeritRemoval class.
from PythonTests.Classes.Consensus.MeritRemoval import PartialMeritRemoval
#TestError Exception.
from PythonTests.Tests.Errors import TestError
#Meros classes.
from PythonTests.Meros.Meros import MessageType
from PythonTests.Meros.RPC import RPC
from PythonTests.Meros.Liver import Liver
from PythonTests.Meros.Syncer import Syncer
#MeritRemoval verifier.
from PythonTests.Tests.Consensus.Verify import verifyMeritRemoval
#JSON standard lib.
import json
def PartialTest(
rpc: RPC
) -> None:
file: IO[Any] = open("PythonTests/Vectors/Consensus/MeritRemoval/Partial.json", "r")
vectors: Dict[str, Any] = json.loads(file.read())
file.close()
keys: Dict[bytes, int] = {
bytes.fromhex(vectors["blockchain"][0]["header"]["miner"]): 0
}
nicks: List[bytes] = [bytes.fromhex(vectors["blockchain"][0]["header"]["miner"])]
#MeritRemoval.
#pylint: disable=no-member
removal: PartialMeritRemoval = PartialMeritRemoval.fromSignedJSON(keys, vectors["removal"])
#Create and execute a Liver to cause a Partial MeritRemoval.
def sendElement() -> None:
#Send the second Element.
rpc.meros.signedElement(removal.se2)
#Verify the MeritRemoval.
if rpc.meros.recv() != (
MessageType.SignedMeritRemoval.toByte() +
removal.signedSerialize(nicks)
):
raise TestError("Meros didn't send us the Merit Removal.")
verifyMeritRemoval(rpc, 2, 2, removal.holder, True)
Liver(
rpc,
vectors["blockchain"],
callbacks={
2: sendElement,
3: lambda: verifyMeritRemoval(rpc, 2, 2, removal.holder, False)
}
).live()
#Create and execute a Liver to handle a Partial MeritRemoval.
def sendMeritRemoval() -> None:
#Send and verify the MeritRemoval.
if rpc.meros.signedElement(removal) != rpc.meros.recv():
raise TestError("Meros didn't send us the Merit Removal.")
verifyMeritRemoval(rpc, 2, 2, removal.holder, True)
Liver(
rpc,
vectors["blockchain"],
callbacks={
2: sendMeritRemoval,
3: lambda: verifyMeritRemoval(rpc, 2, 2, removal.holder, False)
}
).live()
#Create and execute a Syncer to handle a Partial MeritRemoval.
Syncer(rpc, vectors["blockchain"]).sync()
verifyMeritRemoval(rpc, 2, 2, removal.holder, False) | 0.662469 | 0.362377 |
from configparser import ConfigParser
import os
import numpy as np
import matplotlib.pyplot as plt
import gravelamps.lensing
#Read in the simple INI file utils_testing.ini
config = ConfigParser()
config.read("utils_testing.ini")
#Check if the data directory exists, if it doesn't make it
outdir = config.get("output_settings", "outdir")
data_subdirectory = outdir + "/data"
if not os.path.isdir(data_subdirectory):
os.mkdir(data_subdirectory)
#Test generation of the dimensionless frequency file
w_file = gravelamps.lensing.utils.generate_dimensionless_frequency_file(config)
print(f"Dimensionless Frequency file located at {w_file}")
#Test generation of the source position file
y_file = gravelamps.lensing.utils.generate_source_position_file(config)
print(f"Source Position file located at {y_file}")
#Test the generation of the interpolator
#Generate data covering dimensionless frequency and source position
dim_freq_array = np.linspace(0, 2*np.pi, 120)
sour_pos_array = np.linspace(0, 2*np.pi, 100)
#Generating amplification factor data - using dummy sin functions
amp_fac_real_array = np.zeros((len(dim_freq_array), len(sour_pos_array)))
amp_fac_imag_array = np.zeros(amp_fac_real_array.shape)
for i in range(len(sour_pos_array)):
amp_fac_real_array[:,i] = np.sin(dim_freq_array)
for j in range(len(dim_freq_array)):
amp_fac_imag_array[j,:] = np.sin(sour_pos_array)
#Create files containing test data
np.savetxt("test_w.dat", dim_freq_array)
np.savetxt("test_y.dat", sour_pos_array)
np.savetxt("test_freal.dat", amp_fac_real_array)
np.savetxt("test_fimag.dat", amp_fac_imag_array)
#Generate interpolator
interpolator = gravelamps.lensing.utils.generate_interpolator(
"test_w.dat", "test_y.dat", "test_freal.dat", "test_fimag.dat")
#Shift arrays to test interpolation
shifted_dim_freq = dim_freq_array + dim_freq_array[1]/2
shifted_sour_pos = sour_pos_array + sour_pos_array[1]/2
#Plots show the dimensionless frequency interpolation
#Left: Calculated values vs Interpolated
#Right: Residual between the two
#Top: Using the exact data points used to generate interpolator
#Bottom: Shifting the data to use non of the points used to generate interpolator
plt.subplot(2,2,1)
plt.plot(dim_freq_array, np.sin(dim_freq_array), label="Calculated")
plt.plot(dim_freq_array, np.real(interpolator(dim_freq_array, 0)),
linestyle='--', color='k', label="Interpolated")
plt.legend()
plt.subplot(2,2,2)
plt.plot(dim_freq_array, np.real(interpolator(dim_freq_array, 0)) - np.sin(dim_freq_array))
plt.subplot(2,2,3)
plt.plot(shifted_dim_freq, np.sin(shifted_dim_freq), label="Calculated")
plt.plot(shifted_dim_freq, np.real(interpolator(shifted_dim_freq, 0)),
linestyle="--", color='k', label="Interpolated")
plt.legend()
plt.subplot(2,2,4)
plt.plot(shifted_dim_freq, np.real(interpolator(shifted_dim_freq, 0)) - np.sin(shifted_dim_freq))
plt.show()
#Same as above but for source position interpolation
plt.subplot(2,2,1)
plt.plot(sour_pos_array, np.sin(sour_pos_array), label="Caluclated")
plt.plot(sour_pos_array, np.imag(interpolator(0, sour_pos_array)),
linestyle="--", color='k', label="Interpolated")
plt.legend()
plt.subplot(2,2,2)
plt.plot(sour_pos_array, np.imag(interpolator(0, sour_pos_array)) - np.sin(sour_pos_array))
plt.subplot(2,2,3)
plt.plot(shifted_sour_pos, np.sin(shifted_sour_pos),label="Calculated")
plt.plot(shifted_sour_pos, np.imag(interpolator(0, shifted_sour_pos)),
linestyle="--", color='k', label="Interpolated")
plt.legend()
plt.subplot(2,2,4)
plt.plot(shifted_sour_pos, np.imag(interpolator(0, shifted_sour_pos)) - np.sin(shifted_sour_pos))
plt.show() | review_materials/utils_testing/utils_testing.py | from configparser import ConfigParser
import os
import numpy as np
import matplotlib.pyplot as plt
import gravelamps.lensing
#Read in the simple INI file utils_testing.ini
config = ConfigParser()
config.read("utils_testing.ini")
#Check if the data directory exists, if it doesn't make it
outdir = config.get("output_settings", "outdir")
data_subdirectory = outdir + "/data"
if not os.path.isdir(data_subdirectory):
os.mkdir(data_subdirectory)
#Test generation of the dimensionless frequency file
w_file = gravelamps.lensing.utils.generate_dimensionless_frequency_file(config)
print(f"Dimensionless Frequency file located at {w_file}")
#Test generation of the source position file
y_file = gravelamps.lensing.utils.generate_source_position_file(config)
print(f"Source Position file located at {y_file}")
#Test the generation of the interpolator
#Generate data covering dimensionless frequency and source position
dim_freq_array = np.linspace(0, 2*np.pi, 120)
sour_pos_array = np.linspace(0, 2*np.pi, 100)
#Generating amplification factor data - using dummy sin functions
amp_fac_real_array = np.zeros((len(dim_freq_array), len(sour_pos_array)))
amp_fac_imag_array = np.zeros(amp_fac_real_array.shape)
for i in range(len(sour_pos_array)):
amp_fac_real_array[:,i] = np.sin(dim_freq_array)
for j in range(len(dim_freq_array)):
amp_fac_imag_array[j,:] = np.sin(sour_pos_array)
#Create files containing test data
np.savetxt("test_w.dat", dim_freq_array)
np.savetxt("test_y.dat", sour_pos_array)
np.savetxt("test_freal.dat", amp_fac_real_array)
np.savetxt("test_fimag.dat", amp_fac_imag_array)
#Generate interpolator
interpolator = gravelamps.lensing.utils.generate_interpolator(
"test_w.dat", "test_y.dat", "test_freal.dat", "test_fimag.dat")
#Shift arrays to test interpolation
shifted_dim_freq = dim_freq_array + dim_freq_array[1]/2
shifted_sour_pos = sour_pos_array + sour_pos_array[1]/2
#Plots show the dimensionless frequency interpolation
#Left: Calculated values vs Interpolated
#Right: Residual between the two
#Top: Using the exact data points used to generate interpolator
#Bottom: Shifting the data to use non of the points used to generate interpolator
plt.subplot(2,2,1)
plt.plot(dim_freq_array, np.sin(dim_freq_array), label="Calculated")
plt.plot(dim_freq_array, np.real(interpolator(dim_freq_array, 0)),
linestyle='--', color='k', label="Interpolated")
plt.legend()
plt.subplot(2,2,2)
plt.plot(dim_freq_array, np.real(interpolator(dim_freq_array, 0)) - np.sin(dim_freq_array))
plt.subplot(2,2,3)
plt.plot(shifted_dim_freq, np.sin(shifted_dim_freq), label="Calculated")
plt.plot(shifted_dim_freq, np.real(interpolator(shifted_dim_freq, 0)),
linestyle="--", color='k', label="Interpolated")
plt.legend()
plt.subplot(2,2,4)
plt.plot(shifted_dim_freq, np.real(interpolator(shifted_dim_freq, 0)) - np.sin(shifted_dim_freq))
plt.show()
#Same as above but for source position interpolation
plt.subplot(2,2,1)
plt.plot(sour_pos_array, np.sin(sour_pos_array), label="Caluclated")
plt.plot(sour_pos_array, np.imag(interpolator(0, sour_pos_array)),
linestyle="--", color='k', label="Interpolated")
plt.legend()
plt.subplot(2,2,2)
plt.plot(sour_pos_array, np.imag(interpolator(0, sour_pos_array)) - np.sin(sour_pos_array))
plt.subplot(2,2,3)
plt.plot(shifted_sour_pos, np.sin(shifted_sour_pos),label="Calculated")
plt.plot(shifted_sour_pos, np.imag(interpolator(0, shifted_sour_pos)),
linestyle="--", color='k', label="Interpolated")
plt.legend()
plt.subplot(2,2,4)
plt.plot(shifted_sour_pos, np.imag(interpolator(0, shifted_sour_pos)) - np.sin(shifted_sour_pos))
plt.show() | 0.417271 | 0.591959 |
from django.contrib.auth import get_user_model
from django.test import TestCase
from django.test.client import Client
from formfactory.tests.test_base import load_fixtures
class AdminTestCase(TestCase):
def setUp(self):
load_fixtures(self)
self.client = Client()
self.editor = get_user_model().objects.create(
username="editor",
email="<EMAIL>",
is_superuser=True,
is_staff=True
)
self.editor.set_password("password")
self.editor.save()
self.client.login(username="editor", password="password")
def test_admin(self):
response = self.client.get("/admin/")
self.assertEqual(response.status_code, 200)
response = self.client.get("/admin/formfactory/")
self.assertEqual(response.status_code, 200)
def test_admin_form(self):
response = self.client.get("/admin/formfactory/form/")
self.assertEqual(response.status_code, 200)
response = self.client.get("/admin/formfactory/form/add/")
self.assertEqual(response.status_code, 200)
def test_admin_action(self):
response = self.client.get("/admin/formfactory/action/")
self.assertEqual(response.status_code, 200)
response = self.client.get("/admin/formfactory/action/add/")
self.assertEqual(response.status_code, 200)
# Ensure that the action choice field is populated
self.assertContains(response, self.action_data["action"])
self.assertContains(response, self.dummy_action)
def test_admin_fieldoption(self):
response = self.client.get("/admin/formfactory/fieldchoice/")
self.assertEqual(response.status_code, 200)
response = self.client.get("/admin/formfactory/fieldchoice/add/")
self.assertEqual(response.status_code, 200)
def test_admin_formdata(self):
response = self.client.get("/admin/formfactory/formdata/")
self.assertEqual(response.status_code, 200)
response = self.client.get("/admin/formfactory/formdata/add/")
self.assertEqual(response.status_code, 200)
def test_admin_fieldgroup(self):
response = self.client.get("/admin/formfactory/formfieldgroup/")
self.assertEqual(response.status_code, 200)
response = self.client.get("/admin/formfactory/formfieldgroup/add/")
self.assertEqual(response.status_code, 200)
def test_admin_field(self):
response = self.client.get("/admin/formfactory/formfield/")
self.assertEqual(response.status_code, 200)
response = self.client.get("/admin/formfactory/formfield/add/")
self.assertEqual(response.status_code, 200)
def test_admin_wizard(self):
response = self.client.get("/admin/formfactory/wizard/")
self.assertEqual(response.status_code, 200)
response = self.client.get("/admin/formfactory/wizard/add/")
self.assertEqual(response.status_code, 200)
def tearDown(self):
pass | formfactory/tests/test_admin.py | from django.contrib.auth import get_user_model
from django.test import TestCase
from django.test.client import Client
from formfactory.tests.test_base import load_fixtures
class AdminTestCase(TestCase):
def setUp(self):
load_fixtures(self)
self.client = Client()
self.editor = get_user_model().objects.create(
username="editor",
email="<EMAIL>",
is_superuser=True,
is_staff=True
)
self.editor.set_password("password")
self.editor.save()
self.client.login(username="editor", password="password")
def test_admin(self):
response = self.client.get("/admin/")
self.assertEqual(response.status_code, 200)
response = self.client.get("/admin/formfactory/")
self.assertEqual(response.status_code, 200)
def test_admin_form(self):
response = self.client.get("/admin/formfactory/form/")
self.assertEqual(response.status_code, 200)
response = self.client.get("/admin/formfactory/form/add/")
self.assertEqual(response.status_code, 200)
def test_admin_action(self):
response = self.client.get("/admin/formfactory/action/")
self.assertEqual(response.status_code, 200)
response = self.client.get("/admin/formfactory/action/add/")
self.assertEqual(response.status_code, 200)
# Ensure that the action choice field is populated
self.assertContains(response, self.action_data["action"])
self.assertContains(response, self.dummy_action)
def test_admin_fieldoption(self):
response = self.client.get("/admin/formfactory/fieldchoice/")
self.assertEqual(response.status_code, 200)
response = self.client.get("/admin/formfactory/fieldchoice/add/")
self.assertEqual(response.status_code, 200)
def test_admin_formdata(self):
response = self.client.get("/admin/formfactory/formdata/")
self.assertEqual(response.status_code, 200)
response = self.client.get("/admin/formfactory/formdata/add/")
self.assertEqual(response.status_code, 200)
def test_admin_fieldgroup(self):
response = self.client.get("/admin/formfactory/formfieldgroup/")
self.assertEqual(response.status_code, 200)
response = self.client.get("/admin/formfactory/formfieldgroup/add/")
self.assertEqual(response.status_code, 200)
def test_admin_field(self):
response = self.client.get("/admin/formfactory/formfield/")
self.assertEqual(response.status_code, 200)
response = self.client.get("/admin/formfactory/formfield/add/")
self.assertEqual(response.status_code, 200)
def test_admin_wizard(self):
response = self.client.get("/admin/formfactory/wizard/")
self.assertEqual(response.status_code, 200)
response = self.client.get("/admin/formfactory/wizard/add/")
self.assertEqual(response.status_code, 200)
def tearDown(self):
pass | 0.506836 | 0.130535 |
import time
from binaryninja import (AddressField, BackgroundTaskThread, ChoiceField,
HighlightStandardColor, Settings,
execute_on_main_thread_and_wait, get_form_input, log)
from binaryninjaui import FileContext, LinearView, UIContext, ViewFrame
from emulator.errors import (UnimplementedOperationError,
UninitializedRegisterError)
from PySide2.QtCore import SIGNAL, QObject
from PySide2.QtGui import QFont, QFontMetrics
from PySide2.QtWidgets import QHBoxLayout, QPushButton, QWidget
from .hooks import add_hook, remove_hook
from .memory import EmulatorMemoryModel, rewrite_segments
from .stack import EmulatorStackModel
from .registers import RegisterEmulatorModel
class EmulatorRunTaskThread(BackgroundTaskThread):
def __init__(self, widget, emulator, il):
self.widget = widget
self.emulator = emulator
self.starting_il = il
super().__init__()
def run(self):
il = self.starting_il
view = self.emulator.view
self.emulator.set_next_instr_index(il.function, il.instr_index)
self.widget.running = True
while self.widget.running:
if (il.function, il.instr_index) in self.emulator.breakpoints:
il.function.source_function.set_user_instr_highlight(
il.address,
HighlightStandardColor.NoHighlightColor
)
view.navigate(view.file.view, il.address)
break
if self.widget.execute_one_instruction(self.emulator, il):
il = self.emulator.current_function[
self.emulator.current_instr_index
]
else:
break
print('Complete')
class EmulatorButton(QPushButton):
def __init__(self, view, label, callback):
super().__init__(label)
self.callback = callback
self.view = view
font_name = Settings().get_string('ui.font.name')
font_size = Settings().get_integer('ui.font.size')
button_font = QFont(font_name, font_size)
fm = QFontMetrics(button_font)
self.setFont(button_font)
self.setFixedWidth(fm.horizontalAdvance(label) + 10)
QObject.connect(self, SIGNAL('clicked()'), self.callback)
class EmulatorButtonsWidget(QWidget):
def __init__(self, parent, view):
super().__init__(parent)
self.view = view
self.view.session_data['emulator.buttons.widget'] = self
self.running = False
self.reset_button = EmulatorButton(view, '♻️', self.reset)
self.reset_button.setToolTip('Reset emulator')
self.run_button = EmulatorButton(view, '▶️', self.run)
self.run_button.setToolTip('Run emulator')
self.run_to_button = EmulatorButton(view, '⏭', self.run_to)
self.run_to_button.setToolTip('Run to set location')
self.set_stop_button = EmulatorButton(view, '⏹', self.set_stop)
self.set_stop_button.setToolTip('Set stop location on address')
self.pause_button = EmulatorButton(view, '⏸', self.pause)
self.pause_button.setToolTip('Pause emulator')
self.step_button = EmulatorButton(view, '⏯', self.step)
self.step_button.setToolTip('Step one disassembly instruction')
self.map_memory_button = EmulatorButton(view, '🗺', self.map_memory)
self.map_memory_button.setToolTip('Map virtual memory')
self.unmap_memory_button = EmulatorButton(view, '🚮', self.unmap_memory)
self.unmap_memory_button.setToolTip('Unmap virtual memory')
self.view_memory_button = EmulatorButton(view, '📈', self.view_memory)
self.view_memory_button.setToolTip('Open memory view')
self.add_hook_button = EmulatorButton(view, '🎣', self.add_hook)
self.add_hook_button.setToolTip('Add instruction hook')
self.remove_hook_button = EmulatorButton(view, '🐟', self.remove_hook)
self.remove_hook_button.setToolTip('Remove instruction hook')
self.button_layout = QHBoxLayout(self)
self.button_layout.addWidget(self.reset_button)
self.button_layout.addWidget(self.run_button)
self.button_layout.addWidget(self.pause_button)
self.button_layout.addWidget(self.run_to_button)
self.button_layout.addWidget(self.set_stop_button)
self.button_layout.addWidget(self.step_button)
self.button_layout.addWidget(self.map_memory_button)
self.button_layout.addWidget(self.unmap_memory_button)
self.button_layout.addWidget(self.view_memory_button)
self.button_layout.addWidget(self.add_hook_button)
self.button_layout.addWidget(self.remove_hook_button)
def get_context(self):
ctx = self.parent().view_frame.actionContext()
if ctx.lowLevelILFunction is not None:
function = ctx.lowLevelILFunction
if ctx.instrIndex == 0xffffffffffffffff:
il = function[0]
else:
il = function[ctx.instrIndex]
elif ctx.mediumLevelILFunction is not None:
if ctx.instrIndex == 0xffffffffffffffff:
il = ctx.mediumLevelILFunction[0].llil.non_ssa_form
else:
il = ctx.mediumLevelILFunction[
ctx.instrIndex
].llil.non_ssa_form
elif ctx.function is not None:
function = ctx.function
il = function.get_low_level_il_at(ctx.address)
return il
def run(self):
emulator = self.view.session_data['emulator']
il = self.get_context()
task = EmulatorRunTaskThread(self, emulator, il)
task.start()
def pause(self):
self.running = False
def run_to(self):
pass
def set_stop(self):
il = self.get_context()
emulator = self.view.session_data['emulator']
emulator.breakpoints.add((il.function, il.instr_index))
il.function.source_function.set_auto_instr_highlight(
il.address,
HighlightStandardColor.RedHighlightColor
)
def reset(self):
self.running = False
emulator = self.view.session_data['emulator']
if (emulator.current_function is not None and
emulator.current_instr_index is not None):
current_il = emulator.current_function[
emulator.current_instr_index
]
emulator.current_function.source_function.set_auto_instr_highlight(
current_il.address,
HighlightStandardColor.NoHighlightColor
)
self.view.session_data["emulator.memory.view"] = rewrite_segments(
self.view
)
model = EmulatorMemoryModel(self.view)
self.view.session_data["emulator.memory.model"] = model
self.view.session_data["emulator.memory.widget"].setModel(model)
model = EmulatorStackModel(self.view)
self.view.session_data['emulator.stack.widget'].setModel(model)
model = RegisterEmulatorModel(self.view)
self.view.session_data['emulator.registers.widget'].setModel(model)
self.view.session_data['emulator.registers.widget'].update()
def step(self):
ctx = self.parent().view_frame.actionContext()
emulator = self.parent().emulator
if ctx.lowLevelILFunction is not None:
function = ctx.lowLevelILFunction
if ctx.instrIndex == 0xffffffffffffffff:
il = function[0]
else:
il = function[ctx.instrIndex]
elif ctx.mediumLevelILFunction is not None:
if ctx.instrIndex == 0xffffffffffffffff:
il = ctx.mediumLevelILFunction[0].llil.non_ssa_form
else:
il = ctx.mediumLevelILFunction[
ctx.instrIndex
].llil.non_ssa_form
elif ctx.function is not None:
function = ctx.function
il = function.get_low_level_il_at(ctx.address)
emulator.set_next_instr_index(
il.function, il.instr_index
)
il_start = il.instr_index
exits = il.function.source_function.get_low_level_il_exits_at(
il.address
)
il_exit = max(
exits
) if exits else il_start
next_il = il
while (il.function == emulator.current_function and
il_start <= emulator.current_instr_index <= il_exit):
if not self.execute_one_instruction(emulator, next_il):
break
if emulator.current_instr_index < len(emulator.current_function):
next_il = emulator.current_function[
emulator.current_instr_index
]
else:
emulator.view.navigate(emulator.view.file.view, next_il.address)
def execute_one_instruction(self, emulator, il):
try:
emulator.execute(il)
except UninitializedRegisterError as e:
print(f'UninitializedRegisterError: {e.reg}')
return False
except UnimplementedOperationError as e:
print(f'UnimplementedOperationError: {e.op!r}')
return False
return True
def map_memory(self):
start = AddressField('Start (hex):')
length = AddressField('Length (hex):')
flags = ChoiceField(
'Flags',
[
'---',
'--x',
'-w-',
'-wx',
'r--',
'r-x',
'rw-',
'rwx'
]
)
get_form_input([start, length, flags], 'Map Memory')
self.parent().emulator.map_memory(
start.result,
length.result,
flags.result
)
def unmap_memory(self):
start = AddressField('Start (hex):')
length = AddressField('Length (hex):')
get_form_input([start, length], 'Unmap Memory')
self.parent().emulator.unmap_memory(start.result, length.result)
def view_memory(self):
memory_view = self.parent().view.session_data['emulator.memory.view']
ctx = UIContext.activeContext()
linear_view = LinearView(memory_view, None)
memory_view.register_notification(linear_view)
ctx.createTabForWidget('Emulator Memory', linear_view)
def add_hook(self):
emulator = self.parent().view.session_data['emulator']
ctx = UIContext.activeContext()
content = ctx.contentActionHandler()
action_context = content.actionContext()
llil = action_context.lowLevelILFunction
instr_index = action_context.instrIndex
if None in (llil, instr_index) or instr_index == 0xffffffffffffffff:
log.log_alert('LLIL Function/Instruction not selected!')
return
add_hook(emulator, llil[instr_index])
def remove_hook(self):
emulator = self.parent().view.session_data['emulator']
ctx = UIContext.activeContext()
content = ctx.contentActionHandler()
action_context = content.actionContext()
llil = action_context.lowLevelILFunction
instr_index = action_context.instrIndex
if None in (llil, instr_index) or instr_index == 0xffffffffffffffff:
log.log_alert('LLIL Function/Instruction not selected!')
return
remove_hook(emulator, llil[instr_index]) | emulator/emulatorui/buttons.py | import time
from binaryninja import (AddressField, BackgroundTaskThread, ChoiceField,
HighlightStandardColor, Settings,
execute_on_main_thread_and_wait, get_form_input, log)
from binaryninjaui import FileContext, LinearView, UIContext, ViewFrame
from emulator.errors import (UnimplementedOperationError,
UninitializedRegisterError)
from PySide2.QtCore import SIGNAL, QObject
from PySide2.QtGui import QFont, QFontMetrics
from PySide2.QtWidgets import QHBoxLayout, QPushButton, QWidget
from .hooks import add_hook, remove_hook
from .memory import EmulatorMemoryModel, rewrite_segments
from .stack import EmulatorStackModel
from .registers import RegisterEmulatorModel
class EmulatorRunTaskThread(BackgroundTaskThread):
def __init__(self, widget, emulator, il):
self.widget = widget
self.emulator = emulator
self.starting_il = il
super().__init__()
def run(self):
il = self.starting_il
view = self.emulator.view
self.emulator.set_next_instr_index(il.function, il.instr_index)
self.widget.running = True
while self.widget.running:
if (il.function, il.instr_index) in self.emulator.breakpoints:
il.function.source_function.set_user_instr_highlight(
il.address,
HighlightStandardColor.NoHighlightColor
)
view.navigate(view.file.view, il.address)
break
if self.widget.execute_one_instruction(self.emulator, il):
il = self.emulator.current_function[
self.emulator.current_instr_index
]
else:
break
print('Complete')
class EmulatorButton(QPushButton):
def __init__(self, view, label, callback):
super().__init__(label)
self.callback = callback
self.view = view
font_name = Settings().get_string('ui.font.name')
font_size = Settings().get_integer('ui.font.size')
button_font = QFont(font_name, font_size)
fm = QFontMetrics(button_font)
self.setFont(button_font)
self.setFixedWidth(fm.horizontalAdvance(label) + 10)
QObject.connect(self, SIGNAL('clicked()'), self.callback)
class EmulatorButtonsWidget(QWidget):
def __init__(self, parent, view):
super().__init__(parent)
self.view = view
self.view.session_data['emulator.buttons.widget'] = self
self.running = False
self.reset_button = EmulatorButton(view, '♻️', self.reset)
self.reset_button.setToolTip('Reset emulator')
self.run_button = EmulatorButton(view, '▶️', self.run)
self.run_button.setToolTip('Run emulator')
self.run_to_button = EmulatorButton(view, '⏭', self.run_to)
self.run_to_button.setToolTip('Run to set location')
self.set_stop_button = EmulatorButton(view, '⏹', self.set_stop)
self.set_stop_button.setToolTip('Set stop location on address')
self.pause_button = EmulatorButton(view, '⏸', self.pause)
self.pause_button.setToolTip('Pause emulator')
self.step_button = EmulatorButton(view, '⏯', self.step)
self.step_button.setToolTip('Step one disassembly instruction')
self.map_memory_button = EmulatorButton(view, '🗺', self.map_memory)
self.map_memory_button.setToolTip('Map virtual memory')
self.unmap_memory_button = EmulatorButton(view, '🚮', self.unmap_memory)
self.unmap_memory_button.setToolTip('Unmap virtual memory')
self.view_memory_button = EmulatorButton(view, '📈', self.view_memory)
self.view_memory_button.setToolTip('Open memory view')
self.add_hook_button = EmulatorButton(view, '🎣', self.add_hook)
self.add_hook_button.setToolTip('Add instruction hook')
self.remove_hook_button = EmulatorButton(view, '🐟', self.remove_hook)
self.remove_hook_button.setToolTip('Remove instruction hook')
self.button_layout = QHBoxLayout(self)
self.button_layout.addWidget(self.reset_button)
self.button_layout.addWidget(self.run_button)
self.button_layout.addWidget(self.pause_button)
self.button_layout.addWidget(self.run_to_button)
self.button_layout.addWidget(self.set_stop_button)
self.button_layout.addWidget(self.step_button)
self.button_layout.addWidget(self.map_memory_button)
self.button_layout.addWidget(self.unmap_memory_button)
self.button_layout.addWidget(self.view_memory_button)
self.button_layout.addWidget(self.add_hook_button)
self.button_layout.addWidget(self.remove_hook_button)
def get_context(self):
ctx = self.parent().view_frame.actionContext()
if ctx.lowLevelILFunction is not None:
function = ctx.lowLevelILFunction
if ctx.instrIndex == 0xffffffffffffffff:
il = function[0]
else:
il = function[ctx.instrIndex]
elif ctx.mediumLevelILFunction is not None:
if ctx.instrIndex == 0xffffffffffffffff:
il = ctx.mediumLevelILFunction[0].llil.non_ssa_form
else:
il = ctx.mediumLevelILFunction[
ctx.instrIndex
].llil.non_ssa_form
elif ctx.function is not None:
function = ctx.function
il = function.get_low_level_il_at(ctx.address)
return il
def run(self):
emulator = self.view.session_data['emulator']
il = self.get_context()
task = EmulatorRunTaskThread(self, emulator, il)
task.start()
def pause(self):
self.running = False
def run_to(self):
pass
def set_stop(self):
il = self.get_context()
emulator = self.view.session_data['emulator']
emulator.breakpoints.add((il.function, il.instr_index))
il.function.source_function.set_auto_instr_highlight(
il.address,
HighlightStandardColor.RedHighlightColor
)
def reset(self):
self.running = False
emulator = self.view.session_data['emulator']
if (emulator.current_function is not None and
emulator.current_instr_index is not None):
current_il = emulator.current_function[
emulator.current_instr_index
]
emulator.current_function.source_function.set_auto_instr_highlight(
current_il.address,
HighlightStandardColor.NoHighlightColor
)
self.view.session_data["emulator.memory.view"] = rewrite_segments(
self.view
)
model = EmulatorMemoryModel(self.view)
self.view.session_data["emulator.memory.model"] = model
self.view.session_data["emulator.memory.widget"].setModel(model)
model = EmulatorStackModel(self.view)
self.view.session_data['emulator.stack.widget'].setModel(model)
model = RegisterEmulatorModel(self.view)
self.view.session_data['emulator.registers.widget'].setModel(model)
self.view.session_data['emulator.registers.widget'].update()
def step(self):
ctx = self.parent().view_frame.actionContext()
emulator = self.parent().emulator
if ctx.lowLevelILFunction is not None:
function = ctx.lowLevelILFunction
if ctx.instrIndex == 0xffffffffffffffff:
il = function[0]
else:
il = function[ctx.instrIndex]
elif ctx.mediumLevelILFunction is not None:
if ctx.instrIndex == 0xffffffffffffffff:
il = ctx.mediumLevelILFunction[0].llil.non_ssa_form
else:
il = ctx.mediumLevelILFunction[
ctx.instrIndex
].llil.non_ssa_form
elif ctx.function is not None:
function = ctx.function
il = function.get_low_level_il_at(ctx.address)
emulator.set_next_instr_index(
il.function, il.instr_index
)
il_start = il.instr_index
exits = il.function.source_function.get_low_level_il_exits_at(
il.address
)
il_exit = max(
exits
) if exits else il_start
next_il = il
while (il.function == emulator.current_function and
il_start <= emulator.current_instr_index <= il_exit):
if not self.execute_one_instruction(emulator, next_il):
break
if emulator.current_instr_index < len(emulator.current_function):
next_il = emulator.current_function[
emulator.current_instr_index
]
else:
emulator.view.navigate(emulator.view.file.view, next_il.address)
def execute_one_instruction(self, emulator, il):
try:
emulator.execute(il)
except UninitializedRegisterError as e:
print(f'UninitializedRegisterError: {e.reg}')
return False
except UnimplementedOperationError as e:
print(f'UnimplementedOperationError: {e.op!r}')
return False
return True
def map_memory(self):
start = AddressField('Start (hex):')
length = AddressField('Length (hex):')
flags = ChoiceField(
'Flags',
[
'---',
'--x',
'-w-',
'-wx',
'r--',
'r-x',
'rw-',
'rwx'
]
)
get_form_input([start, length, flags], 'Map Memory')
self.parent().emulator.map_memory(
start.result,
length.result,
flags.result
)
def unmap_memory(self):
start = AddressField('Start (hex):')
length = AddressField('Length (hex):')
get_form_input([start, length], 'Unmap Memory')
self.parent().emulator.unmap_memory(start.result, length.result)
def view_memory(self):
memory_view = self.parent().view.session_data['emulator.memory.view']
ctx = UIContext.activeContext()
linear_view = LinearView(memory_view, None)
memory_view.register_notification(linear_view)
ctx.createTabForWidget('Emulator Memory', linear_view)
def add_hook(self):
emulator = self.parent().view.session_data['emulator']
ctx = UIContext.activeContext()
content = ctx.contentActionHandler()
action_context = content.actionContext()
llil = action_context.lowLevelILFunction
instr_index = action_context.instrIndex
if None in (llil, instr_index) or instr_index == 0xffffffffffffffff:
log.log_alert('LLIL Function/Instruction not selected!')
return
add_hook(emulator, llil[instr_index])
def remove_hook(self):
emulator = self.parent().view.session_data['emulator']
ctx = UIContext.activeContext()
content = ctx.contentActionHandler()
action_context = content.actionContext()
llil = action_context.lowLevelILFunction
instr_index = action_context.instrIndex
if None in (llil, instr_index) or instr_index == 0xffffffffffffffff:
log.log_alert('LLIL Function/Instruction not selected!')
return
remove_hook(emulator, llil[instr_index]) | 0.419172 | 0.090333 |
import unittest
from mapfmclient import MarkedLocation, Problem
from ictsm.solver import solve
"""
Basic single-agent pathfinding tests using top-level 'solve' interface
"""
class SolveTestSingle(unittest.TestCase):
def setUp(self) -> None:
def creator(start, goal):
return Problem(
[
[1, 1, 1, 1, 1],
[1, 0, 1, 0, 1],
[1, 0, 0, 0, 1],
[1, 0, 1, 0, 1],
[1, 1, 1, 1, 1],
],
5,
5,
[start],
[goal],
0,
1,
1,
)
self.instance_creator = creator
def test_solve_zero(self):
start: MarkedLocation = MarkedLocation(0, 1, 1)
goal: MarkedLocation = MarkedLocation(0, 1, 1)
problem: Problem = self.instance_creator(start, goal)
solution = solve(problem)
self.assertListEqual(solution.paths[0].route, [(1, 1), (1, 1)])
def test_solve_one(self):
start: MarkedLocation = MarkedLocation(0, 1, 1)
goal: MarkedLocation = MarkedLocation(0, 1, 2)
problem: Problem = self.instance_creator(start, goal)
solution = solve(problem)
self.assertListEqual(solution.paths[0].route, [(1, 1), (1, 2)])
def test_solve_many(self):
start: MarkedLocation = MarkedLocation(0, 1, 1)
goal: MarkedLocation = MarkedLocation(0, 3, 3)
problem: Problem = self.instance_creator(start, goal)
solution = solve(problem)
self.assertListEqual(
solution.paths[0].route, [(1, 1), (1, 2), (2, 2), (3, 2), (3, 3)]
)
"""
Elementary two-agent tests
"""
class SolveTestTwo(unittest.TestCase):
def setUp(self) -> None:
def creator(start1, start2, goal1, goal2):
return Problem(
[
[1, 1, 1, 1],
[1, 0, 0, 1],
[1, 0, 0, 1],
[1, 1, 1, 1],
],
4,
4,
[start1, start2],
[goal1, goal2],
0,
2,
2,
)
self.instance_creator = creator
def test_solve_zero(self):
start: MarkedLocation = MarkedLocation(0, 1, 1)
goal: MarkedLocation = MarkedLocation(0, 1, 2)
problem: Problem = self.instance_creator(start, goal, start, goal)
solution = solve(problem)
routes = list(map(lambda pi: pi.route, solution.paths))
self.assertListEqual(routes, [[(1, 1), (1, 1)], [(1, 2), (1, 2)]])
def test_solve_one(self):
problem: Problem = self.instance_creator(
MarkedLocation(0, 1, 1),
MarkedLocation(0, 1, 2),
MarkedLocation(0, 2, 1),
MarkedLocation(0, 2, 2),
)
solution = solve(problem)
routes = list(map(lambda pi: pi.route, solution.paths))
self.assertListEqual(routes, [[(1, 1), (2, 1)], [(1, 2), (2, 2)]])
def test_solve_many(self):
problem: Problem = self.instance_creator(
MarkedLocation(0, 1, 1),
MarkedLocation(1, 1, 2),
MarkedLocation(0, 2, 2),
MarkedLocation(1, 2, 1),
)
solution = solve(problem)
routes = list(map(lambda pi: pi.route, solution.paths))
self.assertListEqual(
routes, [[(1, 1), (2, 1), (2, 2)], [(1, 2), (1, 1), (2, 1)]]
)
def test_solve_swap(self):
problem: Problem = self.instance_creator(
MarkedLocation(0, 1, 1),
MarkedLocation(0, 2, 2),
MarkedLocation(0, 2, 2),
MarkedLocation(0, 1, 1),
)
solution = solve(problem)
s = sum(list(map(lambda pi: len(pi.route), solution.paths)))
self.assertEqual(s, 4) | src/test/test_solve.py | import unittest
from mapfmclient import MarkedLocation, Problem
from ictsm.solver import solve
"""
Basic single-agent pathfinding tests using top-level 'solve' interface
"""
class SolveTestSingle(unittest.TestCase):
def setUp(self) -> None:
def creator(start, goal):
return Problem(
[
[1, 1, 1, 1, 1],
[1, 0, 1, 0, 1],
[1, 0, 0, 0, 1],
[1, 0, 1, 0, 1],
[1, 1, 1, 1, 1],
],
5,
5,
[start],
[goal],
0,
1,
1,
)
self.instance_creator = creator
def test_solve_zero(self):
start: MarkedLocation = MarkedLocation(0, 1, 1)
goal: MarkedLocation = MarkedLocation(0, 1, 1)
problem: Problem = self.instance_creator(start, goal)
solution = solve(problem)
self.assertListEqual(solution.paths[0].route, [(1, 1), (1, 1)])
def test_solve_one(self):
start: MarkedLocation = MarkedLocation(0, 1, 1)
goal: MarkedLocation = MarkedLocation(0, 1, 2)
problem: Problem = self.instance_creator(start, goal)
solution = solve(problem)
self.assertListEqual(solution.paths[0].route, [(1, 1), (1, 2)])
def test_solve_many(self):
start: MarkedLocation = MarkedLocation(0, 1, 1)
goal: MarkedLocation = MarkedLocation(0, 3, 3)
problem: Problem = self.instance_creator(start, goal)
solution = solve(problem)
self.assertListEqual(
solution.paths[0].route, [(1, 1), (1, 2), (2, 2), (3, 2), (3, 3)]
)
"""
Elementary two-agent tests
"""
class SolveTestTwo(unittest.TestCase):
def setUp(self) -> None:
def creator(start1, start2, goal1, goal2):
return Problem(
[
[1, 1, 1, 1],
[1, 0, 0, 1],
[1, 0, 0, 1],
[1, 1, 1, 1],
],
4,
4,
[start1, start2],
[goal1, goal2],
0,
2,
2,
)
self.instance_creator = creator
def test_solve_zero(self):
start: MarkedLocation = MarkedLocation(0, 1, 1)
goal: MarkedLocation = MarkedLocation(0, 1, 2)
problem: Problem = self.instance_creator(start, goal, start, goal)
solution = solve(problem)
routes = list(map(lambda pi: pi.route, solution.paths))
self.assertListEqual(routes, [[(1, 1), (1, 1)], [(1, 2), (1, 2)]])
def test_solve_one(self):
problem: Problem = self.instance_creator(
MarkedLocation(0, 1, 1),
MarkedLocation(0, 1, 2),
MarkedLocation(0, 2, 1),
MarkedLocation(0, 2, 2),
)
solution = solve(problem)
routes = list(map(lambda pi: pi.route, solution.paths))
self.assertListEqual(routes, [[(1, 1), (2, 1)], [(1, 2), (2, 2)]])
def test_solve_many(self):
problem: Problem = self.instance_creator(
MarkedLocation(0, 1, 1),
MarkedLocation(1, 1, 2),
MarkedLocation(0, 2, 2),
MarkedLocation(1, 2, 1),
)
solution = solve(problem)
routes = list(map(lambda pi: pi.route, solution.paths))
self.assertListEqual(
routes, [[(1, 1), (2, 1), (2, 2)], [(1, 2), (1, 1), (2, 1)]]
)
def test_solve_swap(self):
problem: Problem = self.instance_creator(
MarkedLocation(0, 1, 1),
MarkedLocation(0, 2, 2),
MarkedLocation(0, 2, 2),
MarkedLocation(0, 1, 1),
)
solution = solve(problem)
s = sum(list(map(lambda pi: len(pi.route), solution.paths)))
self.assertEqual(s, 4) | 0.614857 | 0.770939 |